diff (stringlengths 41 – 2.03M) | msg (stringlengths 1 – 1.5k, ⌀) | repo (stringlengths 5 – 40) | sha (stringlengths 40 – 40) | time (stringlengths 20 – 20) |
---|---|---|---|---|
mmm a / CHANGELOG . md <nl> ppp b / CHANGELOG . md <nl> <nl> + vcpkg ( 0 . 0 . 40 ) <nl> + mmmmmmmmmmmm - - <nl> + * Add ports : <nl> + - ace 6 . 4 . 0 <nl> + - asio 1 . 10 . 6 <nl> + - bond 5 . 0 . 0 <nl> + - constexpr 1 . 0 <nl> + - doctest 1 . 1 . 0 <nl> + - eigen3 3 . 2 . 9 <nl> + - fmt 3 . 0 . 0 <nl> + - gflags 2 . 1 . 2 <nl> + - glm 0 . 9 . 8 . 1 <nl> + - grpc 1 . 1 . 0 <nl> + - gsl 0 - fd5ad87bf <nl> + - gtest 1 . 8 <nl> + - libiconv 1 . 14 <nl> + - mpir 2 . 7 . 2 <nl> + - protobuf 3 . 0 . 2 <nl> + - ragel 6 . 9 <nl> + - rapidxml 1 . 13 <nl> + - sery 1 . 0 . 0 <nl> + - stb 1 . 0 <nl> + * Update ports : <nl> + - boost 1 . 62 <nl> + - glfw3 3 . 2 . 1 <nl> + - opencv 3 . 1 . 0 - 1 <nl> + * Various fixes in existing portfiles <nl> + * Introduce environment variable ` VCPKG_DEFAULT_TRIPLET ` <nl> + * Replace everything concerning MD5 with SHA512 <nl> + * Add mirror support <nl> + * ` vcpkg ` now checks for valid package names : only ASCII lowercase chars , digits , or dashes are allowed <nl> + * ` vcpkg create ` now also creates a templated CONTROL file <nl> + * ` vcpkg create ` now checks for invalid chars in the zip path <nl> + * ` vcpkg edit ` now throws an error if it cannot launch an editor <nl> + * Fix ` vcpkg integrate ` to only apply to C + + projects instead of all projects <nl> + * Fix ` vcpkg integrate ` locale - specific failures <nl> + * ` vcpkg search ` now does simple substring searching <nl> + * Fix path that assumed Visual Studio is installed in default location <nl> + * Enable multicore builds by default <nl> + * Add ` . vcpkg - root ` file to detect the root directory <nl> + * Fix ` bootstrap . ps1 ` to work with older versions of powershell <nl> + * Add ` SOURCE_PATH ` variable to all portfiles . <nl> + * Many improvements in error messages shown by ` vcpkg ` <nl> + * Various updates in FAQ <nl> + * Move ` CONTRIBUTING . md ` to root <nl> + <nl> + - - vcpkg team < vcpkg @ microsoft . com > WED , 05 Oct 2016 17 : 00 : 00 - 0700 <nl> + <nl> + <nl> vcpkg ( 0 . 0 . 30 ) <nl> mmmmmmmmmmmm - - <nl> * DLLs are now accompanied with their corresponding PDBs . <nl> mmm a / toolsrc / VERSION . txt <nl> ppp b / toolsrc / VERSION . txt <nl> @ @ - 1 + 1 @ @ <nl> - " 0 . 0 . 30 " <nl> \ No newline at end of file <nl> + " 0 . 0 . 40 " <nl> \ No newline at end of file <nl> | Update CHANGELOG and bump version to v0 . 0 . 40 | microsoft/vcpkg | 12d9cba5d8d4db546a979c1e58cacbc0c79d7953 | 2016-10-06T00:00:49Z |
mmm a / data / widgets / about . xml <nl> ppp b / data / widgets / about . xml <nl> <nl> < label text = " - Lead developer , graphics & amp ; & amp ; maintainer " / > <nl> < link text = " Gaspar Capello " url = " https : / / github . com / Gasparoken " / > <nl> < label text = " - Programmer , bug fixing " / > <nl> - < link text = " Ilija Melentijevic " url = " https : / / twitter . com / ilkkke " / > <nl> + < link text = " Ilija Melentijevic " url = " https : / / ilkke . net / " / > <nl> < label text = " - Default skin & amp ; & amp ; graphics introduced in v0 . 8 " / > <nl> < / grid > <nl> <nl> | Update About dialog , Ilija has just updated his website | aseprite/aseprite | 650c4eeeaa0de548cc6ed7f892794212ce246571 | 2018-06-18T13:06:52Z |
mmm a / tensorflow / lite / micro / micro_interpreter . cc <nl> ppp b / tensorflow / lite / micro / micro_interpreter . cc <nl> MicroInterpreter : : MicroInterpreter ( const Model * model , <nl> initialization_status_ = kTfLiteOk ; <nl> } <nl> <nl> - MicroInterpreter : : ~ MicroInterpreter ( ) { <nl> - if ( node_and_registrations_ ! = nullptr ) { <nl> - for ( size_t i = 0 ; i < operators_ - > size ( ) ; + + i ) { <nl> - TfLiteNode * node = & ( node_and_registrations_ [ i ] . node ) ; <nl> - const TfLiteRegistration * registration = <nl> - node_and_registrations_ [ i ] . registration ; <nl> - / / registration is allocated outside the interpreter , so double check to <nl> - / / make sure it ' s not nullptr ; <nl> - if ( registration ! = nullptr & & registration - > free ! = nullptr ) { <nl> - registration - > free ( & context_ , node - > user_data ) ; <nl> - } <nl> - } <nl> - } <nl> - } <nl> - <nl> void MicroInterpreter : : CorrectTensorEndianness ( TfLiteTensor * tensorCorr ) { <nl> int32_t tensorSize = 1 ; <nl> for ( int d = 0 ; d < tensorCorr - > dims - > size ; + + d ) <nl> TfLiteStatus MicroInterpreter : : AllocateTensors ( ) { <nl> op_resolver_ , & node_and_registrations_ ) ) ; <nl> TF_LITE_ENSURE_OK ( & context_ , allocator_ . FinishTensorAllocation ( ) ) ; <nl> <nl> + tensors_allocated_ = true ; <nl> + return kTfLiteOk ; <nl> + } <nl> + <nl> + TfLiteStatus MicroInterpreter : : Invoke ( ) { <nl> + if ( initialization_status_ ! = kTfLiteOk ) { <nl> + error_reporter_ - > Report ( " Invoke ( ) called after initialization failed \ n " ) ; <nl> + return kTfLiteError ; <nl> + } <nl> + <nl> + / / Ensure tensors are allocated before the interpreter is invoked to avoid <nl> + / / difficult to debug segfaults . <nl> + if ( ! tensors_allocated_ ) { <nl> + AllocateTensors ( ) ; <nl> + } <nl> + <nl> / / Init method is not yet implemented . <nl> for ( size_t i = 0 ; i < operators_ - > size ( ) ; + + i ) { <nl> auto * node = & ( node_and_registrations_ [ i ] . node ) ; <nl> TfLiteStatus MicroInterpreter : : AllocateTensors ( ) { <nl> } <nl> } <nl> <nl> - tensors_allocated_ = true ; <nl> - return kTfLiteOk ; <nl> - } <nl> - <nl> - TfLiteStatus MicroInterpreter : : Invoke ( ) { <nl> - if ( initialization_status_ ! = kTfLiteOk ) { <nl> - error_reporter_ - > Report ( " Invoke ( ) called after initialization failed \ n " ) ; <nl> - return kTfLiteError ; <nl> - } <nl> - <nl> - / / Ensure tensors are allocated before the interpreter is invoked to avoid <nl> - / / difficult to debug segfaults . <nl> - if ( ! tensors_allocated_ ) { <nl> - AllocateTensors ( ) ; <nl> - } <nl> - <nl> for ( size_t i = 0 ; i < operators_ - > size ( ) ; + + i ) { <nl> auto * node = & ( node_and_registrations_ [ i ] . node ) ; <nl> auto * registration = node_and_registrations_ [ i ] . registration ; <nl> TfLiteStatus MicroInterpreter : : Invoke ( ) { <nl> } <nl> } <nl> } <nl> + <nl> + / / This is actually a no - op . <nl> + / / TODO ( wangtz ) : Consider removing this code to slightly reduce binary size . <nl> + for ( size_t i = 0 ; i < operators_ - > size ( ) ; + + i ) { <nl> + auto * node = & ( node_and_registrations_ [ i ] . node ) ; <nl> + auto * registration = node_and_registrations_ [ i ] . registration ; <nl> + if ( registration - > free ) { <nl> + registration - > free ( & context_ , node - > user_data ) ; <nl> + } <nl> + } <nl> return kTfLiteOk ; <nl> } <nl> <nl> mmm a / tensorflow / lite / micro / micro_interpreter . h <nl> ppp b / tensorflow / lite / micro / micro_interpreter . 
h <nl> class MicroInterpreter { <nl> uint8_t * tensor_arena , size_t tensor_arena_size , <nl> ErrorReporter * error_reporter ) ; <nl> <nl> - ~ MicroInterpreter ( ) ; <nl> - <nl> / / Runs through the model and allocates all necessary input , output and <nl> / / intermediate tensors . <nl> TfLiteStatus AllocateTensors ( ) ; <nl> class MicroInterpreter { <nl> template < class T > <nl> void CorrectTensorDataEndianness ( T * data , int32_t size ) ; <nl> <nl> - NodeAndRegistration * node_and_registrations_ = nullptr ; <nl> + NodeAndRegistration * node_and_registrations_ ; <nl> <nl> const Model * model_ ; <nl> const OpResolver & op_resolver_ ; <nl> mmm a / tensorflow / lite / micro / micro_interpreter_test . cc <nl> ppp b / tensorflow / lite / micro / micro_interpreter_test . cc <nl> limitations under the License . <nl> <nl> namespace tflite { <nl> namespace { <nl> - <nl> void * MockInit ( TfLiteContext * context , const char * buffer , size_t length ) { <nl> / / We don ' t support delegate in TFL micro . This is a weak check to test if <nl> / / context struct being zero - initialized . <nl> void * MockInit ( TfLiteContext * context , const char * buffer , size_t length ) { <nl> return nullptr ; <nl> } <nl> <nl> - bool freed = false ; <nl> - void MockFree ( TfLiteContext * context , void * buffer ) { freed = true ; } <nl> + void MockFree ( TfLiteContext * context , void * buffer ) { <nl> + / / Do nothing . <nl> + } <nl> <nl> TfLiteStatus MockPrepare ( TfLiteContext * context , TfLiteNode * node ) { <nl> return kTfLiteOk ; <nl> class MockOpResolver : public OpResolver { <nl> TF_LITE_MICRO_TESTS_BEGIN <nl> <nl> TF_LITE_MICRO_TEST ( TestInterpreter ) { <nl> - tflite : : freed = false ; <nl> const tflite : : Model * model = tflite : : testing : : GetSimpleMockModel ( ) ; <nl> TF_LITE_MICRO_EXPECT_NE ( nullptr , model ) ; <nl> tflite : : MockOpResolver mock_resolver ; <nl> constexpr size_t allocator_buffer_size = 1024 ; <nl> uint8_t allocator_buffer [ allocator_buffer_size ] ; <nl> + tflite : : MicroInterpreter interpreter ( model , mock_resolver , allocator_buffer , <nl> + allocator_buffer_size , <nl> + micro_test : : reporter ) ; <nl> + TF_LITE_MICRO_EXPECT_EQ ( interpreter . AllocateTensors ( ) , kTfLiteOk ) ; <nl> + TF_LITE_MICRO_EXPECT_EQ ( 1 , interpreter . inputs_size ( ) ) ; <nl> + TF_LITE_MICRO_EXPECT_EQ ( 1 , interpreter . outputs_size ( ) ) ; <nl> <nl> - / / Create a new scope so that we can test the destructor . <nl> - { <nl> - tflite : : MicroInterpreter interpreter ( model , mock_resolver , allocator_buffer , <nl> - allocator_buffer_size , <nl> - micro_test : : reporter ) ; <nl> - TF_LITE_MICRO_EXPECT_EQ ( interpreter . AllocateTensors ( ) , kTfLiteOk ) ; <nl> - TF_LITE_MICRO_EXPECT_EQ ( 1 , interpreter . inputs_size ( ) ) ; <nl> - TF_LITE_MICRO_EXPECT_EQ ( 1 , interpreter . outputs_size ( ) ) ; <nl> - <nl> - TfLiteTensor * input = interpreter . input ( 0 ) ; <nl> - TF_LITE_MICRO_EXPECT_NE ( nullptr , input ) ; <nl> - TF_LITE_MICRO_EXPECT_EQ ( kTfLiteInt32 , input - > type ) ; <nl> - TF_LITE_MICRO_EXPECT_EQ ( 1 , input - > dims - > size ) ; <nl> - TF_LITE_MICRO_EXPECT_EQ ( 1 , input - > dims - > data [ 0 ] ) ; <nl> - TF_LITE_MICRO_EXPECT_EQ ( 4 , input - > bytes ) ; <nl> - TF_LITE_MICRO_EXPECT_NE ( nullptr , input - > data . i32 ) ; <nl> - input - > data . i32 [ 0 ] = 21 ; <nl> - <nl> - TF_LITE_MICRO_EXPECT_EQ ( kTfLiteOk , interpreter . Invoke ( ) ) ; <nl> - <nl> - TfLiteTensor * output = interpreter . 
output ( 0 ) ; <nl> - TF_LITE_MICRO_EXPECT_NE ( nullptr , output ) ; <nl> - TF_LITE_MICRO_EXPECT_EQ ( kTfLiteInt32 , output - > type ) ; <nl> - TF_LITE_MICRO_EXPECT_EQ ( 1 , output - > dims - > size ) ; <nl> - TF_LITE_MICRO_EXPECT_EQ ( 1 , output - > dims - > data [ 0 ] ) ; <nl> - TF_LITE_MICRO_EXPECT_EQ ( 4 , output - > bytes ) ; <nl> - TF_LITE_MICRO_EXPECT_NE ( nullptr , output - > data . i32 ) ; <nl> - TF_LITE_MICRO_EXPECT_EQ ( 42 , output - > data . i32 [ 0 ] ) ; <nl> - <nl> - / / Just to make sure that this method works . <nl> - tflite : : PrintInterpreterState ( & interpreter ) ; <nl> - TF_LITE_MICRO_EXPECT_EQ ( tflite : : freed , false ) ; <nl> - } <nl> - <nl> - TF_LITE_MICRO_EXPECT_EQ ( tflite : : freed , true ) ; <nl> + TfLiteTensor * input = interpreter . input ( 0 ) ; <nl> + TF_LITE_MICRO_EXPECT_NE ( nullptr , input ) ; <nl> + TF_LITE_MICRO_EXPECT_EQ ( kTfLiteInt32 , input - > type ) ; <nl> + TF_LITE_MICRO_EXPECT_EQ ( 1 , input - > dims - > size ) ; <nl> + TF_LITE_MICRO_EXPECT_EQ ( 1 , input - > dims - > data [ 0 ] ) ; <nl> + TF_LITE_MICRO_EXPECT_EQ ( 4 , input - > bytes ) ; <nl> + TF_LITE_MICRO_EXPECT_NE ( nullptr , input - > data . i32 ) ; <nl> + input - > data . i32 [ 0 ] = 21 ; <nl> + <nl> + TF_LITE_MICRO_EXPECT_EQ ( kTfLiteOk , interpreter . Invoke ( ) ) ; <nl> + <nl> + TfLiteTensor * output = interpreter . output ( 0 ) ; <nl> + TF_LITE_MICRO_EXPECT_NE ( nullptr , output ) ; <nl> + TF_LITE_MICRO_EXPECT_EQ ( kTfLiteInt32 , output - > type ) ; <nl> + TF_LITE_MICRO_EXPECT_EQ ( 1 , output - > dims - > size ) ; <nl> + TF_LITE_MICRO_EXPECT_EQ ( 1 , output - > dims - > data [ 0 ] ) ; <nl> + TF_LITE_MICRO_EXPECT_EQ ( 4 , output - > bytes ) ; <nl> + TF_LITE_MICRO_EXPECT_NE ( nullptr , output - > data . i32 ) ; <nl> + TF_LITE_MICRO_EXPECT_EQ ( 42 , output - > data . i32 [ 0 ] ) ; <nl> + <nl> + / / Just to make sure that this method works . <nl> + tflite : : PrintInterpreterState ( & interpreter ) ; <nl> } <nl> <nl> TF_LITE_MICRO_TEST ( TestVariableTensorReset ) { <nl> | TFLM : Move Init and Prepare into initialization so that they ' re only ran once . | tensorflow/tensorflow | 37123e9e82bf34002b656753970fde832c2708af | 2020-01-17T16:44:07Z |
mmm a / lib / SIL / SILPrinter . cpp <nl> ppp b / lib / SIL / SILPrinter . cpp <nl> ID SILPrintContext : : getID ( const SILNode * node ) { <nl> / / Lazily initialize the instruction - > ID mapping . <nl> if ( ValueToIDMap . empty ( ) ) <nl> F - > numberValues ( ValueToIDMap ) ; <nl> - } else { <nl> - setContext ( BB ) ; <nl> - / / Lazily initialize the instruction - > ID mapping . <nl> - if ( ValueToIDMap . empty ( ) ) { <nl> - unsigned idx = 0 ; <nl> - for ( auto & I : * BB ) { <nl> - / / Give the instruction itself the next ID . <nl> - ValueToIDMap [ & I ] = idx ; <nl> - <nl> - / / If there are no results , make sure we don ' t reuse that ID . <nl> - auto results = I . getResults ( ) ; <nl> - if ( results . empty ( ) ) { <nl> - idx + + ; <nl> - <nl> - / / Otherwise , assign all of the results an index . Note that <nl> - / / we ' ll assign the same ID to both the instruction and the <nl> - / / first result . <nl> - } else { <nl> - for ( auto result : results ) { <nl> - ValueToIDMap [ result ] = idx + + ; <nl> - } <nl> - } <nl> - } <nl> + ID R = { ID : : SSAValue , ValueToIDMap [ node ] } ; <nl> + return R ; <nl> + } <nl> + <nl> + setContext ( BB ) ; <nl> + <nl> + / / Check if we have initialized our ValueToIDMap yet . If we have , just use <nl> + / / that . <nl> + if ( ! ValueToIDMap . empty ( ) ) { <nl> + ID R = { ID : : SSAValue , ValueToIDMap [ node ] } ; <nl> + return R ; <nl> + } <nl> + <nl> + / / Otherwise , initialize the instruction - > ID mapping cache . <nl> + unsigned idx = 0 ; <nl> + for ( auto & I : * BB ) { <nl> + / / Give the instruction itself the next ID . <nl> + ValueToIDMap [ & I ] = idx ; <nl> + <nl> + / / If there are no results , make sure we don ' t reuse that ID . <nl> + auto results = I . getResults ( ) ; <nl> + if ( results . empty ( ) ) { <nl> + idx + + ; <nl> + continue ; <nl> + } <nl> + <nl> + / / Otherwise , assign all of the results an index . Note that <nl> + / / we ' ll assign the same ID to both the instruction and the <nl> + / / first result . <nl> + for ( auto result : results ) { <nl> + ValueToIDMap [ result ] = idx + + ; <nl> } <nl> } <nl> <nl> | [ gardening ] Reduce indentation by using early exits increasing readability . | apple/swift | 6dc66297da6d3a47ecddf8cdfdd2d2cc8309eb1c | 2017-10-06T23:47:48Z |
mmm a / scripts / VirtuaBuild / vm_access . py <nl> ppp b / scripts / VirtuaBuild / vm_access . py <nl> def command ( self , cmd , output = False ) : <nl> sys_exit ( " Error : command \ " % s \ " finished with exit value % d . " % ( cmd , proc . poll ( ) ) , proc . poll ( ) , True ) <nl> return proc <nl> def shut_down ( self ) : <nl> - subprocess . Popen ( [ " ssh % s ' VBoxManage controlvm % s poweroff ' " % ( control_user , self . uuid ) ] , shell = True ) . wait ( ) <nl> + pass # shutting down is currently disabled <nl> + # subprocess . Popen ( [ " ssh % s ' VBoxManage controlvm % s poweroff ' " % ( control_user , self . uuid ) ] , shell = True ) . wait ( ) <nl> <nl> def sys_exit ( message , exit_code , shut_down = False ) : <nl> print message <nl> | Disabled shutting down of VMs after using vm_access , because things get messed up when a VM shuts down while a repetition of the same test is running . | rethinkdb/rethinkdb | 832c01ae2fab403669d5a7275771894adb13f731 | 2012-07-13T19:43:33Z |
mmm a / samples / Cpp / TestCpp / Classes / LabelTest / LabelTest . cpp <nl> ppp b / samples / Cpp / TestCpp / Classes / LabelTest / LabelTest . cpp <nl> TTFFontShadowAndStroke : : TTFFontShadowAndStroke ( ) <nl> / / create the label shadow only <nl> CCLabelTTF * fontShadow = new CCLabelTTF ( ) ; <nl> <nl> - CCTextDefinition shadowTextDef ; <nl> + ccFontDefinition shadowTextDef ; <nl> shadowTextDef . m_fontSize = 20 ; <nl> shadowTextDef . m_fontName = std : : string ( " Marker Felt " ) ; <nl> <nl> TTFFontShadowAndStroke : : TTFFontShadowAndStroke ( ) <nl> shadowTextDef . m_shadow . m_shadowOffset = shadowOffset ; <nl> shadowTextDef . m_shadow . m_shadowOpacity = 1 . 0 ; <nl> shadowTextDef . m_shadow . m_shadowBlur = 1 . 0 ; <nl> + shadowTextDef . m_fontFillColor = tintColorRed ; <nl> <nl> - shadowTextDef . m_fontTint . m_tintEnabled = true ; <nl> - shadowTextDef . m_fontTint . m_tintColor = tintColorRed ; <nl> - <nl> - fontShadow - > initWithStringAndTextDefinition ( " Shadow Only Red Text " , & shadowTextDef ) ; <nl> + fontShadow - > initWithStringAndTextDefinition ( " Shadow Only Red Text " , shadowTextDef ) ; <nl> <nl> / / add label to the scene <nl> this - > addChild ( fontShadow ) ; <nl> TTFFontShadowAndStroke : : TTFFontShadowAndStroke ( ) <nl> / / create the label stroke only <nl> CCLabelTTF * fontStroke = new CCLabelTTF ( ) ; <nl> <nl> - CCTextDefinition strokeTextDef ; <nl> + ccFontDefinition strokeTextDef ; <nl> strokeTextDef . m_fontSize = 20 ; <nl> strokeTextDef . m_fontName = std : : string ( " Marker Felt " ) ; <nl> <nl> TTFFontShadowAndStroke : : TTFFontShadowAndStroke ( ) <nl> strokeTextDef . m_stroke . m_strokeColor = strokeColor ; <nl> strokeTextDef . m_stroke . m_strokeSize = 1 . 5 ; <nl> <nl> - strokeTextDef . m_fontTint . m_tintEnabled = true ; <nl> - strokeTextDef . m_fontTint . m_tintColor = tintColorYellow ; <nl> + strokeTextDef . m_fontFillColor = tintColorYellow ; <nl> <nl> - fontStroke - > initWithStringAndTextDefinition ( " Stroke Only Yellow Text " , & strokeTextDef ) ; <nl> + fontStroke - > initWithStringAndTextDefinition ( " Stroke Only Yellow Text " , strokeTextDef ) ; <nl> <nl> / / add label to the scene <nl> this - > addChild ( fontStroke ) ; <nl> TTFFontShadowAndStroke : : TTFFontShadowAndStroke ( ) <nl> / / create the label stroke and shadow <nl> CCLabelTTF * fontStrokeAndShadow = new CCLabelTTF ( ) ; <nl> <nl> - CCTextDefinition strokeShaodwTextDef ; <nl> + ccFontDefinition strokeShaodwTextDef ; <nl> strokeShaodwTextDef . m_fontSize = 20 ; <nl> strokeShaodwTextDef . m_fontName = std : : string ( " Marker Felt " ) ; <nl> <nl> TTFFontShadowAndStroke : : TTFFontShadowAndStroke ( ) <nl> strokeShaodwTextDef . m_shadow . m_shadowOpacity = 1 . 0 ; <nl> strokeShaodwTextDef . m_shadow . m_shadowBlur = 1 . 0 ; <nl> <nl> - strokeShaodwTextDef . m_fontTint . m_tintEnabled = true ; <nl> - strokeShaodwTextDef . m_fontTint . m_tintColor = tintColorBlue ; <nl> + <nl> + strokeShaodwTextDef . m_fontFillColor = tintColorBlue ; <nl> <nl> - fontStrokeAndShadow - > initWithStringAndTextDefinition ( " Stroke & Shadow Blue Text " , & strokeShaodwTextDef ) ; <nl> + fontStrokeAndShadow - > initWithStringAndTextDefinition ( " Stroke & Shadow Blue Text " , strokeShaodwTextDef ) ; <nl> <nl> / / add label to the scene <nl> this - > addChild ( fontStrokeAndShadow ) ; <nl> | CCLabelTTF C + + test fixed | cocos2d/cocos2d-x | 36b88039a15e99a94a222da63f189427b1272d3c | 2013-05-15T23:37:08Z |
mmm a / THCTensorMath . h <nl> ppp b / THCTensorMath . h <nl> THC_API float THCudaTensor_dist ( THCudaTensor * self , THCudaTensor * src , float val <nl> THC_API void THCudaTensor_rand ( THCudaTensor * r_ , THLongStorage * size ) ; <nl> THC_API void THCudaTensor_randn ( THCudaTensor * r_ , THLongStorage * size ) ; <nl> <nl> - THC_API void THCudaTensor_indexSelect ( THCudaTensor * tensor , THCudaTensor * src , int dim , THLongTensor * index ) <nl> + THC_API void THCudaTensor_indexSelect ( THCudaTensor * tensor , THCudaTensor * src , int dim , THLongTensor * index ) ; <nl> <nl> <nl> # endif <nl> | small bug | pytorch/pytorch | c68b50d4cf585ef63a8ff2ce30f9fed44ec0b87d | 2014-06-12T05:01:41Z |
mmm a / src / mongo / db / commands / geo_near_cmd . cpp <nl> ppp b / src / mongo / db / commands / geo_near_cmd . cpp <nl> class Geo2dFindNearCmd : public Command { <nl> <nl> const WhereCallbackReal whereCallback ( txn , nss . db ( ) ) ; <nl> auto statusWithCQ = CanonicalQuery : : canonicalize ( <nl> - nss . ns ( ) , rewritten , BSONObj ( ) , projObj , 0 , numWanted , BSONObj ( ) , whereCallback ) ; <nl> + nss , rewritten , BSONObj ( ) , projObj , 0 , numWanted , BSONObj ( ) , whereCallback ) ; <nl> if ( ! statusWithCQ . isOK ( ) ) { <nl> errmsg = " Can ' t parse filter / create query " ; <nl> return false ; <nl> mmm a / src / mongo / db / commands / index_filter_commands . cpp <nl> ppp b / src / mongo / db / commands / index_filter_commands . cpp <nl> Status ClearFilters : : clear ( OperationContext * txn , <nl> <nl> / / Create canonical query . <nl> auto statusWithCQ = CanonicalQuery : : canonicalize ( <nl> - ns , entry - > query , entry - > sort , entry - > projection , whereCallback ) ; <nl> + nss , entry - > query , entry - > sort , entry - > projection , whereCallback ) ; <nl> invariant ( statusWithCQ . isOK ( ) ) ; <nl> std : : unique_ptr < CanonicalQuery > cq = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> <nl> mmm a / src / mongo / db / commands / index_filter_commands_test . cpp <nl> ppp b / src / mongo / db / commands / index_filter_commands_test . cpp <nl> using std : : string ; <nl> using std : : unique_ptr ; <nl> using std : : vector ; <nl> <nl> - static const char * ns = " test . t " ; <nl> + static const NamespaceString nss ( " test . collection " ) ; <nl> <nl> / * * <nl> * Utility function to get list of index filters from the query settings . <nl> void addQueryShapeToPlanCache ( PlanCache * planCache , <nl> BSONObj projectionObj = fromjson ( projectionStr ) ; <nl> <nl> / / Create canonical query . <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns , queryObj , sortObj , projectionObj ) ; <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , queryObj , sortObj , projectionObj ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> std : : unique_ptr < CanonicalQuery > cq = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> <nl> bool planCacheContains ( const PlanCache & planCache , <nl> BSONObj projectionObj = fromjson ( projectionStr ) ; <nl> <nl> / / Create canonical query . <nl> - auto statusWithInputQuery = CanonicalQuery : : canonicalize ( ns , queryObj , sortObj , projectionObj ) ; <nl> + auto statusWithInputQuery = CanonicalQuery : : canonicalize ( nss , queryObj , sortObj , projectionObj ) ; <nl> ASSERT_OK ( statusWithInputQuery . getStatus ( ) ) ; <nl> unique_ptr < CanonicalQuery > inputQuery = std : : move ( statusWithInputQuery . getValue ( ) ) ; <nl> <nl> bool planCacheContains ( const PlanCache & planCache , <nl> / / Alternatively , we could add key to PlanCacheEntry but that would be used in one place <nl> / / only . <nl> auto statusWithCurrentQuery = <nl> - CanonicalQuery : : canonicalize ( ns , entry - > query , entry - > sort , entry - > projection ) ; <nl> + CanonicalQuery : : canonicalize ( nss , entry - > query , entry - > sort , entry - > projection ) ; <nl> ASSERT_OK ( statusWithCurrentQuery . getStatus ( ) ) ; <nl> unique_ptr < CanonicalQuery > currentQuery = std : : move ( statusWithCurrentQuery . getValue ( ) ) ; <nl> <nl> TEST ( IndexFilterCommandsTest , ClearFiltersInvalidParameter ) { <nl> OperationContextNoop txn ; <nl> <nl> / / If present , query has to be an object . 
<nl> - ASSERT_NOT_OK ( ClearFilters : : clear ( & txn , & empty , & planCache , ns , fromjson ( " { query : 1234 } " ) ) ) ; <nl> - / / If present , sort must be an object . <nl> ASSERT_NOT_OK ( <nl> - ClearFilters : : clear ( & txn , & empty , & planCache , ns , fromjson ( " { query : { a : 1 } , sort : 1234 } " ) ) ) ; <nl> + ClearFilters : : clear ( & txn , & empty , & planCache , nss . ns ( ) , fromjson ( " { query : 1234 } " ) ) ) ; <nl> + / / If present , sort must be an object . <nl> + ASSERT_NOT_OK ( ClearFilters : : clear ( <nl> + & txn , & empty , & planCache , nss . ns ( ) , fromjson ( " { query : { a : 1 } , sort : 1234 } " ) ) ) ; <nl> / / If present , projection must be an object . <nl> ASSERT_NOT_OK ( ClearFilters : : clear ( <nl> - & txn , & empty , & planCache , ns , fromjson ( " { query : { a : 1 } , projection : 1234 } " ) ) ) ; <nl> + & txn , & empty , & planCache , nss . ns ( ) , fromjson ( " { query : { a : 1 } , projection : 1234 } " ) ) ) ; <nl> / / Query must pass canonicalization . <nl> ASSERT_NOT_OK ( ClearFilters : : clear ( <nl> - & txn , & empty , & planCache , ns , fromjson ( " { query : { a : { $ no_such_op : 1 } } } " ) ) ) ; <nl> + & txn , & empty , & planCache , nss . ns ( ) , fromjson ( " { query : { a : { $ no_such_op : 1 } } } " ) ) ) ; <nl> / / Sort present without query is an error . <nl> - ASSERT_NOT_OK ( ClearFilters : : clear ( & txn , & empty , & planCache , ns , fromjson ( " { sort : { a : 1 } } " ) ) ) ; <nl> + ASSERT_NOT_OK ( <nl> + ClearFilters : : clear ( & txn , & empty , & planCache , nss . ns ( ) , fromjson ( " { sort : { a : 1 } } " ) ) ) ; <nl> / / Projection present without query is an error . <nl> ASSERT_NOT_OK ( ClearFilters : : clear ( <nl> - & txn , & empty , & planCache , ns , fromjson ( " { projection : { _id : 0 , a : 1 } } " ) ) ) ; <nl> + & txn , & empty , & planCache , nss . ns ( ) , fromjson ( " { projection : { _id : 0 , a : 1 } } " ) ) ) ; <nl> } <nl> <nl> TEST ( IndexFilterCommandsTest , ClearNonexistentHint ) { <nl> TEST ( IndexFilterCommandsTest , ClearNonexistentHint ) { <nl> PlanCache planCache ; <nl> OperationContextNoop txn ; <nl> <nl> - ASSERT_OK ( SetFilter : : set ( <nl> - & txn , & querySettings , & planCache , ns , fromjson ( " { query : { a : 1 } , indexes : [ { a : 1 } ] } " ) ) ) ; <nl> + ASSERT_OK ( SetFilter : : set ( & txn , <nl> + & querySettings , <nl> + & planCache , <nl> + nss . ns ( ) , <nl> + fromjson ( " { query : { a : 1 } , indexes : [ { a : 1 } ] } " ) ) ) ; <nl> vector < BSONObj > filters = getFilters ( querySettings ) ; <nl> ASSERT_EQUALS ( filters . size ( ) , 1U ) ; <nl> <nl> / / Clear nonexistent hint . <nl> / / Command should succeed and cache should remain unchanged . <nl> - ASSERT_OK ( <nl> - ClearFilters : : clear ( & txn , & querySettings , & planCache , ns , fromjson ( " { query : { b : 1 } } " ) ) ) ; <nl> + ASSERT_OK ( ClearFilters : : clear ( <nl> + & txn , & querySettings , & planCache , nss . ns ( ) , fromjson ( " { query : { b : 1 } } " ) ) ) ; <nl> filters = getFilters ( querySettings ) ; <nl> ASSERT_EQUALS ( filters . size ( ) , 1U ) ; <nl> } <nl> TEST ( IndexFilterCommandsTest , SetFilterInvalidParameter ) { <nl> PlanCache planCache ; <nl> OperationContextNoop txn ; <nl> <nl> - ASSERT_NOT_OK ( SetFilter : : set ( & txn , & empty , & planCache , ns , fromjson ( " { } " ) ) ) ; <nl> + ASSERT_NOT_OK ( SetFilter : : set ( & txn , & empty , & planCache , nss . ns ( ) , fromjson ( " { } " ) ) ) ; <nl> / / Missing required query field . 
<nl> - ASSERT_NOT_OK ( SetFilter : : set ( & txn , & empty , & planCache , ns , fromjson ( " { indexes : [ { a : 1 } ] } " ) ) ) ; <nl> + ASSERT_NOT_OK ( <nl> + SetFilter : : set ( & txn , & empty , & planCache , nss . ns ( ) , fromjson ( " { indexes : [ { a : 1 } ] } " ) ) ) ; <nl> / / Missing required indexes field . <nl> - ASSERT_NOT_OK ( SetFilter : : set ( & txn , & empty , & planCache , ns , fromjson ( " { query : { a : 1 } } " ) ) ) ; <nl> + ASSERT_NOT_OK ( SetFilter : : set ( & txn , & empty , & planCache , nss . ns ( ) , fromjson ( " { query : { a : 1 } } " ) ) ) ; <nl> / / Query has to be an object . <nl> ASSERT_NOT_OK ( SetFilter : : set ( <nl> - & txn , & empty , & planCache , ns , fromjson ( " { query : 1234 , indexes : [ { a : 1 } , { b : 1 } ] } " ) ) ) ; <nl> + & txn , & empty , & planCache , nss . ns ( ) , fromjson ( " { query : 1234 , indexes : [ { a : 1 } , { b : 1 } ] } " ) ) ) ; <nl> / / Indexes field has to be an array . <nl> - ASSERT_NOT_OK ( <nl> - SetFilter : : set ( & txn , & empty , & planCache , ns , fromjson ( " { query : { a : 1 } , indexes : 1234 } " ) ) ) ; <nl> + ASSERT_NOT_OK ( SetFilter : : set ( <nl> + & txn , & empty , & planCache , nss . ns ( ) , fromjson ( " { query : { a : 1 } , indexes : 1234 } " ) ) ) ; <nl> / / Array indexes field cannot empty . <nl> - ASSERT_NOT_OK ( <nl> - SetFilter : : set ( & txn , & empty , & planCache , ns , fromjson ( " { query : { a : 1 } , indexes : [ ] } " ) ) ) ; <nl> + ASSERT_NOT_OK ( SetFilter : : set ( <nl> + & txn , & empty , & planCache , nss . ns ( ) , fromjson ( " { query : { a : 1 } , indexes : [ ] } " ) ) ) ; <nl> / / Elements in indexes have to be objects . <nl> ASSERT_NOT_OK ( SetFilter : : set ( <nl> - & txn , & empty , & planCache , ns , fromjson ( " { query : { a : 1 } , indexes : [ { a : 1 } , 99 ] } " ) ) ) ; <nl> + & txn , & empty , & planCache , nss . ns ( ) , fromjson ( " { query : { a : 1 } , indexes : [ { a : 1 } , 99 ] } " ) ) ) ; <nl> / / Objects in indexes cannot be empty . <nl> ASSERT_NOT_OK ( SetFilter : : set ( <nl> - & txn , & empty , & planCache , ns , fromjson ( " { query : { a : 1 } , indexes : [ { a : 1 } , { } ] } " ) ) ) ; <nl> + & txn , & empty , & planCache , nss . ns ( ) , fromjson ( " { query : { a : 1 } , indexes : [ { a : 1 } , { } ] } " ) ) ) ; <nl> / / If present , sort must be an object . <nl> ASSERT_NOT_OK ( <nl> SetFilter : : set ( & txn , <nl> & empty , <nl> & planCache , <nl> - ns , <nl> + nss . ns ( ) , <nl> fromjson ( " { query : { a : 1 } , sort : 1234 , indexes : [ { a : 1 } , { b : 1 } ] } " ) ) ) ; <nl> / / If present , projection must be an object . <nl> ASSERT_NOT_OK ( <nl> SetFilter : : set ( & txn , <nl> & empty , <nl> & planCache , <nl> - ns , <nl> + nss . ns ( ) , <nl> fromjson ( " { query : { a : 1 } , projection : 1234 , indexes : [ { a : 1 } , { b : 1 } ] } " ) ) ) ; <nl> / / Query must pass canonicalization . <nl> ASSERT_NOT_OK ( <nl> SetFilter : : set ( & txn , <nl> & empty , <nl> & planCache , <nl> - ns , <nl> + nss . ns ( ) , <nl> fromjson ( " { query : { a : { $ no_such_op : 1 } } , indexes : [ { a : 1 } , { b : 1 } ] } " ) ) ) ; <nl> } <nl> <nl> TEST ( IndexFilterCommandsTest , SetAndClearFilters ) { <nl> ASSERT_OK ( SetFilter : : set ( & txn , <nl> & querySettings , <nl> & planCache , <nl> - ns , <nl> + nss . 
ns ( ) , <nl> fromjson ( <nl> " { query : { a : 1 , b : 1 } , sort : { a : - 1 } , projection : { _id : 0 , a : 1 } , " <nl> " indexes : [ { a : 1 } ] } " ) ) ) ; <nl> TEST ( IndexFilterCommandsTest , SetAndClearFilters ) { <nl> ASSERT_OK ( SetFilter : : set ( & txn , <nl> & querySettings , <nl> & planCache , <nl> - ns , <nl> + nss . ns ( ) , <nl> fromjson ( <nl> " { query : { b : 2 , a : 3 } , sort : { a : - 1 } , projection : { _id : 0 , a : 1 } , " <nl> " indexes : [ { a : 1 , b : 1 } ] } " ) ) ) ; <nl> TEST ( IndexFilterCommandsTest , SetAndClearFilters ) { <nl> ASSERT_EQUALS ( filters . size ( ) , 1U ) ; <nl> <nl> / / Add hint for different query shape . <nl> - ASSERT_OK ( SetFilter : : set ( <nl> - & txn , & querySettings , & planCache , ns , fromjson ( " { query : { b : 1 } , indexes : [ { b : 1 } ] } " ) ) ) ; <nl> + ASSERT_OK ( SetFilter : : set ( & txn , <nl> + & querySettings , <nl> + & planCache , <nl> + nss . ns ( ) , <nl> + fromjson ( " { query : { b : 1 } , indexes : [ { b : 1 } ] } " ) ) ) ; <nl> filters = getFilters ( querySettings ) ; <nl> ASSERT_EQUALS ( filters . size ( ) , 2U ) ; <nl> <nl> / / Add hint for 3rd query shape . This is to prepare for ClearHint tests . <nl> - ASSERT_OK ( SetFilter : : set ( <nl> - & txn , & querySettings , & planCache , ns , fromjson ( " { query : { a : 1 } , indexes : [ { a : 1 } ] } " ) ) ) ; <nl> + ASSERT_OK ( SetFilter : : set ( & txn , <nl> + & querySettings , <nl> + & planCache , <nl> + nss . ns ( ) , <nl> + fromjson ( " { query : { a : 1 } , indexes : [ { a : 1 } ] } " ) ) ) ; <nl> filters = getFilters ( querySettings ) ; <nl> ASSERT_EQUALS ( filters . size ( ) , 3U ) ; <nl> <nl> TEST ( IndexFilterCommandsTest , SetAndClearFilters ) { <nl> addQueryShapeToPlanCache ( & planCache , " { b : 1 } " , " { } " , " { } " ) ; <nl> <nl> / / Clear single hint . <nl> - ASSERT_OK ( <nl> - ClearFilters : : clear ( & txn , & querySettings , & planCache , ns , fromjson ( " { query : { a : 1 } } " ) ) ) ; <nl> + ASSERT_OK ( ClearFilters : : clear ( <nl> + & txn , & querySettings , & planCache , nss . ns ( ) , fromjson ( " { query : { a : 1 } } " ) ) ) ; <nl> filters = getFilters ( querySettings ) ; <nl> ASSERT_EQUALS ( filters . size ( ) , 2U ) ; <nl> <nl> TEST ( IndexFilterCommandsTest , SetAndClearFilters ) { <nl> ASSERT_TRUE ( planCacheContains ( planCache , " { b : 1 } " , " { } " , " { } " ) ) ; <nl> <nl> / / Clear all filters <nl> - ASSERT_OK ( ClearFilters : : clear ( & txn , & querySettings , & planCache , ns , fromjson ( " { } " ) ) ) ; <nl> + ASSERT_OK ( ClearFilters : : clear ( & txn , & querySettings , & planCache , nss . ns ( ) , fromjson ( " { } " ) ) ) ; <nl> filters = getFilters ( querySettings ) ; <nl> ASSERT_TRUE ( filters . empty ( ) ) ; <nl> <nl> mmm a / src / mongo / db / commands / mr . cpp <nl> ppp b / src / mongo / db / commands / mr . cpp <nl> void State : : finalReduce ( CurOp * op , ProgressMeterHolder & pm ) { <nl> const WhereCallbackReal whereCallback ( _txn , nss . db ( ) ) ; <nl> <nl> auto statusWithCQ = <nl> - CanonicalQuery : : canonicalize ( _config . incLong , BSONObj ( ) , sortKey , BSONObj ( ) , whereCallback ) ; <nl> + CanonicalQuery : : canonicalize ( nss , BSONObj ( ) , sortKey , BSONObj ( ) , whereCallback ) ; <nl> verify ( statusWithCQ . isOK ( ) ) ; <nl> std : : unique_ptr < CanonicalQuery > cq = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> <nl> class MapReduceCommand : public Command { <nl> const WhereCallbackReal whereCallback ( txn , nss . 
db ( ) ) ; <nl> <nl> auto statusWithCQ = CanonicalQuery : : canonicalize ( <nl> - config . ns , config . filter , config . sort , BSONObj ( ) , whereCallback ) ; <nl> + nss , config . filter , config . sort , BSONObj ( ) , whereCallback ) ; <nl> if ( ! statusWithCQ . isOK ( ) ) { <nl> uasserted ( 17238 , " Can ' t canonicalize query " + config . filter . toString ( ) ) ; <nl> return 0 ; <nl> mmm a / src / mongo / db / commands / plan_cache_commands . cpp <nl> ppp b / src / mongo / db / commands / plan_cache_commands . cpp <nl> StatusWith < unique_ptr < CanonicalQuery > > PlanCacheCommand : : canonicalize ( OperationC <nl> const NamespaceString nss ( ns ) ; <nl> const WhereCallbackReal whereCallback ( txn , nss . db ( ) ) ; <nl> <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns , queryObj , sortObj , projObj , whereCallback ) ; <nl> + auto statusWithCQ = <nl> + CanonicalQuery : : canonicalize ( std : : move ( nss ) , queryObj , sortObj , projObj , whereCallback ) ; <nl> if ( ! statusWithCQ . isOK ( ) ) { <nl> return statusWithCQ . getStatus ( ) ; <nl> } <nl> mmm a / src / mongo / db / commands / plan_cache_commands_test . cpp <nl> ppp b / src / mongo / db / commands / plan_cache_commands_test . cpp <nl> using std : : string ; <nl> using std : : unique_ptr ; <nl> using std : : vector ; <nl> <nl> - static const char * ns = " test . t " ; <nl> + static const NamespaceString nss ( " test . collection " ) ; <nl> <nl> / * * <nl> * Tests for planCacheListQueryShapes <nl> TEST ( PlanCacheCommandsTest , planCacheListQueryShapesEmpty ) { <nl> <nl> TEST ( PlanCacheCommandsTest , planCacheListQueryShapesOneKey ) { <nl> / / Create a canonical query <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns , fromjson ( " { a : 1 } " ) ) ; <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , fromjson ( " { a : 1 } " ) ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> unique_ptr < CanonicalQuery > cq = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> <nl> TEST ( PlanCacheCommandsTest , planCacheListQueryShapesOneKey ) { <nl> <nl> TEST ( PlanCacheCommandsTest , planCacheClearAllShapes ) { <nl> / / Create a canonical query <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns , fromjson ( " { a : 1 } " ) ) ; <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , fromjson ( " { a : 1 } " ) ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> unique_ptr < CanonicalQuery > cq = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> <nl> TEST ( PlanCacheCommandsTest , planCacheClearAllShapes ) { <nl> ASSERT_EQUALS ( getShapes ( planCache ) . size ( ) , 1U ) ; <nl> <nl> / / Clear cache and confirm number of keys afterwards . <nl> - ASSERT_OK ( PlanCacheClear : : clear ( & txn , & planCache , ns , BSONObj ( ) ) ) ; <nl> + ASSERT_OK ( PlanCacheClear : : clear ( & txn , & planCache , nss . ns ( ) , BSONObj ( ) ) ) ; <nl> ASSERT_EQUALS ( getShapes ( planCache ) . size ( ) , 0U ) ; <nl> } <nl> <nl> TEST ( PlanCacheCommandsTest , Canonicalize ) { <nl> OperationContextNoop txn ; <nl> <nl> / / Missing query field <nl> - ASSERT_NOT_OK ( PlanCacheCommand : : canonicalize ( & txn , ns , fromjson ( " { } " ) ) . getStatus ( ) ) ; <nl> + ASSERT_NOT_OK ( PlanCacheCommand : : canonicalize ( & txn , nss . ns ( ) , fromjson ( " { } " ) ) . getStatus ( ) ) ; <nl> / / Query needs to be an object <nl> - ASSERT_NOT_OK ( PlanCacheCommand : : canonicalize ( & txn , ns , fromjson ( " { query : 1 } " ) ) . 
getStatus ( ) ) ; <nl> - / / Sort needs to be an object <nl> ASSERT_NOT_OK ( <nl> - PlanCacheCommand : : canonicalize ( & txn , ns , fromjson ( " { query : { } , sort : 1 } " ) ) . getStatus ( ) ) ; <nl> - / / Bad query ( invalid sort order ) <nl> - ASSERT_NOT_OK ( PlanCacheCommand : : canonicalize ( & txn , ns , fromjson ( " { query : { } , sort : { a : 0 } } " ) ) <nl> + PlanCacheCommand : : canonicalize ( & txn , nss . ns ( ) , fromjson ( " { query : 1 } " ) ) . getStatus ( ) ) ; <nl> + / / Sort needs to be an object <nl> + ASSERT_NOT_OK ( PlanCacheCommand : : canonicalize ( & txn , nss . ns ( ) , fromjson ( " { query : { } , sort : 1 } " ) ) <nl> . getStatus ( ) ) ; <nl> + / / Bad query ( invalid sort order ) <nl> + ASSERT_NOT_OK ( PlanCacheCommand : : canonicalize ( <nl> + & txn , nss . ns ( ) , fromjson ( " { query : { } , sort : { a : 0 } } " ) ) . getStatus ( ) ) ; <nl> <nl> / / Valid parameters <nl> - auto statusWithCQ = PlanCacheCommand : : canonicalize ( & txn , ns , fromjson ( " { query : { a : 1 , b : 1 } } " ) ) ; <nl> + auto statusWithCQ = <nl> + PlanCacheCommand : : canonicalize ( & txn , nss . ns ( ) , fromjson ( " { query : { a : 1 , b : 1 } } " ) ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> unique_ptr < CanonicalQuery > query = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> <nl> <nl> / / Equivalent query should generate same key . <nl> - statusWithCQ = PlanCacheCommand : : canonicalize ( & txn , ns , fromjson ( " { query : { b : 1 , a : 1 } } " ) ) ; <nl> + statusWithCQ = <nl> + PlanCacheCommand : : canonicalize ( & txn , nss . ns ( ) , fromjson ( " { query : { b : 1 , a : 1 } } " ) ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> unique_ptr < CanonicalQuery > equivQuery = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> ASSERT_EQUALS ( planCache . computeKey ( * query ) , planCache . computeKey ( * equivQuery ) ) ; <nl> <nl> / / Sort query should generate different key from unsorted query . <nl> statusWithCQ = PlanCacheCommand : : canonicalize ( <nl> - & txn , ns , fromjson ( " { query : { a : 1 , b : 1 } , sort : { a : 1 , b : 1 } } " ) ) ; <nl> + & txn , nss . ns ( ) , fromjson ( " { query : { a : 1 , b : 1 } , sort : { a : 1 , b : 1 } } " ) ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> unique_ptr < CanonicalQuery > sortQuery1 = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> ASSERT_NOT_EQUALS ( planCache . computeKey ( * query ) , planCache . computeKey ( * sortQuery1 ) ) ; <nl> <nl> / / Confirm sort arguments are properly delimited ( SERVER - 17158 ) <nl> - statusWithCQ = <nl> - PlanCacheCommand : : canonicalize ( & txn , ns , fromjson ( " { query : { a : 1 , b : 1 } , sort : { aab : 1 } } " ) ) ; <nl> + statusWithCQ = PlanCacheCommand : : canonicalize ( <nl> + & txn , nss . ns ( ) , fromjson ( " { query : { a : 1 , b : 1 } , sort : { aab : 1 } } " ) ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> unique_ptr < CanonicalQuery > sortQuery2 = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> ASSERT_NOT_EQUALS ( planCache . computeKey ( * sortQuery1 ) , planCache . computeKey ( * sortQuery2 ) ) ; <nl> <nl> / / Changing order and / or value of predicates should not change key <nl> statusWithCQ = PlanCacheCommand : : canonicalize ( <nl> - & txn , ns , fromjson ( " { query : { b : 3 , a : 3 } , sort : { a : 1 , b : 1 } } " ) ) ; <nl> + & txn , nss . ns ( ) , fromjson ( " { query : { b : 3 , a : 3 } , sort : { a : 1 , b : 1 } } " ) ) ; <nl> ASSERT_OK ( statusWithCQ . 
getStatus ( ) ) ; <nl> unique_ptr < CanonicalQuery > sortQuery3 = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> ASSERT_EQUALS ( planCache . computeKey ( * sortQuery1 ) , planCache . computeKey ( * sortQuery3 ) ) ; <nl> <nl> / / Projected query should generate different key from unprojected query . <nl> statusWithCQ = PlanCacheCommand : : canonicalize ( <nl> - & txn , ns , fromjson ( " { query : { a : 1 , b : 1 } , projection : { _id : 0 , a : 1 } } " ) ) ; <nl> + & txn , nss . ns ( ) , fromjson ( " { query : { a : 1 , b : 1 } , projection : { _id : 0 , a : 1 } } " ) ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> unique_ptr < CanonicalQuery > projectionQuery = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> ASSERT_NOT_EQUALS ( planCache . computeKey ( * query ) , planCache . computeKey ( * projectionQuery ) ) ; <nl> TEST ( PlanCacheCommandsTest , planCacheClearInvalidParameter ) { <nl> OperationContextNoop txn ; <nl> <nl> / / Query field type must be BSON object . <nl> - ASSERT_NOT_OK ( PlanCacheClear : : clear ( & txn , & planCache , ns , fromjson ( " { query : 12345 } " ) ) ) ; <nl> + ASSERT_NOT_OK ( PlanCacheClear : : clear ( & txn , & planCache , nss . ns ( ) , fromjson ( " { query : 12345 } " ) ) ) ; <nl> ASSERT_NOT_OK ( <nl> - PlanCacheClear : : clear ( & txn , & planCache , ns , fromjson ( " { query : / keyisnotregex / } " ) ) ) ; <nl> + PlanCacheClear : : clear ( & txn , & planCache , nss . ns ( ) , fromjson ( " { query : / keyisnotregex / } " ) ) ) ; <nl> / / Query must pass canonicalization . <nl> - ASSERT_NOT_OK ( <nl> - PlanCacheClear : : clear ( & txn , & planCache , ns , fromjson ( " { query : { a : { $ no_such_op : 1 } } } " ) ) ) ; <nl> + ASSERT_NOT_OK ( PlanCacheClear : : clear ( <nl> + & txn , & planCache , nss . ns ( ) , fromjson ( " { query : { a : { $ no_such_op : 1 } } } " ) ) ) ; <nl> / / Sort present without query is an error . <nl> - ASSERT_NOT_OK ( PlanCacheClear : : clear ( & txn , & planCache , ns , fromjson ( " { sort : { a : 1 } } " ) ) ) ; <nl> + ASSERT_NOT_OK ( PlanCacheClear : : clear ( & txn , & planCache , nss . ns ( ) , fromjson ( " { sort : { a : 1 } } " ) ) ) ; <nl> / / Projection present without query is an error . <nl> - ASSERT_NOT_OK ( <nl> - PlanCacheClear : : clear ( & txn , & planCache , ns , fromjson ( " { projection : { _id : 0 , a : 1 } } " ) ) ) ; <nl> + ASSERT_NOT_OK ( PlanCacheClear : : clear ( <nl> + & txn , & planCache , nss . ns ( ) , fromjson ( " { projection : { _id : 0 , a : 1 } } " ) ) ) ; <nl> } <nl> <nl> TEST ( PlanCacheCommandsTest , planCacheClearUnknownKey ) { <nl> PlanCache planCache ; <nl> OperationContextNoop txn ; <nl> <nl> - ASSERT_OK ( PlanCacheClear : : clear ( & txn , & planCache , ns , fromjson ( " { query : { a : 1 } } " ) ) ) ; <nl> + ASSERT_OK ( PlanCacheClear : : clear ( & txn , & planCache , nss . ns ( ) , fromjson ( " { query : { a : 1 } } " ) ) ) ; <nl> } <nl> <nl> TEST ( PlanCacheCommandsTest , planCacheClearOneKey ) { <nl> / / Create 2 canonical queries . <nl> - auto statusWithCQA = CanonicalQuery : : canonicalize ( ns , fromjson ( " { a : 1 } " ) ) ; <nl> + auto statusWithCQA = CanonicalQuery : : canonicalize ( nss , fromjson ( " { a : 1 } " ) ) ; <nl> ASSERT_OK ( statusWithCQA . getStatus ( ) ) ; <nl> unique_ptr < CanonicalQuery > cqA = std : : move ( statusWithCQA . 
getValue ( ) ) ; <nl> - auto statusWithCQB = CanonicalQuery : : canonicalize ( ns , fromjson ( " { b : 1 } " ) ) ; <nl> + auto statusWithCQB = CanonicalQuery : : canonicalize ( nss , fromjson ( " { b : 1 } " ) ) ; <nl> ASSERT_OK ( statusWithCQB . getStatus ( ) ) ; <nl> unique_ptr < CanonicalQuery > cqB = std : : move ( statusWithCQB . getValue ( ) ) ; <nl> <nl> TEST ( PlanCacheCommandsTest , planCacheClearOneKey ) { <nl> BSONObjBuilder bob ; <nl> OperationContextNoop txn ; <nl> <nl> - ASSERT_OK ( PlanCacheClear : : clear ( & txn , & planCache , ns , BSON ( " query " < < cqB - > getQueryObj ( ) ) ) ) ; <nl> + ASSERT_OK ( <nl> + PlanCacheClear : : clear ( & txn , & planCache , nss . ns ( ) , BSON ( " query " < < cqB - > getQueryObj ( ) ) ) ) ; <nl> vector < BSONObj > shapesAfter = getShapes ( planCache ) ; <nl> ASSERT_EQUALS ( shapesAfter . size ( ) , 1U ) ; <nl> ASSERT_EQUALS ( shapesAfter [ 0 ] , shapeA ) ; <nl> vector < BSONObj > getPlans ( const PlanCache & planCache , <nl> <nl> BSONObjBuilder bob ; <nl> BSONObj cmdObj = BSON ( " query " < < query < < " sort " < < sort < < " projection " < < projection ) ; <nl> - ASSERT_OK ( PlanCacheListPlans : : list ( & txn , planCache , ns , cmdObj , & bob ) ) ; <nl> + ASSERT_OK ( PlanCacheListPlans : : list ( & txn , planCache , nss . ns ( ) , cmdObj , & bob ) ) ; <nl> BSONObj resultObj = bob . obj ( ) ; <nl> BSONElement plansElt = resultObj . getField ( " plans " ) ; <nl> ASSERT_EQUALS ( plansElt . type ( ) , mongo : : Array ) ; <nl> TEST ( PlanCacheCommandsTest , planCacheListPlansInvalidParameter ) { <nl> OperationContextNoop txn ; <nl> <nl> / / Missing query field is not ok . <nl> - ASSERT_NOT_OK ( PlanCacheListPlans : : list ( & txn , planCache , ns , BSONObj ( ) , & ignored ) ) ; <nl> + ASSERT_NOT_OK ( PlanCacheListPlans : : list ( & txn , planCache , nss . ns ( ) , BSONObj ( ) , & ignored ) ) ; <nl> / / Query field type must be BSON object . <nl> ASSERT_NOT_OK ( <nl> - PlanCacheListPlans : : list ( & txn , planCache , ns , fromjson ( " { query : 12345 } " ) , & ignored ) ) ; <nl> + PlanCacheListPlans : : list ( & txn , planCache , nss . ns ( ) , fromjson ( " { query : 12345 } " ) , & ignored ) ) ; <nl> ASSERT_NOT_OK ( PlanCacheListPlans : : list ( <nl> - & txn , planCache , ns , fromjson ( " { query : / keyisnotregex / } " ) , & ignored ) ) ; <nl> + & txn , planCache , nss . ns ( ) , fromjson ( " { query : / keyisnotregex / } " ) , & ignored ) ) ; <nl> } <nl> <nl> TEST ( PlanCacheCommandsTest , planCacheListPlansUnknownKey ) { <nl> TEST ( PlanCacheCommandsTest , planCacheListPlansUnknownKey ) { <nl> OperationContextNoop txn ; <nl> <nl> BSONObjBuilder ignored ; <nl> - ASSERT_OK ( PlanCacheListPlans : : list ( & txn , planCache , ns , fromjson ( " { query : { a : 1 } } " ) , & ignored ) ) ; <nl> + ASSERT_OK ( <nl> + PlanCacheListPlans : : list ( & txn , planCache , nss . ns ( ) , fromjson ( " { query : { a : 1 } } " ) , & ignored ) ) ; <nl> } <nl> <nl> TEST ( PlanCacheCommandsTest , planCacheListPlansOnlyOneSolutionTrue ) { <nl> / / Create a canonical query <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns , fromjson ( " { a : 1 } " ) ) ; <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , fromjson ( " { a : 1 } " ) ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> unique_ptr < CanonicalQuery > cq = std : : move ( statusWithCQ . 
getValue ( ) ) ; <nl> <nl> TEST ( PlanCacheCommandsTest , planCacheListPlansOnlyOneSolutionTrue ) { <nl> <nl> TEST ( PlanCacheCommandsTest , planCacheListPlansOnlyOneSolutionFalse ) { <nl> / / Create a canonical query <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns , fromjson ( " { a : 1 } " ) ) ; <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , fromjson ( " { a : 1 } " ) ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> unique_ptr < CanonicalQuery > cq = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> <nl> mmm a / src / mongo / db / dbcommands . cpp <nl> ppp b / src / mongo / db / dbcommands . cpp <nl> class CmdFileMD5 : public Command { <nl> BSONObj sort = BSON ( " files_id " < < 1 < < " n " < < 1 ) ; <nl> <nl> MONGO_WRITE_CONFLICT_RETRY_LOOP_BEGIN { <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns , query , sort , BSONObj ( ) ) ; <nl> + auto statusWithCQ = <nl> + CanonicalQuery : : canonicalize ( NamespaceString ( ns ) , query , sort , BSONObj ( ) ) ; <nl> if ( ! statusWithCQ . isOK ( ) ) { <nl> uasserted ( 17240 , " Can ' t canonicalize query " + query . toString ( ) ) ; <nl> return 0 ; <nl> mmm a / src / mongo / db / dbhelpers . cpp <nl> ppp b / src / mongo / db / dbhelpers . cpp <nl> RecordId Helpers : : findOne ( OperationContext * txn , <nl> <nl> const WhereCallbackReal whereCallback ( txn , collection - > ns ( ) . db ( ) ) ; <nl> <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( collection - > ns ( ) . ns ( ) , query , whereCallback ) ; <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( collection - > ns ( ) , query , whereCallback ) ; <nl> massert ( 17244 , " Could not canonicalize " + query . toString ( ) , statusWithCQ . isOK ( ) ) ; <nl> unique_ptr < CanonicalQuery > cq = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> <nl> mmm a / src / mongo / db / exec / sort . cpp <nl> ppp b / src / mongo / db / exec / sort . cpp <nl> void SortStageKeyGenerator : : getBoundsForSort ( const BSONObj & queryObj , const BSON <nl> params . indices . push_back ( sortOrder ) ; <nl> <nl> auto statusWithQueryForSort = <nl> - CanonicalQuery : : canonicalize ( " fake_ns " , queryObj , WhereCallbackNoop ( ) ) ; <nl> + CanonicalQuery : : canonicalize ( NamespaceString ( " fake . ns " ) , queryObj , WhereCallbackNoop ( ) ) ; <nl> verify ( statusWithQueryForSort . isOK ( ) ) ; <nl> unique_ptr < CanonicalQuery > queryForSort = std : : move ( statusWithQueryForSort . getValue ( ) ) ; <nl> <nl> mmm a / src / mongo / db / ops / parsed_delete . cpp <nl> ppp b / src / mongo / db / ops / parsed_delete . cpp <nl> Status ParsedDelete : : parseQueryToCQ ( ) { <nl> / / The projection needs to be applied after the delete operation , so we specify an empty <nl> / / BSONObj as the projection during canonicalization . <nl> const BSONObj emptyObj ; <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( _request - > getNamespaceString ( ) . ns ( ) , <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( _request - > getNamespaceString ( ) , <nl> _request - > getQuery ( ) , <nl> _request - > getSort ( ) , <nl> emptyObj , / / projection <nl> mmm a / src / mongo / db / ops / parsed_update . cpp <nl> ppp b / src / mongo / db / ops / parsed_update . cpp <nl> Status ParsedUpdate : : parseQueryToCQ ( ) { <nl> / / The projection needs to be applied after the update operation , so we specify an empty <nl> / / BSONObj as the projection during canonicalization . 
<nl> const BSONObj emptyObj ; <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( _request - > getNamespaceString ( ) . ns ( ) , <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( _request - > getNamespaceString ( ) , <nl> _request - > getQuery ( ) , <nl> _request - > getSort ( ) , <nl> emptyObj , / / projection <nl> mmm a / src / mongo / db / ops / update_driver . cpp <nl> ppp b / src / mongo / db / ops / update_driver . cpp <nl> Status UpdateDriver : : populateDocumentWithQueryFields ( const BSONObj & query , <nl> / / We canonicalize the query to collapse $ and / $ or , and the first arg ( ns ) is not needed <nl> / / Also , because this is for the upsert case , where we insert a new document if one was <nl> / / not found , the $ where clause does not make sense , hence empty WhereCallback . <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( " " , query , WhereCallbackNoop ( ) ) ; <nl> + auto statusWithCQ = <nl> + CanonicalQuery : : canonicalize ( NamespaceString ( " " ) , query , WhereCallbackNoop ( ) ) ; <nl> if ( ! statusWithCQ . isOK ( ) ) { <nl> return statusWithCQ . getStatus ( ) ; <nl> } <nl> mmm a / src / mongo / db / pipeline / pipeline_d . cpp <nl> ppp b / src / mongo / db / pipeline / pipeline_d . cpp <nl> shared_ptr < PlanExecutor > PipelineD : : prepareCursorSource ( <nl> <nl> if ( sortStage ) { <nl> auto statusWithCQ = CanonicalQuery : : canonicalize ( <nl> - pExpCtx - > ns . ns ( ) , queryObj , sortObj , projectionForQuery , whereCallback ) ; <nl> + pExpCtx - > ns , queryObj , sortObj , projectionForQuery , whereCallback ) ; <nl> <nl> if ( statusWithCQ . isOK ( ) ) { <nl> auto statusWithPlanExecutor = getExecutor ( txn , <nl> shared_ptr < PlanExecutor > PipelineD : : prepareCursorSource ( <nl> if ( ! exec . get ( ) ) { <nl> const BSONObj noSort ; <nl> auto statusWithCQ = CanonicalQuery : : canonicalize ( <nl> - pExpCtx - > ns . ns ( ) , queryObj , noSort , projectionForQuery , whereCallback ) ; <nl> + pExpCtx - > ns , queryObj , noSort , projectionForQuery , whereCallback ) ; <nl> uassertStatusOK ( statusWithCQ . getStatus ( ) ) ; <nl> <nl> exec = uassertStatusOK ( getExecutor ( txn , <nl> mmm a / src / mongo / db / query / canonical_query . cpp <nl> ppp b / src / mongo / db / query / canonical_query . 
cpp <nl> bool matchExpressionLessThan ( const MatchExpression * lhs , const MatchExpression * <nl> <nl> / / static <nl> StatusWith < std : : unique_ptr < CanonicalQuery > > CanonicalQuery : : canonicalize ( <nl> - const std : : string & ns , <nl> + NamespaceString nss , <nl> const BSONObj & query , <nl> const MatchExpressionParser : : WhereCallback & whereCallback ) { <nl> const BSONObj emptyObj ; <nl> - return CanonicalQuery : : canonicalize ( ns , query , emptyObj , emptyObj , 0 , 0 , whereCallback ) ; <nl> + return CanonicalQuery : : canonicalize ( <nl> + std : : move ( nss ) , query , emptyObj , emptyObj , 0 , 0 , whereCallback ) ; <nl> } <nl> <nl> / / static <nl> StatusWith < std : : unique_ptr < CanonicalQuery > > CanonicalQuery : : canonicalize ( <nl> - const std : : string & ns , <nl> + NamespaceString nss , <nl> const BSONObj & query , <nl> bool explain , <nl> const MatchExpressionParser : : WhereCallback & whereCallback ) { <nl> const BSONObj emptyObj ; <nl> - return CanonicalQuery : : canonicalize ( ns , <nl> + return CanonicalQuery : : canonicalize ( std : : move ( nss ) , <nl> query , <nl> emptyObj , / / sort <nl> emptyObj , / / projection <nl> StatusWith < std : : unique_ptr < CanonicalQuery > > CanonicalQuery : : canonicalize ( <nl> <nl> / / static <nl> StatusWith < std : : unique_ptr < CanonicalQuery > > CanonicalQuery : : canonicalize ( <nl> - const std : : string & ns , <nl> + NamespaceString nss , <nl> const BSONObj & query , <nl> long long skip , <nl> long long limit , <nl> const MatchExpressionParser : : WhereCallback & whereCallback ) { <nl> const BSONObj emptyObj ; <nl> - return CanonicalQuery : : canonicalize ( ns , query , emptyObj , emptyObj , skip , limit , whereCallback ) ; <nl> + return CanonicalQuery : : canonicalize ( <nl> + std : : move ( nss ) , query , emptyObj , emptyObj , skip , limit , whereCallback ) ; <nl> } <nl> <nl> / / static <nl> StatusWith < std : : unique_ptr < CanonicalQuery > > CanonicalQuery : : canonicalize ( <nl> - const std : : string & ns , <nl> + NamespaceString nss , <nl> const BSONObj & query , <nl> const BSONObj & sort , <nl> const BSONObj & proj , <nl> const MatchExpressionParser : : WhereCallback & whereCallback ) { <nl> - return CanonicalQuery : : canonicalize ( ns , query , sort , proj , 0 , 0 , whereCallback ) ; <nl> + return CanonicalQuery : : canonicalize ( std : : move ( nss ) , query , sort , proj , 0 , 0 , whereCallback ) ; <nl> } <nl> <nl> / / static <nl> StatusWith < std : : unique_ptr < CanonicalQuery > > CanonicalQuery : : canonicalize ( <nl> - const std : : string & ns , <nl> + NamespaceString nss , <nl> const BSONObj & query , <nl> const BSONObj & sort , <nl> const BSONObj & proj , <nl> StatusWith < std : : unique_ptr < CanonicalQuery > > CanonicalQuery : : canonicalize ( <nl> const MatchExpressionParser : : WhereCallback & whereCallback ) { <nl> const BSONObj emptyObj ; <nl> return CanonicalQuery : : canonicalize ( <nl> - ns , query , sort , proj , skip , limit , emptyObj , whereCallback ) ; <nl> + std : : move ( nss ) , query , sort , proj , skip , limit , emptyObj , whereCallback ) ; <nl> } <nl> <nl> / / static <nl> StatusWith < std : : unique_ptr < CanonicalQuery > > CanonicalQuery : : canonicalize ( <nl> - const std : : string & ns , <nl> + NamespaceString nss , <nl> const BSONObj & query , <nl> const BSONObj & sort , <nl> const BSONObj & proj , <nl> StatusWith < std : : unique_ptr < CanonicalQuery > > CanonicalQuery : : canonicalize ( <nl> const BSONObj & hint , <nl> const MatchExpressionParser : : WhereCallback 
& whereCallback ) { <nl> const BSONObj emptyObj ; <nl> - return CanonicalQuery : : canonicalize ( ns , <nl> + return CanonicalQuery : : canonicalize ( std : : move ( nss ) , <nl> query , <nl> sort , <nl> proj , <nl> StatusWith < std : : unique_ptr < CanonicalQuery > > CanonicalQuery : : canonicalize ( <nl> <nl> / / static <nl> StatusWith < std : : unique_ptr < CanonicalQuery > > CanonicalQuery : : canonicalize ( <nl> - const std : : string & ns , <nl> + NamespaceString nss , <nl> const BSONObj & query , <nl> const BSONObj & sort , <nl> const BSONObj & proj , <nl> StatusWith < std : : unique_ptr < CanonicalQuery > > CanonicalQuery : : canonicalize ( <nl> / / Pass empty sort and projection . <nl> BSONObj emptyObj ; <nl> <nl> - auto lpqStatus = LiteParsedQuery : : makeAsOpQuery ( NamespaceString ( ns ) , <nl> - skip , <nl> - limit , <nl> - 0 , <nl> - query , <nl> - proj , <nl> - sort , <nl> - hint , <nl> - minObj , <nl> - maxObj , <nl> - snapshot , <nl> - explain ) ; <nl> + auto lpqStatus = LiteParsedQuery : : makeAsOpQuery ( <nl> + std : : move ( nss ) , skip , limit , 0 , query , proj , sort , hint , minObj , maxObj , snapshot , explain ) ; <nl> if ( ! lpqStatus . isOK ( ) ) { <nl> return lpqStatus . getStatus ( ) ; <nl> } <nl> mmm a / src / mongo / db / query / canonical_query . h <nl> ppp b / src / mongo / db / query / canonical_query . h <nl> class CanonicalQuery { <nl> MatchExpressionParser : : WhereCallback ( ) ) ; <nl> <nl> static StatusWith < std : : unique_ptr < CanonicalQuery > > canonicalize ( <nl> - const std : : string & ns , <nl> + NamespaceString nss , <nl> const BSONObj & query , <nl> const MatchExpressionParser : : WhereCallback & whereCallback = <nl> MatchExpressionParser : : WhereCallback ( ) ) ; <nl> <nl> static StatusWith < std : : unique_ptr < CanonicalQuery > > canonicalize ( <nl> - const std : : string & ns , <nl> + NamespaceString nss , <nl> const BSONObj & query , <nl> bool explain , <nl> const MatchExpressionParser : : WhereCallback & whereCallback = <nl> MatchExpressionParser : : WhereCallback ( ) ) ; <nl> <nl> static StatusWith < std : : unique_ptr < CanonicalQuery > > canonicalize ( <nl> - const std : : string & ns , <nl> + NamespaceString nss , <nl> const BSONObj & query , <nl> long long skip , <nl> long long limit , <nl> class CanonicalQuery { <nl> MatchExpressionParser : : WhereCallback ( ) ) ; <nl> <nl> static StatusWith < std : : unique_ptr < CanonicalQuery > > canonicalize ( <nl> - const std : : string & ns , <nl> + NamespaceString nss , <nl> const BSONObj & query , <nl> const BSONObj & sort , <nl> const BSONObj & proj , <nl> class CanonicalQuery { <nl> MatchExpressionParser : : WhereCallback ( ) ) ; <nl> <nl> static StatusWith < std : : unique_ptr < CanonicalQuery > > canonicalize ( <nl> - const std : : string & ns , <nl> + NamespaceString nss , <nl> const BSONObj & query , <nl> const BSONObj & sort , <nl> const BSONObj & proj , <nl> class CanonicalQuery { <nl> MatchExpressionParser : : WhereCallback ( ) ) ; <nl> <nl> static StatusWith < std : : unique_ptr < CanonicalQuery > > canonicalize ( <nl> - const std : : string & ns , <nl> + NamespaceString nss , <nl> const BSONObj & query , <nl> const BSONObj & sort , <nl> const BSONObj & proj , <nl> class CanonicalQuery { <nl> MatchExpressionParser : : WhereCallback ( ) ) ; <nl> <nl> static StatusWith < std : : unique_ptr < CanonicalQuery > > canonicalize ( <nl> - const std : : string & ns , <nl> + NamespaceString nss , <nl> const BSONObj & query , <nl> const BSONObj & sort , <nl> const BSONObj & proj , <nl> mmm 
a / src / mongo / db / query / canonical_query_test . cpp <nl> ppp b / src / mongo / db / query / canonical_query_test . cpp <nl> TEST ( CanonicalQueryTest , SortTreeNumChildrenComparison ) { <nl> * / <nl> unique_ptr < CanonicalQuery > canonicalize ( const char * queryStr ) { <nl> BSONObj queryObj = fromjson ( queryStr ) ; <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( nss . ns ( ) , queryObj ) ; <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , queryObj ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> return std : : move ( statusWithCQ . getValue ( ) ) ; <nl> } <nl> std : : unique_ptr < CanonicalQuery > canonicalize ( const char * queryStr , <nl> BSONObj queryObj = fromjson ( queryStr ) ; <nl> BSONObj sortObj = fromjson ( sortStr ) ; <nl> BSONObj projObj = fromjson ( projStr ) ; <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( nss . ns ( ) , queryObj , sortObj , projObj ) ; <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , queryObj , sortObj , projObj ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> return std : : move ( statusWithCQ . getValue ( ) ) ; <nl> } <nl> mmm a / src / mongo / db / query / get_executor . cpp <nl> ppp b / src / mongo / db / query / get_executor . cpp <nl> StatusWith < unique_ptr < PlanExecutor > > getExecutor ( OperationContext * txn , <nl> ! collection - > getIndexCatalog ( ) - > findIdIndex ( txn ) ) { <nl> const WhereCallbackReal whereCallback ( txn , collection - > ns ( ) . db ( ) ) ; <nl> auto statusWithCQ = <nl> - CanonicalQuery : : canonicalize ( collection - > ns ( ) . ns ( ) , unparsedQuery , whereCallback ) ; <nl> + CanonicalQuery : : canonicalize ( collection - > ns ( ) , unparsedQuery , whereCallback ) ; <nl> if ( ! statusWithCQ . isOK ( ) ) { <nl> return statusWithCQ . getStatus ( ) ; <nl> } <nl> StatusWith < unique_ptr < PlanExecutor > > getExecutorGroup ( OperationContext * txn , <nl> const WhereCallbackReal whereCallback ( txn , nss . db ( ) ) ; <nl> <nl> auto statusWithCQ = <nl> - CanonicalQuery : : canonicalize ( request . ns , request . query , request . explain , whereCallback ) ; <nl> + CanonicalQuery : : canonicalize ( nss , request . query , request . explain , whereCallback ) ; <nl> if ( ! statusWithCQ . isOK ( ) ) { <nl> return statusWithCQ . getStatus ( ) ; <nl> } <nl> StatusWith < unique_ptr < PlanExecutor > > getExecutorCount ( OperationContext * txn , <nl> / / If query or hint is not empty , canonicalize the query before working with collection . <nl> typedef MatchExpressionParser : : WhereCallback WhereCallback ; <nl> auto statusWithCQ = CanonicalQuery : : canonicalize ( <nl> - request . getNs ( ) . ns ( ) , <nl> + request . getNs ( ) , <nl> request . getQuery ( ) , <nl> BSONObj ( ) , / / sort <nl> BSONObj ( ) , / / projection <nl> StatusWith < unique_ptr < PlanExecutor > > getExecutorDistinct ( OperationContext * txn , <nl> / / If there are no suitable indices for the distinct hack bail out now into regular planning <nl> / / with no projection . <nl> if ( plannerParams . indices . empty ( ) ) { <nl> - auto statusWithCQ = <nl> - CanonicalQuery : : canonicalize ( collection - > ns ( ) . ns ( ) , query , whereCallback ) ; <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( collection - > ns ( ) , query , whereCallback ) ; <nl> if ( ! statusWithCQ . isOK ( ) ) { <nl> return statusWithCQ . 
getStatus ( ) ; <nl> } <nl> StatusWith < unique_ptr < PlanExecutor > > getExecutorDistinct ( OperationContext * txn , <nl> BSONObj projection = getDistinctProjection ( field ) ; <nl> <nl> / / Apply a projection of the key . Empty BSONObj ( ) is for the sort . <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( <nl> - collection - > ns ( ) . ns ( ) , query , BSONObj ( ) , projection , whereCallback ) ; <nl> + auto statusWithCQ = <nl> + CanonicalQuery : : canonicalize ( collection - > ns ( ) , query , BSONObj ( ) , projection , whereCallback ) ; <nl> if ( ! statusWithCQ . isOK ( ) ) { <nl> return statusWithCQ . getStatus ( ) ; <nl> } <nl> StatusWith < unique_ptr < PlanExecutor > > getExecutorDistinct ( OperationContext * txn , <nl> } <nl> <nl> / / We drop the projection from the ' cq ' . Unfortunately this is not trivial . <nl> - statusWithCQ = CanonicalQuery : : canonicalize ( collection - > ns ( ) . ns ( ) , query , whereCallback ) ; <nl> + statusWithCQ = CanonicalQuery : : canonicalize ( collection - > ns ( ) , query , whereCallback ) ; <nl> if ( ! statusWithCQ . isOK ( ) ) { <nl> return statusWithCQ . getStatus ( ) ; <nl> } <nl> mmm a / src / mongo / db / query / get_executor_test . cpp <nl> ppp b / src / mongo / db / query / get_executor_test . cpp <nl> namespace { <nl> <nl> using std : : unique_ptr ; <nl> <nl> - static const char * ns = " somebogusns " ; <nl> + static const NamespaceString nss ( " test . collection " ) ; <nl> <nl> / * * <nl> * Utility functions to create a CanonicalQuery <nl> unique_ptr < CanonicalQuery > canonicalize ( const char * queryStr , <nl> BSONObj queryObj = fromjson ( queryStr ) ; <nl> BSONObj sortObj = fromjson ( sortStr ) ; <nl> BSONObj projObj = fromjson ( projStr ) ; <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns , queryObj , sortObj , projObj ) ; <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , queryObj , sortObj , projObj ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> return std : : move ( statusWithCQ . getValue ( ) ) ; <nl> } <nl> mmm a / src / mongo / db / query / plan_cache_test . cpp <nl> ppp b / src / mongo / db / query / plan_cache_test . cpp <nl> using std : : string ; <nl> using std : : unique_ptr ; <nl> using std : : vector ; <nl> <nl> - static const char * ns = " somebogusns " ; <nl> + static const NamespaceString nss ( " test . collection " ) ; <nl> <nl> / * * <nl> * Utility functions to create a CanonicalQuery <nl> * / <nl> unique_ptr < CanonicalQuery > canonicalize ( const BSONObj & queryObj ) { <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns , queryObj ) ; <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , queryObj ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> return std : : move ( statusWithCQ . getValue ( ) ) ; <nl> } <nl> unique_ptr < CanonicalQuery > canonicalize ( const char * queryStr , <nl> BSONObj queryObj = fromjson ( queryStr ) ; <nl> BSONObj sortObj = fromjson ( sortStr ) ; <nl> BSONObj projObj = fromjson ( projStr ) ; <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns , queryObj , sortObj , projObj ) ; <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , queryObj , sortObj , projObj ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> return std : : move ( statusWithCQ . 
getValue ( ) ) ; <nl> } <nl> unique_ptr < CanonicalQuery > canonicalize ( const char * queryStr , <nl> BSONObj hintObj = fromjson ( hintStr ) ; <nl> BSONObj minObj = fromjson ( minStr ) ; <nl> BSONObj maxObj = fromjson ( maxStr ) ; <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns , <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , <nl> queryObj , <nl> sortObj , <nl> projObj , <nl> unique_ptr < CanonicalQuery > canonicalize ( const char * queryStr , <nl> BSONObj minObj = fromjson ( minStr ) ; <nl> BSONObj maxObj = fromjson ( maxStr ) ; <nl> auto statusWithCQ = CanonicalQuery : : canonicalize ( <nl> - ns , queryObj , sortObj , projObj , skip , limit , hintObj , minObj , maxObj , snapshot , explain ) ; <nl> + nss , queryObj , sortObj , projObj , skip , limit , hintObj , minObj , maxObj , snapshot , explain ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> return std : : move ( statusWithCQ . getValue ( ) ) ; <nl> } <nl> class CachePlanSelectionTest : public mongo : : unittest : : Test { <nl> solns . clear ( ) ; <nl> <nl> <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns , <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , <nl> query , <nl> sort , <nl> proj , <nl> class CachePlanSelectionTest : public mongo : : unittest : : Test { <nl> const BSONObj & sort , <nl> const BSONObj & proj , <nl> const QuerySolution & soln ) const { <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns , query , sort , proj ) ; <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , query , sort , proj ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> unique_ptr < CanonicalQuery > scopedCq = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> <nl> mmm a / src / mongo / db / query / query_planner_test . cpp <nl> ppp b / src / mongo / db / query / query_planner_test . cpp <nl> TEST ( BadInputTest , CacheDataFromTaggedTree ) { <nl> / / No relevant index matching the index tag . <nl> relevantIndices . push_back ( IndexEntry ( BSON ( " a " < < 1 ) ) ) ; <nl> <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( " ns " , BSON ( " a " < < 3 ) ) ; <nl> + auto statusWithCQ = <nl> + CanonicalQuery : : canonicalize ( NamespaceString ( " test . collection " ) , BSON ( " a " < < 3 ) ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> std : : unique_ptr < CanonicalQuery > scopedCq = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> scopedCq - > root ( ) - > setTag ( new IndexTag ( 1 ) ) ; <nl> TEST ( BadInputTest , CacheDataFromTaggedTree ) { <nl> } <nl> <nl> TEST ( BadInputTest , TagAccordingToCache ) { <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( " ns " , BSON ( " a " < < 3 ) ) ; <nl> + const NamespaceString nss ( " test . collection " ) ; <nl> + <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , BSON ( " a " < < 3 ) ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> std : : unique_ptr < CanonicalQuery > scopedCq = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> <nl> TEST ( BadInputTest , TagAccordingToCache ) { <nl> ASSERT_OK ( s ) ; <nl> <nl> / / Regenerate canonical query in order to clear tags . <nl> - statusWithCQ = CanonicalQuery : : canonicalize ( " ns " , BSON ( " a " < < 3 ) ) ; <nl> + statusWithCQ = CanonicalQuery : : canonicalize ( nss , BSON ( " a " < < 3 ) ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> scopedCq = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> <nl> mmm a / src / mongo / db / query / query_planner_test_fixture . 
cpp <nl> ppp b / src / mongo / db / query / query_planner_test_fixture . cpp <nl> namespace mongo { <nl> <nl> using unittest : : assertGet ; <nl> <nl> - const char * QueryPlannerTest : : ns = " somebogus . ns " ; <nl> + const NamespaceString QueryPlannerTest : : nss ( " test . collection " ) ; <nl> <nl> void QueryPlannerTest : : setUp ( ) { <nl> internalQueryPlannerEnableHashIntersection = true ; <nl> void QueryPlannerTest : : runQueryFull ( const BSONObj & query , <nl> / / Clean up any previous state from a call to runQueryFull <nl> solns . clear ( ) ; <nl> <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns , <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , <nl> query , <nl> sort , <nl> proj , <nl> void QueryPlannerTest : : runInvalidQueryFull ( const BSONObj & query , <nl> bool snapshot ) { <nl> solns . clear ( ) ; <nl> <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns , <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , <nl> query , <nl> sort , <nl> proj , <nl> void QueryPlannerTest : : runInvalidQueryFull ( const BSONObj & query , <nl> void QueryPlannerTest : : runQueryAsCommand ( const BSONObj & cmdObj ) { <nl> solns . clear ( ) ; <nl> <nl> - const NamespaceString nss ( ns ) ; <nl> invariant ( nss . isValid ( ) ) ; <nl> <nl> const bool isExplain = false ; <nl> mmm a / src / mongo / db / query / query_planner_test_fixture . h <nl> ppp b / src / mongo / db / query / query_planner_test_fixture . h <nl> class QueryPlannerTest : public mongo : : unittest : : Test { <nl> / / Data members . <nl> / / <nl> <nl> - static const char * ns ; <nl> + static const NamespaceString nss ; <nl> <nl> BSONObj queryObj ; <nl> std : : unique_ptr < CanonicalQuery > cq ; <nl> mmm a / src / mongo / dbtests / documentsourcetests . cpp <nl> ppp b / src / mongo / dbtests / documentsourcetests . cpp <nl> using boost : : intrusive_ptr ; <nl> using std : : unique_ptr ; <nl> using std : : vector ; <nl> <nl> - static const char * const ns = " unittests . documentsourcetests " ; <nl> + static const NamespaceString nss ( " unittests . documentsourcetests " ) ; <nl> static const BSONObj metaTextScore = BSON ( " $ meta " <nl> < < " textScore " ) ; <nl> <nl> class CollectionBase { <nl> CollectionBase ( ) : client ( & _opCtx ) { } <nl> <nl> ~ CollectionBase ( ) { <nl> - client . dropCollection ( ns ) ; <nl> + client . dropCollection ( nss . ns ( ) ) ; <nl> } <nl> <nl> protected : <nl> using mongo : : DocumentSourceCursor ; <nl> <nl> class Base : public CollectionBase { <nl> public : <nl> - Base ( ) : _ctx ( new ExpressionContext ( & _opCtx , NamespaceString ( ns ) ) ) { <nl> + Base ( ) : _ctx ( new ExpressionContext ( & _opCtx , nss ) ) { <nl> _ctx - > tempDir = storageGlobalParams . dbpath + " / _tmp " ; <nl> } <nl> <nl> class Base : public CollectionBase { <nl> _source . reset ( ) ; <nl> _exec . reset ( ) ; <nl> <nl> - OldClientWriteContext ctx ( & _opCtx , ns ) ; <nl> - auto cq = uassertStatusOK ( CanonicalQuery : : canonicalize ( ns , / * query = * / BSONObj ( ) ) ) ; <nl> + OldClientWriteContext ctx ( & _opCtx , nss . ns ( ) ) ; <nl> + auto cq = uassertStatusOK ( CanonicalQuery : : canonicalize ( nss , / * query = * / BSONObj ( ) ) ) ; <nl> _exec = uassertStatusOK ( <nl> getExecutor ( & _opCtx , ctx . 
getCollection ( ) , std : : move ( cq ) , PlanExecutor : : YIELD_MANUAL ) ) ; <nl> <nl> _exec - > saveState ( ) ; <nl> _exec - > registerExec ( ) ; <nl> <nl> - _source = DocumentSourceCursor : : create ( ns , _exec , _ctx ) ; <nl> + _source = DocumentSourceCursor : : create ( nss . ns ( ) , _exec , _ctx ) ; <nl> } <nl> intrusive_ptr < ExpressionContext > ctx ( ) { <nl> return _ctx ; <nl> class Empty : public Base { <nl> class Iterate : public Base { <nl> public : <nl> void run ( ) { <nl> - client . insert ( ns , BSON ( " a " < < 1 ) ) ; <nl> + client . insert ( nss . ns ( ) , BSON ( " a " < < 1 ) ) ; <nl> createSource ( ) ; <nl> / / The DocumentSourceCursor doesn ' t hold a read lock . <nl> ASSERT ( ! _opCtx . lockState ( ) - > isReadLocked ( ) ) ; <nl> class Dispose : public Base { <nl> class IterateDispose : public Base { <nl> public : <nl> void run ( ) { <nl> - client . insert ( ns , BSON ( " a " < < 1 ) ) ; <nl> - client . insert ( ns , BSON ( " a " < < 2 ) ) ; <nl> - client . insert ( ns , BSON ( " a " < < 3 ) ) ; <nl> + client . insert ( nss . ns ( ) , BSON ( " a " < < 1 ) ) ; <nl> + client . insert ( nss . ns ( ) , BSON ( " a " < < 2 ) ) ; <nl> + client . insert ( nss . ns ( ) , BSON ( " a " < < 3 ) ) ; <nl> createSource ( ) ; <nl> / / The result is as expected . <nl> boost : : optional < Document > next = source ( ) - > getNext ( ) ; <nl> class LimitCoalesce : public Base { <nl> return DocumentSourceLimit : : create ( ctx ( ) , limit ) ; <nl> } <nl> void run ( ) { <nl> - client . insert ( ns , BSON ( " a " < < 1 ) ) ; <nl> - client . insert ( ns , BSON ( " a " < < 2 ) ) ; <nl> - client . insert ( ns , BSON ( " a " < < 3 ) ) ; <nl> + client . insert ( nss . ns ( ) , BSON ( " a " < < 1 ) ) ; <nl> + client . insert ( nss . ns ( ) , BSON ( " a " < < 2 ) ) ; <nl> + client . insert ( nss . ns ( ) , BSON ( " a " < < 3 ) ) ; <nl> createSource ( ) ; <nl> <nl> / / initial limit becomes limit of cursor <nl> mmm a / src / mongo / dbtests / executor_registry . cpp <nl> ppp b / src / mongo / dbtests / executor_registry . cpp <nl> namespace ExecutorRegistry { <nl> <nl> using std : : unique_ptr ; <nl> <nl> + static const NamespaceString nss ( " unittests . ExecutorRegistryDiskLocInvalidation " ) ; <nl> + <nl> class ExecutorRegistryBase { <nl> public : <nl> ExecutorRegistryBase ( ) : _client ( & _opCtx ) { <nl> - _ctx . reset ( new OldClientWriteContext ( & _opCtx , ns ( ) ) ) ; <nl> - _client . dropCollection ( ns ( ) ) ; <nl> + _ctx . reset ( new OldClientWriteContext ( & _opCtx , nss . ns ( ) ) ) ; <nl> + _client . dropCollection ( nss . ns ( ) ) ; <nl> <nl> for ( int i = 0 ; i < N ( ) ; + + i ) { <nl> - _client . insert ( ns ( ) , BSON ( " foo " < < i ) ) ; <nl> + _client . insert ( nss . ns ( ) , BSON ( " foo " < < i ) ) ; <nl> } <nl> } <nl> <nl> / * * <nl> - * Return a plan executor that is going over the collection in ns ( ) . <nl> + * Return a plan executor that is going over the collection in nss . ns ( ) . <nl> * / <nl> PlanExecutor * getCollscan ( ) { <nl> unique_ptr < WorkingSet > ws ( new WorkingSet ( ) ) ; <nl> class ExecutorRegistryBase { <nl> unique_ptr < CollectionScan > scan ( new CollectionScan ( & _opCtx , params , ws . get ( ) , NULL ) ) ; <nl> <nl> / / Create a plan executor to hold it <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns ( ) , BSONObj ( ) ) ; <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , BSONObj ( ) ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> std : : unique_ptr < CanonicalQuery > cq = std : : move ( statusWithCQ . 
getValue ( ) ) ; <nl> <nl> class ExecutorRegistryBase { <nl> std : : move ( ws ) , <nl> std : : move ( scan ) , <nl> std : : move ( cq ) , <nl> - _ctx - > db ( ) - > getCollection ( ns ( ) ) , <nl> + _ctx - > db ( ) - > getCollection ( nss . ns ( ) ) , <nl> PlanExecutor : : YIELD_MANUAL ) ; <nl> ASSERT_OK ( statusWithPlanExecutor . getStatus ( ) ) ; <nl> return statusWithPlanExecutor . getValue ( ) . release ( ) ; <nl> class ExecutorRegistryBase { <nl> void registerExecutor ( PlanExecutor * exec ) { <nl> WriteUnitOfWork wuow ( & _opCtx ) ; <nl> _ctx - > db ( ) <nl> - - > getOrCreateCollection ( & _opCtx , ns ( ) ) <nl> + - > getOrCreateCollection ( & _opCtx , nss . ns ( ) ) <nl> - > getCursorManager ( ) <nl> - > registerExecutor ( exec ) ; <nl> wuow . commit ( ) ; <nl> class ExecutorRegistryBase { <nl> void deregisterExecutor ( PlanExecutor * exec ) { <nl> WriteUnitOfWork wuow ( & _opCtx ) ; <nl> _ctx - > db ( ) <nl> - - > getOrCreateCollection ( & _opCtx , ns ( ) ) <nl> + - > getOrCreateCollection ( & _opCtx , nss . ns ( ) ) <nl> - > getCursorManager ( ) <nl> - > deregisterExecutor ( exec ) ; <nl> wuow . commit ( ) ; <nl> class ExecutorRegistryBase { <nl> } <nl> <nl> Collection * collection ( ) { <nl> - return _ctx - > db ( ) - > getCollection ( ns ( ) ) ; <nl> - } <nl> - <nl> - static const char * ns ( ) { <nl> - return " unittests . ExecutorRegistryDiskLocInvalidation " ; <nl> + return _ctx - > db ( ) - > getCollection ( nss . ns ( ) ) ; <nl> } <nl> <nl> / / Order of these is important for initialization <nl> class ExecutorRegistryDiskLocInvalid : public ExecutorRegistryBase { <nl> / / stuff going on in the yield . <nl> <nl> / / Delete some data , namely the next 2 things we ' d expect . <nl> - _client . remove ( ns ( ) , BSON ( " foo " < < 10 ) ) ; <nl> - _client . remove ( ns ( ) , BSON ( " foo " < < 11 ) ) ; <nl> + _client . remove ( nss . ns ( ) , BSON ( " foo " < < 10 ) ) ; <nl> + _client . remove ( nss . ns ( ) , BSON ( " foo " < < 11 ) ) ; <nl> <nl> / / At this point , we ' re done yielding . We recover our lock . <nl> <nl> class ExecutorRegistryDropCollection : public ExecutorRegistryBase { <nl> registerExecutor ( run . get ( ) ) ; <nl> <nl> / / Drop our collection . <nl> - _client . dropCollection ( ns ( ) ) ; <nl> + _client . dropCollection ( nss . ns ( ) ) ; <nl> <nl> / / Unregister and restore state . <nl> deregisterExecutor ( run . get ( ) ) ; <nl> class ExecutorRegistryDropAllIndices : public ExecutorRegistryBase { <nl> unique_ptr < PlanExecutor > run ( getCollscan ( ) ) ; <nl> BSONObj obj ; <nl> <nl> - ASSERT_OK ( dbtests : : createIndex ( & _opCtx , ns ( ) , BSON ( " foo " < < 1 ) ) ) ; <nl> + ASSERT_OK ( dbtests : : createIndex ( & _opCtx , nss . ns ( ) , BSON ( " foo " < < 1 ) ) ) ; <nl> <nl> / / Read some of it . <nl> for ( int i = 0 ; i < 10 ; + + i ) { <nl> class ExecutorRegistryDropAllIndices : public ExecutorRegistryBase { <nl> registerExecutor ( run . get ( ) ) ; <nl> <nl> / / Drop all indices . <nl> - _client . dropIndexes ( ns ( ) ) ; <nl> + _client . dropIndexes ( nss . ns ( ) ) ; <nl> <nl> / / Unregister and restore state . <nl> deregisterExecutor ( run . get ( ) ) ; <nl> class ExecutorRegistryDropOneIndex : public ExecutorRegistryBase { <nl> unique_ptr < PlanExecutor > run ( getCollscan ( ) ) ; <nl> BSONObj obj ; <nl> <nl> - ASSERT_OK ( dbtests : : createIndex ( & _opCtx , ns ( ) , BSON ( " foo " < < 1 ) ) ) ; <nl> + ASSERT_OK ( dbtests : : createIndex ( & _opCtx , nss . ns ( ) , BSON ( " foo " < < 1 ) ) ) ; <nl> <nl> / / Read some of it . 
<nl> for ( int i = 0 ; i < 10 ; + + i ) { <nl> class ExecutorRegistryDropOneIndex : public ExecutorRegistryBase { <nl> registerExecutor ( run . get ( ) ) ; <nl> <nl> / / Drop a specific index . <nl> - _client . dropIndex ( ns ( ) , BSON ( " foo " < < 1 ) ) ; <nl> + _client . dropIndex ( nss . ns ( ) , BSON ( " foo " < < 1 ) ) ; <nl> <nl> / / Unregister and restore state . <nl> deregisterExecutor ( run . get ( ) ) ; <nl> class ExecutorRegistryDropDatabase : public ExecutorRegistryBase { <nl> / / requires a " global write lock . " <nl> _ctx . reset ( ) ; <nl> _client . dropDatabase ( " somesillydb " ) ; <nl> - _ctx . reset ( new OldClientWriteContext ( & _opCtx , ns ( ) ) ) ; <nl> + _ctx . reset ( new OldClientWriteContext ( & _opCtx , nss . ns ( ) ) ) ; <nl> <nl> / / Unregister and restore state . <nl> deregisterExecutor ( run . get ( ) ) ; <nl> class ExecutorRegistryDropDatabase : public ExecutorRegistryBase { <nl> / / Drop our DB . Once again , must give up the lock . <nl> _ctx . reset ( ) ; <nl> _client . dropDatabase ( " unittests " ) ; <nl> - _ctx . reset ( new OldClientWriteContext ( & _opCtx , ns ( ) ) ) ; <nl> + _ctx . reset ( new OldClientWriteContext ( & _opCtx , nss . ns ( ) ) ) ; <nl> <nl> / / Unregister and restore state . <nl> deregisterExecutor ( run . get ( ) ) ; <nl> mmm a / src / mongo / dbtests / oplogstarttests . cpp <nl> ppp b / src / mongo / dbtests / oplogstarttests . cpp <nl> namespace OplogStartTests { <nl> using std : : unique_ptr ; <nl> using std : : string ; <nl> <nl> + static const NamespaceString nss ( " unittests . oplogstarttests " ) ; <nl> + <nl> class Base { <nl> public : <nl> Base ( ) <nl> : _txn ( ) , <nl> _scopedXact ( & _txn , MODE_X ) , <nl> _lk ( _txn . lockState ( ) ) , <nl> - _context ( & _txn , ns ( ) ) , <nl> + _context ( & _txn , nss . ns ( ) ) , <nl> _client ( & _txn ) { <nl> - Collection * c = _context . db ( ) - > getCollection ( ns ( ) ) ; <nl> + Collection * c = _context . db ( ) - > getCollection ( nss . ns ( ) ) ; <nl> if ( ! c ) { <nl> WriteUnitOfWork wuow ( & _txn ) ; <nl> - c = _context . db ( ) - > createCollection ( & _txn , ns ( ) ) ; <nl> + c = _context . db ( ) - > createCollection ( & _txn , nss . ns ( ) ) ; <nl> wuow . commit ( ) ; <nl> } <nl> ASSERT ( c - > getIndexCatalog ( ) - > haveIdIndex ( & _txn ) ) ; <nl> } <nl> <nl> ~ Base ( ) { <nl> - client ( ) - > dropCollection ( ns ( ) ) ; <nl> + client ( ) - > dropCollection ( nss . ns ( ) ) ; <nl> <nl> / / The OplogStart stage is not allowed to outlive it ' s RecoveryUnit . <nl> _stage . reset ( ) ; <nl> } <nl> <nl> protected : <nl> - static const char * ns ( ) { <nl> - return " unittests . oplogstarttests " ; <nl> - } <nl> - static const char * dbname ( ) { <nl> - return " unittests " ; <nl> - } <nl> - static const char * collname ( ) { <nl> - return " oplogstarttests " ; <nl> - } <nl> - <nl> Collection * collection ( ) { <nl> - return _context . db ( ) - > getCollection ( ns ( ) ) ; <nl> + return _context . db ( ) - > getCollection ( nss . ns ( ) ) ; <nl> } <nl> <nl> DBDirectClient * client ( ) { <nl> class Base { <nl> } <nl> <nl> void setupFromQuery ( const BSONObj & query ) { <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns ( ) , query ) ; <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , query ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> _cq = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> _oplogws . 
reset ( new WorkingSet ( ) ) ; <nl> class OplogStartIsOldest : public Base { <nl> public : <nl> void run ( ) { <nl> for ( int i = 0 ; i < 10 ; + + i ) { <nl> - client ( ) - > insert ( ns ( ) , BSON ( " _id " < < i < < " ts " < < i ) ) ; <nl> + client ( ) - > insert ( nss . ns ( ) , BSON ( " _id " < < i < < " ts " < < i ) ) ; <nl> } <nl> <nl> setupFromQuery ( BSON ( " ts " < < BSON ( " $ gte " < < 10 ) ) ) ; <nl> class OplogStartIsNewest : public Base { <nl> public : <nl> void run ( ) { <nl> for ( int i = 0 ; i < 10 ; + + i ) { <nl> - client ( ) - > insert ( ns ( ) , BSON ( " _id " < < i < < " ts " < < i ) ) ; <nl> + client ( ) - > insert ( nss . ns ( ) , BSON ( " _id " < < i < < " ts " < < i ) ) ; <nl> } <nl> <nl> setupFromQuery ( BSON ( " ts " < < BSON ( " $ gte " < < 1 ) ) ) ; <nl> class OplogStartIsNewestExtentHop : public Base { <nl> public : <nl> void run ( ) { <nl> for ( int i = 0 ; i < 10 ; + + i ) { <nl> - client ( ) - > insert ( ns ( ) , BSON ( " _id " < < i < < " ts " < < i ) ) ; <nl> + client ( ) - > insert ( nss . ns ( ) , BSON ( " _id " < < i < < " ts " < < i ) ) ; <nl> } <nl> <nl> setupFromQuery ( BSON ( " ts " < < BSON ( " $ gte " < < 1 ) ) ) ; <nl> class OplogStartIsNewestExtentHop : public Base { <nl> class SizedExtentHopBase : public Base { <nl> public : <nl> SizedExtentHopBase ( ) { <nl> - client ( ) - > dropCollection ( ns ( ) ) ; <nl> + client ( ) - > dropCollection ( nss . ns ( ) ) ; <nl> } <nl> virtual ~ SizedExtentHopBase ( ) { <nl> - client ( ) - > dropCollection ( ns ( ) ) ; <nl> + client ( ) - > dropCollection ( nss . ns ( ) ) ; <nl> } <nl> <nl> void run ( ) { <nl> class SizedExtentHopBase : public Base { <nl> void buildCollection ( ) { <nl> BSONObj info ; <nl> / / Create a collection with specified extent sizes <nl> - BSONObj command = BSON ( " create " < < collname ( ) < < " capped " < < true < < " $ nExtents " <nl> + BSONObj command = BSON ( " create " < < nss . coll ( ) < < " capped " < < true < < " $ nExtents " <nl> < < extentSizes ( ) < < " autoIndexId " < < false ) ; <nl> - ASSERT ( client ( ) - > runCommand ( dbname ( ) , command , info ) ) ; <nl> + ASSERT ( client ( ) - > runCommand ( nss . db ( ) . toString ( ) , command , info ) ) ; <nl> <nl> / / Populate documents . <nl> for ( int i = 0 ; i < numDocs ( ) ; + + i ) { <nl> - client ( ) - > insert ( ns ( ) , BSON ( " _id " < < i < < " ts " < < i < < " payload " < < payload8k ( ) ) ) ; <nl> + client ( ) - > insert ( nss . ns ( ) , BSON ( " _id " < < i < < " ts " < < i < < " payload " < < payload8k ( ) ) ) ; <nl> } <nl> } <nl> <nl> mmm a / src / mongo / dbtests / plan_ranking . cpp <nl> ppp b / src / mongo / dbtests / plan_ranking . cpp <nl> namespace PlanRankingTests { <nl> using std : : unique_ptr ; <nl> using std : : vector ; <nl> <nl> - static const char * ns = " unittests . PlanRankingTests " ; <nl> + static const NamespaceString nss ( " unittests . PlanRankingTests " ) ; <nl> <nl> class PlanRankingTestBase { <nl> public : <nl> class PlanRankingTestBase { <nl> / / Run all tests with hash - based intersection enabled . <nl> internalQueryPlannerEnableHashIntersection = true ; <nl> <nl> - OldClientWriteContext ctx ( & _txn , ns ) ; <nl> - _client . dropCollection ( ns ) ; <nl> + OldClientWriteContext ctx ( & _txn , nss . ns ( ) ) ; <nl> + _client . dropCollection ( nss . ns ( ) ) ; <nl> } <nl> <nl> virtual ~ PlanRankingTestBase ( ) { <nl> class PlanRankingTestBase { <nl> } <nl> <nl> void insert ( const BSONObj & obj ) { <nl> - OldClientWriteContext ctx ( & _txn , ns ) ; <nl> - _client . 
insert ( ns , obj ) ; <nl> + OldClientWriteContext ctx ( & _txn , nss . ns ( ) ) ; <nl> + _client . insert ( nss . ns ( ) , obj ) ; <nl> } <nl> <nl> void addIndex ( const BSONObj & obj ) { <nl> - ASSERT_OK ( dbtests : : createIndex ( & _txn , ns , obj ) ) ; <nl> + ASSERT_OK ( dbtests : : createIndex ( & _txn , nss . ns ( ) , obj ) ) ; <nl> } <nl> <nl> / * * <nl> class PlanRankingTestBase { <nl> * Does NOT take ownership of ' cq ' . Caller DOES NOT own the returned QuerySolution * . <nl> * / <nl> QuerySolution * pickBestPlan ( CanonicalQuery * cq ) { <nl> - AutoGetCollectionForRead ctx ( & _txn , ns ) ; <nl> + AutoGetCollectionForRead ctx ( & _txn , nss . ns ( ) ) ; <nl> Collection * collection = ctx . getCollection ( ) ; <nl> <nl> QueryPlannerParams plannerParams ; <nl> class PlanRankingIntersectOverride : public PlanRankingTestBase { <nl> <nl> / / Run the query { a : 4 , b : 1 } . <nl> { <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns , BSON ( " a " < < 100 < < " b " < < 1 ) ) ; <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , BSON ( " a " < < 100 < < " b " < < 1 ) ) ; <nl> verify ( statusWithCQ . isOK ( ) ) ; <nl> cq = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> ASSERT ( cq . get ( ) ) ; <nl> class PlanRankingIntersectOverride : public PlanRankingTestBase { <nl> <nl> / / And run the same query again . <nl> { <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns , BSON ( " a " < < 100 < < " b " < < 1 ) ) ; <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , BSON ( " a " < < 100 < < " b " < < 1 ) ) ; <nl> verify ( statusWithCQ . isOK ( ) ) ; <nl> cq = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> } <nl> class PlanRankingIntersectWithBackup : public PlanRankingTestBase { <nl> <nl> / / Run the query { a : 1 , b : { $ gt : 1 } . <nl> auto statusWithCQ = <nl> - CanonicalQuery : : canonicalize ( ns , BSON ( " a " < < 1 < < " b " < < BSON ( " $ gt " < < 1 ) ) ) ; <nl> + CanonicalQuery : : canonicalize ( nss , BSON ( " a " < < 1 < < " b " < < BSON ( " $ gt " < < 1 ) ) ) ; <nl> verify ( statusWithCQ . isOK ( ) ) ; <nl> unique_ptr < CanonicalQuery > cq = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> ASSERT ( NULL ! = cq . get ( ) ) ; <nl> class PlanRankingPreferCovered : public PlanRankingTestBase { <nl> <nl> / / Query for a = = 27 with projection that wants ' a ' and ' b ' . BSONObj ( ) is for sort . <nl> auto statusWithCQ = CanonicalQuery : : canonicalize ( <nl> - ns , BSON ( " a " < < 27 ) , BSONObj ( ) , BSON ( " _id " < < 0 < < " a " < < 1 < < " b " < < 1 ) ) ; <nl> + nss , BSON ( " a " < < 27 ) , BSONObj ( ) , BSON ( " _id " < < 0 < < " a " < < 1 < < " b " < < 1 ) ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> unique_ptr < CanonicalQuery > cq = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> ASSERT ( NULL ! = cq . get ( ) ) ; <nl> class PlanRankingAvoidIntersectIfNoResults : public PlanRankingTestBase { <nl> <nl> / / There is no data that matches this query but we don ' t know that until EOF . <nl> BSONObj queryObj = BSON ( " a " < < 1 < < " b " < < 1 < < " c " < < 99 ) ; <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns , queryObj ) ; <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , queryObj ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> unique_ptr < CanonicalQuery > cq = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> ASSERT ( NULL ! = cq . 
get ( ) ) ; <nl> class PlanRankingPreferCoveredEvenIfNoResults : public PlanRankingTestBase { <nl> / / returning any data . <nl> <nl> auto statusWithCQ = CanonicalQuery : : canonicalize ( <nl> - ns , BSON ( " a " < < 2 ) , BSONObj ( ) , BSON ( " _id " < < 0 < < " a " < < 1 < < " b " < < 1 ) ) ; <nl> + nss , BSON ( " a " < < 2 ) , BSONObj ( ) , BSON ( " _id " < < 0 < < " a " < < 1 < < " b " < < 1 ) ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> unique_ptr < CanonicalQuery > cq = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> ASSERT ( NULL ! = cq . get ( ) ) ; <nl> class PlanRankingPreferImmediateEOF : public PlanRankingTestBase { <nl> addIndex ( BSON ( " b " < < 1 ) ) ; <nl> <nl> / / Run the query { a : N + 1 , b : 1 } . ( No such document . ) <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns , BSON ( " a " < < N + 1 < < " b " < < 1 ) ) ; <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , BSON ( " a " < < N + 1 < < " b " < < 1 ) ) ; <nl> verify ( statusWithCQ . isOK ( ) ) ; <nl> unique_ptr < CanonicalQuery > cq = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> ASSERT ( NULL ! = cq . get ( ) ) ; <nl> class PlanRankingPreferImmediateEOFAgainstHashed : public PlanRankingTestBase { <nl> <nl> / / Run the query { a : N + 1 , b : 1 } . ( No such document . ) <nl> auto statusWithCQ = <nl> - CanonicalQuery : : canonicalize ( ns , BSON ( " a " < < BSON ( " $ gte " < < N + 1 ) < < " b " < < 1 ) ) ; <nl> + CanonicalQuery : : canonicalize ( nss , BSON ( " a " < < BSON ( " $ gte " < < N + 1 ) < < " b " < < 1 ) ) ; <nl> verify ( statusWithCQ . isOK ( ) ) ; <nl> unique_ptr < CanonicalQuery > cq = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> ASSERT ( NULL ! = cq . get ( ) ) ; <nl> class PlanRankingNoCollscan : public PlanRankingTestBase { <nl> BSONObj queryObj = BSON ( " _id " < < BSON ( " $ gte " < < 20 < < " $ lte " < < 200 ) ) ; <nl> BSONObj sortObj = BSON ( " c " < < 1 ) ; <nl> BSONObj projObj = BSONObj ( ) ; <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns , queryObj , sortObj , projObj ) ; <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , queryObj , sortObj , projObj ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> unique_ptr < CanonicalQuery > cq = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> <nl> class PlanRankingCollscan : public PlanRankingTestBase { <nl> } <nl> <nl> / / Look for A Space Odyssey . <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns , BSON ( " foo " < < 2001 ) ) ; <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , BSON ( " foo " < < 2001 ) ) ; <nl> verify ( statusWithCQ . isOK ( ) ) ; <nl> unique_ptr < CanonicalQuery > cq = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> ASSERT ( NULL ! = cq . get ( ) ) ; <nl> class PlanRankingAvoidBlockingSort : public PlanRankingTestBase { <nl> addIndex ( BSON ( " d " < < 1 < < " e " < < 1 ) ) ; <nl> <nl> / / Query : find ( { a : 1 } ) . sort ( { d : 1 } ) <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns , <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , <nl> BSON ( " a " < < 1 ) , <nl> BSON ( " d " < < 1 ) , / / sort <nl> BSONObj ( ) ) ; / / projection <nl> class PlanRankingWorkPlansLongEnough : public PlanRankingTestBase { <nl> / / results . However , an index scan on ' b ' will start producing results sooner <nl> / / than an index scan on ' a ' . 
<nl> auto statusWithCQ = <nl> - CanonicalQuery : : canonicalize ( ns , fromjson ( " { a : 1 , b : 1 , c : { $ gte : 5000 } } " ) ) ; <nl> + CanonicalQuery : : canonicalize ( nss , fromjson ( " { a : 1 , b : 1 , c : { $ gte : 5000 } } " ) ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> unique_ptr < CanonicalQuery > cq = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> ASSERT ( NULL ! = cq . get ( ) ) ; <nl> class PlanRankingAccountForKeySkips : public PlanRankingTestBase { <nl> addIndex ( BSON ( " a " < < 1 ) ) ; <nl> <nl> auto statusWithCQ = <nl> - CanonicalQuery : : canonicalize ( ns , fromjson ( " { a : 9 , b : { $ ne : 10 } , c : 9 } " ) ) ; <nl> + CanonicalQuery : : canonicalize ( nss , fromjson ( " { a : 9 , b : { $ ne : 10 } , c : 9 } " ) ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> unique_ptr < CanonicalQuery > cq = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> ASSERT ( NULL ! = cq . get ( ) ) ; <nl> mmm a / src / mongo / dbtests / query_multi_plan_runner . cpp <nl> ppp b / src / mongo / dbtests / query_multi_plan_runner . cpp <nl> using std : : unique_ptr ; <nl> using std : : vector ; <nl> using stdx : : make_unique ; <nl> <nl> + static const NamespaceString nss ( " unittests . QueryStageMultiPlanRunner " ) ; <nl> + <nl> / * * <nl> * Create query solution . <nl> * / <nl> QuerySolution * createQuerySolution ( ) { <nl> class MultiPlanRunnerBase { <nl> public : <nl> MultiPlanRunnerBase ( ) : _client ( & _txn ) { <nl> - OldClientWriteContext ctx ( & _txn , ns ( ) ) ; <nl> - _client . dropCollection ( ns ( ) ) ; <nl> + OldClientWriteContext ctx ( & _txn , nss . ns ( ) ) ; <nl> + _client . dropCollection ( nss . ns ( ) ) ; <nl> } <nl> <nl> virtual ~ MultiPlanRunnerBase ( ) { <nl> - OldClientWriteContext ctx ( & _txn , ns ( ) ) ; <nl> - _client . dropCollection ( ns ( ) ) ; <nl> + OldClientWriteContext ctx ( & _txn , nss . ns ( ) ) ; <nl> + _client . dropCollection ( nss . ns ( ) ) ; <nl> } <nl> <nl> void addIndex ( const BSONObj & obj ) { <nl> - ASSERT_OK ( dbtests : : createIndex ( & _txn , ns ( ) , obj ) ) ; <nl> + ASSERT_OK ( dbtests : : createIndex ( & _txn , nss . ns ( ) , obj ) ) ; <nl> } <nl> <nl> void insert ( const BSONObj & obj ) { <nl> - OldClientWriteContext ctx ( & _txn , ns ( ) ) ; <nl> - _client . insert ( ns ( ) , obj ) ; <nl> + OldClientWriteContext ctx ( & _txn , nss . ns ( ) ) ; <nl> + _client . insert ( nss . ns ( ) , obj ) ; <nl> } <nl> <nl> void remove ( const BSONObj & obj ) { <nl> - OldClientWriteContext ctx ( & _txn , ns ( ) ) ; <nl> - _client . remove ( ns ( ) , obj ) ; <nl> - } <nl> - <nl> - static const char * ns ( ) { <nl> - return " unittests . QueryStageMultiPlanRunner " ; <nl> + OldClientWriteContext ctx ( & _txn , nss . ns ( ) ) ; <nl> + _client . remove ( nss . ns ( ) , obj ) ; <nl> } <nl> <nl> protected : <nl> class MPRCollectionScanVsHighlySelectiveIXScan : public MultiPlanRunnerBase { <nl> <nl> addIndex ( BSON ( " foo " < < 1 ) ) ; <nl> <nl> - AutoGetCollectionForRead ctx ( & _txn , ns ( ) ) ; <nl> + AutoGetCollectionForRead ctx ( & _txn , nss . ns ( ) ) ; <nl> const Collection * coll = ctx . getCollection ( ) ; <nl> <nl> / / Plan 0 : IXScan over foo = = 7 <nl> class MPRCollectionScanVsHighlySelectiveIXScan : public MultiPlanRunnerBase { <nl> new CollectionScan ( & _txn , csparams , sharedWs . get ( ) , filter . get ( ) ) ) ; <nl> <nl> / / Hand the plans off to the runner . 
<nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns ( ) , BSON ( " foo " < < 7 ) ) ; <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , BSON ( " foo " < < 7 ) ) ; <nl> verify ( statusWithCQ . isOK ( ) ) ; <nl> unique_ptr < CanonicalQuery > cq = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> verify ( NULL ! = cq . get ( ) ) ; <nl> class MPRBackupPlan : public MultiPlanRunnerBase { <nl> addIndex ( BSON ( " a " < < 1 ) ) ; <nl> addIndex ( BSON ( " b " < < 1 ) ) ; <nl> <nl> - AutoGetCollectionForRead ctx ( & _txn , ns ( ) ) ; <nl> + AutoGetCollectionForRead ctx ( & _txn , nss . ns ( ) ) ; <nl> Collection * collection = ctx . getCollection ( ) ; <nl> <nl> / / Query for both ' a ' and ' b ' and sort on ' b ' . <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns ( ) , <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , <nl> BSON ( " a " < < 1 < < " b " < < 1 ) , / / query <nl> BSON ( " b " < < 1 ) , / / sort <nl> BSONObj ( ) ) ; / / proj <nl> mmm a / src / mongo / dbtests / query_plan_executor . cpp <nl> ppp b / src / mongo / dbtests / query_plan_executor . cpp <nl> using std : : shared_ptr ; <nl> using std : : string ; <nl> using std : : unique_ptr ; <nl> <nl> + static const NamespaceString nss ( " unittests . QueryPlanExecutor " ) ; <nl> + <nl> class PlanExecutorBase { <nl> public : <nl> PlanExecutorBase ( ) : _client ( & _txn ) { } <nl> <nl> virtual ~ PlanExecutorBase ( ) { <nl> - _client . dropCollection ( ns ( ) ) ; <nl> + _client . dropCollection ( nss . ns ( ) ) ; <nl> } <nl> <nl> void addIndex ( const BSONObj & obj ) { <nl> - ASSERT_OK ( dbtests : : createIndex ( & _txn , ns ( ) , obj ) ) ; <nl> + ASSERT_OK ( dbtests : : createIndex ( & _txn , nss . ns ( ) , obj ) ) ; <nl> } <nl> <nl> void insert ( const BSONObj & obj ) { <nl> - _client . insert ( ns ( ) , obj ) ; <nl> + _client . insert ( nss . ns ( ) , obj ) ; <nl> } <nl> <nl> void remove ( const BSONObj & obj ) { <nl> - _client . remove ( ns ( ) , obj ) ; <nl> + _client . remove ( nss . ns ( ) , obj ) ; <nl> } <nl> <nl> void dropCollection ( ) { <nl> - _client . dropCollection ( ns ( ) ) ; <nl> + _client . dropCollection ( nss . ns ( ) ) ; <nl> } <nl> <nl> void update ( BSONObj & query , BSONObj & updateSpec ) { <nl> - _client . update ( ns ( ) , query , updateSpec , false , false ) ; <nl> + _client . update ( nss . ns ( ) , query , updateSpec , false , false ) ; <nl> } <nl> <nl> / * * <nl> class PlanExecutorBase { <nl> csparams . direction = CollectionScanParams : : FORWARD ; <nl> unique_ptr < WorkingSet > ws ( new WorkingSet ( ) ) ; <nl> <nl> - / / Canonicalize the query <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns ( ) , filterObj ) ; <nl> + / / Canonicalize the query . <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , filterObj ) ; <nl> verify ( statusWithCQ . isOK ( ) ) ; <nl> unique_ptr < CanonicalQuery > cq = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> verify ( NULL ! = cq . get ( ) ) ; <nl> class PlanExecutorBase { <nl> ixparams . bounds . endKeyInclusive = true ; <nl> ixparams . direction = 1 ; <nl> <nl> - const Collection * coll = db - > getCollection ( ns ( ) ) ; <nl> + const Collection * coll = db - > getCollection ( nss . ns ( ) ) ; <nl> <nl> unique_ptr < WorkingSet > ws ( new WorkingSet ( ) ) ; <nl> IndexScan * ix = new IndexScan ( & _txn , ixparams , ws . get ( ) , NULL ) ; <nl> unique_ptr < PlanStage > root ( new FetchStage ( & _txn , ws . 
get ( ) , ix , NULL , coll ) ) ; <nl> <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns ( ) , BSONObj ( ) ) ; <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , BSONObj ( ) ) ; <nl> verify ( statusWithCQ . isOK ( ) ) ; <nl> unique_ptr < CanonicalQuery > cq = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> verify ( NULL ! = cq . get ( ) ) ; <nl> class PlanExecutorBase { <nl> return statusWithPlanExecutor . getValue ( ) . release ( ) ; <nl> } <nl> <nl> - static const char * ns ( ) { <nl> - return " unittests . QueryPlanExecutor " ; <nl> - } <nl> - <nl> size_t numCursors ( ) { <nl> - AutoGetCollectionForRead ctx ( & _txn , ns ( ) ) ; <nl> + AutoGetCollectionForRead ctx ( & _txn , nss . ns ( ) ) ; <nl> Collection * collection = ctx . getCollection ( ) ; <nl> if ( ! collection ) <nl> return 0 ; <nl> class PlanExecutorBase { <nl> <nl> void registerExec ( PlanExecutor * exec ) { <nl> / / TODO : This is not correct ( create collection under S - lock ) <nl> - AutoGetCollectionForRead ctx ( & _txn , ns ( ) ) ; <nl> + AutoGetCollectionForRead ctx ( & _txn , nss . ns ( ) ) ; <nl> WriteUnitOfWork wunit ( & _txn ) ; <nl> - Collection * collection = ctx . getDb ( ) - > getOrCreateCollection ( & _txn , ns ( ) ) ; <nl> + Collection * collection = ctx . getDb ( ) - > getOrCreateCollection ( & _txn , nss . ns ( ) ) ; <nl> collection - > getCursorManager ( ) - > registerExecutor ( exec ) ; <nl> wunit . commit ( ) ; <nl> } <nl> <nl> void deregisterExec ( PlanExecutor * exec ) { <nl> / / TODO : This is not correct ( create collection under S - lock ) <nl> - AutoGetCollectionForRead ctx ( & _txn , ns ( ) ) ; <nl> + AutoGetCollectionForRead ctx ( & _txn , nss . ns ( ) ) ; <nl> WriteUnitOfWork wunit ( & _txn ) ; <nl> - Collection * collection = ctx . getDb ( ) - > getOrCreateCollection ( & _txn , ns ( ) ) ; <nl> + Collection * collection = ctx . getDb ( ) - > getOrCreateCollection ( & _txn , nss . ns ( ) ) ; <nl> collection - > getCursorManager ( ) - > deregisterExecutor ( exec ) ; <nl> wunit . commit ( ) ; <nl> } <nl> class PlanExecutorBase { <nl> <nl> private : <nl> IndexDescriptor * getIndex ( Database * db , const BSONObj & obj ) { <nl> - Collection * collection = db - > getCollection ( ns ( ) ) ; <nl> + Collection * collection = db - > getCollection ( nss . ns ( ) ) ; <nl> return collection - > getIndexCatalog ( ) - > findIndexByKeyPattern ( & _txn , obj ) ; <nl> } <nl> <nl> class PlanExecutorBase { <nl> class DropCollScan : public PlanExecutorBase { <nl> public : <nl> void run ( ) { <nl> - OldClientWriteContext ctx ( & _txn , ns ( ) ) ; <nl> + OldClientWriteContext ctx ( & _txn , nss . ns ( ) ) ; <nl> insert ( BSON ( " _id " < < 1 ) ) ; <nl> insert ( BSON ( " _id " < < 2 ) ) ; <nl> <nl> class DropCollScan : public PlanExecutorBase { <nl> class DropIndexScan : public PlanExecutorBase { <nl> public : <nl> void run ( ) { <nl> - OldClientWriteContext ctx ( & _txn , ns ( ) ) ; <nl> + OldClientWriteContext ctx ( & _txn , nss . ns ( ) ) ; <nl> insert ( BSON ( " _id " < < 1 < < " a " < < 6 ) ) ; <nl> insert ( BSON ( " _id " < < 2 < < " a " < < 7 ) ) ; <nl> insert ( BSON ( " _id " < < 3 < < " a " < < 8 ) ) ; <nl> class DropIndexScan : public PlanExecutorBase { <nl> class DropIndexScanAgg : public PlanExecutorBase { <nl> public : <nl> void run ( ) { <nl> - OldClientWriteContext ctx ( & _txn , ns ( ) ) ; <nl> + OldClientWriteContext ctx ( & _txn , nss . 
ns ( ) ) ; <nl> <nl> insert ( BSON ( " _id " < < 1 < < " a " < < 6 ) ) ; <nl> insert ( BSON ( " _id " < < 2 < < " a " < < 7 ) ) ; <nl> class DropIndexScanAgg : public PlanExecutorBase { <nl> <nl> / / Create the aggregation pipeline . <nl> boost : : intrusive_ptr < ExpressionContext > expCtx = <nl> - new ExpressionContext ( & _txn , NamespaceString ( ns ( ) ) ) ; <nl> + new ExpressionContext ( & _txn , NamespaceString ( nss . ns ( ) ) ) ; <nl> <nl> string errmsg ; <nl> BSONObj inputBson = fromjson ( " { $ match : { a : { $ gte : 7 , $ lte : 10 } } } " ) ; <nl> class SnapshotBase : public PlanExecutorBase { <nl> class SnapshotControl : public SnapshotBase { <nl> public : <nl> void run ( ) { <nl> - OldClientWriteContext ctx ( & _txn , ns ( ) ) ; <nl> + OldClientWriteContext ctx ( & _txn , nss . ns ( ) ) ; <nl> setupCollection ( ) ; <nl> <nl> BSONObj filterObj = fromjson ( " { a : { $ gte : 2 } } " ) ; <nl> class SnapshotControl : public SnapshotBase { <nl> class SnapshotTest : public SnapshotBase { <nl> public : <nl> void run ( ) { <nl> - OldClientWriteContext ctx ( & _txn , ns ( ) ) ; <nl> + OldClientWriteContext ctx ( & _txn , nss . ns ( ) ) ; <nl> setupCollection ( ) ; <nl> BSONObj indexSpec = BSON ( " _id " < < 1 ) ; <nl> addIndex ( indexSpec ) ; <nl> using mongo : : ClientCursor ; <nl> class Invalidate : public PlanExecutorBase { <nl> public : <nl> void run ( ) { <nl> - OldClientWriteContext ctx ( & _txn , ns ( ) ) ; <nl> + OldClientWriteContext ctx ( & _txn , nss . ns ( ) ) ; <nl> insert ( BSON ( " a " < < 1 < < " b " < < 1 ) ) ; <nl> <nl> BSONObj filterObj = fromjson ( " { _id : { $ gt : 0 } , b : { $ gt : 0 } } " ) ; <nl> class Invalidate : public PlanExecutorBase { <nl> PlanExecutor * exec = makeCollScanExec ( coll , filterObj ) ; <nl> <nl> / / Make a client cursor from the runner . <nl> - new ClientCursor ( coll - > getCursorManager ( ) , exec , ns ( ) , false , 0 , BSONObj ( ) ) ; <nl> + new ClientCursor ( coll - > getCursorManager ( ) , exec , nss . ns ( ) , false , 0 , BSONObj ( ) ) ; <nl> <nl> / / There should be one cursor before invalidation , <nl> / / and zero cursors after invalidation . <nl> class Invalidate : public PlanExecutorBase { <nl> class InvalidatePinned : public PlanExecutorBase { <nl> public : <nl> void run ( ) { <nl> - OldClientWriteContext ctx ( & _txn , ns ( ) ) ; <nl> + OldClientWriteContext ctx ( & _txn , nss . ns ( ) ) ; <nl> insert ( BSON ( " a " < < 1 < < " b " < < 1 ) ) ; <nl> <nl> Collection * collection = ctx . getCollection ( ) ; <nl> class InvalidatePinned : public PlanExecutorBase { <nl> <nl> / / Make a client cursor from the runner . <nl> ClientCursor * cc = <nl> - new ClientCursor ( collection - > getCursorManager ( ) , exec , ns ( ) , false , 0 , BSONObj ( ) ) ; <nl> + new ClientCursor ( collection - > getCursorManager ( ) , exec , nss . ns ( ) , false , 0 , BSONObj ( ) ) ; <nl> ClientCursorPin ccPin ( collection - > getCursorManager ( ) , cc - > cursorid ( ) ) ; <nl> <nl> / / If the cursor is pinned , it sticks around , <nl> class Timeout : public PlanExecutorBase { <nl> public : <nl> void run ( ) { <nl> { <nl> - OldClientWriteContext ctx ( & _txn , ns ( ) ) ; <nl> + OldClientWriteContext ctx ( & _txn , nss . ns ( ) ) ; <nl> insert ( BSON ( " a " < < 1 < < " b " < < 1 ) ) ; <nl> } <nl> <nl> { <nl> - AutoGetCollectionForRead ctx ( & _txn , ns ( ) ) ; <nl> + AutoGetCollectionForRead ctx ( & _txn , nss . ns ( ) ) ; <nl> Collection * collection = ctx . 
getCollection ( ) ; <nl> <nl> BSONObj filterObj = fromjson ( " { _id : { $ gt : 0 } , b : { $ gt : 0 } } " ) ; <nl> PlanExecutor * exec = makeCollScanExec ( collection , filterObj ) ; <nl> <nl> / / Make a client cursor from the runner . <nl> - new ClientCursor ( collection - > getCursorManager ( ) , exec , ns ( ) , false , 0 , BSONObj ( ) ) ; <nl> + new ClientCursor ( collection - > getCursorManager ( ) , exec , nss . ns ( ) , false , 0 , BSONObj ( ) ) ; <nl> } <nl> <nl> / / There should be one cursor before timeout , <nl> mmm a / src / mongo / dbtests / query_stage_cached_plan . cpp <nl> ppp b / src / mongo / dbtests / query_stage_cached_plan . cpp <nl> <nl> <nl> namespace QueryStageCachedPlan { <nl> <nl> + static const NamespaceString nss ( " unittests . QueryStageCachedPlan " ) ; <nl> + <nl> class QueryStageCachedPlanBase { <nl> public : <nl> QueryStageCachedPlanBase ( ) { <nl> class QueryStageCachedPlanBase { <nl> addIndex ( BSON ( " a " < < 1 ) ) ; <nl> addIndex ( BSON ( " b " < < 1 ) ) ; <nl> <nl> - OldClientWriteContext ctx ( & _txn , ns ( ) ) ; <nl> + OldClientWriteContext ctx ( & _txn , nss . ns ( ) ) ; <nl> Collection * collection = ctx . getCollection ( ) ; <nl> ASSERT ( collection ) ; <nl> <nl> class QueryStageCachedPlanBase { <nl> } <nl> <nl> void addIndex ( const BSONObj & obj ) { <nl> - ASSERT_OK ( dbtests : : createIndex ( & _txn , ns ( ) , obj ) ) ; <nl> + ASSERT_OK ( dbtests : : createIndex ( & _txn , nss . ns ( ) , obj ) ) ; <nl> } <nl> <nl> void dropCollection ( ) { <nl> - const NamespaceString nsString ( ns ( ) ) ; <nl> ScopedTransaction transaction ( & _txn , MODE_X ) ; <nl> - Lock : : DBLock dbLock ( _txn . lockState ( ) , nsString . db ( ) , MODE_X ) ; <nl> - Database * database = dbHolder ( ) . get ( & _txn , nsString . db ( ) ) ; <nl> + Lock : : DBLock dbLock ( _txn . lockState ( ) , nss . db ( ) , MODE_X ) ; <nl> + Database * database = dbHolder ( ) . get ( & _txn , nss . db ( ) ) ; <nl> if ( ! database ) { <nl> return ; <nl> } <nl> <nl> WriteUnitOfWork wuow ( & _txn ) ; <nl> - database - > dropCollection ( & _txn , ns ( ) ) ; <nl> + database - > dropCollection ( & _txn , nss . ns ( ) ) ; <nl> wuow . commit ( ) ; <nl> } <nl> <nl> class QueryStageCachedPlanBase { <nl> wuow . commit ( ) ; <nl> } <nl> <nl> - static const char * ns ( ) { <nl> - return " unittests . QueryStageCachedPlan " ; <nl> - } <nl> - <nl> protected : <nl> OperationContextImpl _txn ; <nl> WorkingSet _ws ; <nl> class QueryStageCachedPlanBase { <nl> class QueryStageCachedPlanFailure : public QueryStageCachedPlanBase { <nl> public : <nl> void run ( ) { <nl> - AutoGetCollectionForRead ctx ( & _txn , ns ( ) ) ; <nl> + AutoGetCollectionForRead ctx ( & _txn , nss . ns ( ) ) ; <nl> Collection * collection = ctx . getCollection ( ) ; <nl> ASSERT ( collection ) ; <nl> <nl> / / Query can be answered by either index on " a " or index on " b " . <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns ( ) , fromjson ( " { a : { $ gte : 8 } , b : 1 } " ) ) ; <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , fromjson ( " { a : { $ gte : 8 } , b : 1 } " ) ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> const std : : unique_ptr < CanonicalQuery > cq = std : : move ( statusWithCQ . 
getValue ( ) ) ; <nl> <nl> class QueryStageCachedPlanFailure : public QueryStageCachedPlanBase { <nl> class QueryStageCachedPlanHitMaxWorks : public QueryStageCachedPlanBase { <nl> public : <nl> void run ( ) { <nl> - AutoGetCollectionForRead ctx ( & _txn , ns ( ) ) ; <nl> + AutoGetCollectionForRead ctx ( & _txn , nss . ns ( ) ) ; <nl> Collection * collection = ctx . getCollection ( ) ; <nl> ASSERT ( collection ) ; <nl> <nl> / / Query can be answered by either index on " a " or index on " b " . <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns ( ) , fromjson ( " { a : { $ gte : 8 } , b : 1 } " ) ) ; <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , fromjson ( " { a : { $ gte : 8 } , b : 1 } " ) ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> const std : : unique_ptr < CanonicalQuery > cq = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> <nl> mmm a / src / mongo / dbtests / query_stage_delete . cpp <nl> ppp b / src / mongo / dbtests / query_stage_delete . cpp <nl> namespace QueryStageDelete { <nl> using std : : unique_ptr ; <nl> using std : : vector ; <nl> <nl> + static const NamespaceString nss ( " unittests . QueryStageDelete " ) ; <nl> + <nl> / / <nl> / / Stage - specific tests . <nl> / / <nl> using std : : vector ; <nl> class QueryStageDeleteBase { <nl> public : <nl> QueryStageDeleteBase ( ) : _client ( & _txn ) { <nl> - OldClientWriteContext ctx ( & _txn , ns ( ) ) ; <nl> + OldClientWriteContext ctx ( & _txn , nss . ns ( ) ) ; <nl> <nl> for ( size_t i = 0 ; i < numObj ( ) ; + + i ) { <nl> BSONObjBuilder bob ; <nl> bob . append ( " _id " , static_cast < long long int > ( i ) ) ; <nl> bob . append ( " foo " , static_cast < long long int > ( i ) ) ; <nl> - _client . insert ( ns ( ) , bob . obj ( ) ) ; <nl> + _client . insert ( nss . ns ( ) , bob . obj ( ) ) ; <nl> } <nl> } <nl> <nl> virtual ~ QueryStageDeleteBase ( ) { <nl> - OldClientWriteContext ctx ( & _txn , ns ( ) ) ; <nl> - _client . dropCollection ( ns ( ) ) ; <nl> + OldClientWriteContext ctx ( & _txn , nss . ns ( ) ) ; <nl> + _client . dropCollection ( nss . ns ( ) ) ; <nl> } <nl> <nl> void remove ( const BSONObj & obj ) { <nl> - _client . remove ( ns ( ) , obj ) ; <nl> + _client . remove ( nss . ns ( ) , obj ) ; <nl> } <nl> <nl> void getLocs ( Collection * collection , <nl> class QueryStageDeleteBase { <nl> } <nl> <nl> unique_ptr < CanonicalQuery > canonicalize ( const BSONObj & query ) { <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns ( ) , query ) ; <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , query ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> return std : : move ( statusWithCQ . getValue ( ) ) ; <nl> } <nl> class QueryStageDeleteBase { <nl> return 50 ; <nl> } <nl> <nl> - static const char * ns ( ) { <nl> - return " unittests . QueryStageDelete " ; <nl> - } <nl> - <nl> protected : <nl> OperationContextImpl _txn ; <nl> <nl> class QueryStageDeleteBase { <nl> class QueryStageDeleteInvalidateUpcomingObject : public QueryStageDeleteBase { <nl> public : <nl> void run ( ) { <nl> - OldClientWriteContext ctx ( & _txn , ns ( ) ) ; <nl> + OldClientWriteContext ctx ( & _txn , nss . ns ( ) ) ; <nl> <nl> Collection * coll = ctx . getCollection ( ) ; <nl> <nl> class QueryStageDeleteReturnOldDoc : public QueryStageDeleteBase { <nl> public : <nl> void run ( ) { <nl> / / Various variables we ' ll need . <nl> - OldClientWriteContext ctx ( & _txn , ns ( ) ) ; <nl> + OldClientWriteContext ctx ( & _txn , nss . ns ( ) ) ; <nl> Collection * coll = ctx . 
getCollection ( ) ; <nl> - const NamespaceString nss ( ns ( ) ) ; <nl> const int targetDocIndex = 0 ; <nl> const BSONObj query = BSON ( " foo " < < BSON ( " $ gte " < < targetDocIndex ) ) ; <nl> const unique_ptr < WorkingSet > ws ( stdx : : make_unique < WorkingSet > ( ) ) ; <nl> class QueryStageDeleteSkipOwnedObjects : public QueryStageDeleteBase { <nl> public : <nl> void run ( ) { <nl> / / Various variables we ' ll need . <nl> - OldClientWriteContext ctx ( & _txn , ns ( ) ) ; <nl> + OldClientWriteContext ctx ( & _txn , nss . ns ( ) ) ; <nl> Collection * coll = ctx . getCollection ( ) ; <nl> const BSONObj query = BSONObj ( ) ; <nl> const unique_ptr < WorkingSet > ws ( stdx : : make_unique < WorkingSet > ( ) ) ; <nl> mmm a / src / mongo / dbtests / query_stage_subplan . cpp <nl> ppp b / src / mongo / dbtests / query_stage_subplan . cpp <nl> <nl> <nl> namespace QueryStageSubplan { <nl> <nl> + static const NamespaceString nss ( " unittests . QueryStageSubplan " ) ; <nl> + <nl> class QueryStageSubplanBase { <nl> public : <nl> QueryStageSubplanBase ( ) : _client ( & _txn ) { } <nl> <nl> virtual ~ QueryStageSubplanBase ( ) { <nl> - OldClientWriteContext ctx ( & _txn , ns ( ) ) ; <nl> - _client . dropCollection ( ns ( ) ) ; <nl> + OldClientWriteContext ctx ( & _txn , nss . ns ( ) ) ; <nl> + _client . dropCollection ( nss . ns ( ) ) ; <nl> } <nl> <nl> void addIndex ( const BSONObj & obj ) { <nl> - ASSERT_OK ( dbtests : : createIndex ( & _txn , ns ( ) , obj ) ) ; <nl> + ASSERT_OK ( dbtests : : createIndex ( & _txn , nss . ns ( ) , obj ) ) ; <nl> } <nl> <nl> void insert ( const BSONObj & doc ) { <nl> - _client . insert ( ns ( ) , doc ) ; <nl> - } <nl> - <nl> - static const char * ns ( ) { <nl> - return " unittests . QueryStageSubplan " ; <nl> + _client . insert ( nss . ns ( ) , doc ) ; <nl> } <nl> <nl> protected : <nl> class QueryStageSubplanBase { <nl> std : : unique_ptr < CanonicalQuery > cqFromFindCommand ( const std : : string & findCmd ) { <nl> BSONObj cmdObj = fromjson ( findCmd ) ; <nl> <nl> - const NamespaceString nss ( " testns . testcoll " ) ; <nl> bool isExplain = false ; <nl> auto lpq = <nl> unittest : : assertGet ( LiteParsedQuery : : makeFromFindCommand ( nss , cmdObj , isExplain ) ) ; <nl> class QueryStageSubplanBase { <nl> class QueryStageSubplanGeo2dOr : public QueryStageSubplanBase { <nl> public : <nl> void run ( ) { <nl> - OldClientWriteContext ctx ( & _txn , ns ( ) ) ; <nl> + OldClientWriteContext ctx ( & _txn , nss . ns ( ) ) ; <nl> addIndex ( BSON ( " a " <nl> < < " 2d " <nl> < < " b " < < 1 ) ) ; <nl> class QueryStageSubplanGeo2dOr : public QueryStageSubplanBase { <nl> " { $ or : [ { a : { $ geoWithin : { $ centerSphere : [ [ 0 , 0 ] , 10 ] } } } , " <nl> " { a : { $ geoWithin : { $ centerSphere : [ [ 1 , 1 ] , 10 ] } } } ] } " ) ; <nl> <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns ( ) , query ) ; <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , query ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> std : : unique_ptr < CanonicalQuery > cq = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> <nl> class QueryStageSubplanGeo2dOr : public QueryStageSubplanBase { <nl> class QueryStageSubplanPlanFromCache : public QueryStageSubplanBase { <nl> public : <nl> void run ( ) { <nl> - OldClientWriteContext ctx ( & _txn , ns ( ) ) ; <nl> + OldClientWriteContext ctx ( & _txn , nss . 
ns ( ) ) ; <nl> <nl> addIndex ( BSON ( " a " < < 1 ) ) ; <nl> addIndex ( BSON ( " a " < < 1 < < " b " < < 1 ) ) ; <nl> class QueryStageSubplanPlanFromCache : public QueryStageSubplanBase { <nl> <nl> Collection * collection = ctx . getCollection ( ) ; <nl> <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns ( ) , query ) ; <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , query ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> std : : unique_ptr < CanonicalQuery > cq = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> <nl> class QueryStageSubplanRewriteToRootedOr : public QueryStageSubplanBase { <nl> class QueryStageSubplanPlanContainedOr : public QueryStageSubplanBase { <nl> public : <nl> void run ( ) { <nl> - OldClientWriteContext ctx ( & _txn , ns ( ) ) ; <nl> + OldClientWriteContext ctx ( & _txn , nss . ns ( ) ) ; <nl> addIndex ( BSON ( " b " < < 1 < < " a " < < 1 ) ) ; <nl> addIndex ( BSON ( " c " < < 1 < < " a " < < 1 ) ) ; <nl> <nl> class QueryStageSubplanPlanContainedOr : public QueryStageSubplanBase { <nl> insert ( BSON ( " _id " < < 3 < < " a " < < 1 < < " c " < < 3 ) ) ; <nl> insert ( BSON ( " _id " < < 4 < < " a " < < 1 < < " c " < < 4 ) ) ; <nl> <nl> - auto cq = unittest : : assertGet ( CanonicalQuery : : canonicalize ( ns ( ) , query ) ) ; <nl> + auto cq = unittest : : assertGet ( CanonicalQuery : : canonicalize ( nss , query ) ) ; <nl> <nl> Collection * collection = ctx . getCollection ( ) ; <nl> <nl> class QueryStageSubplanPlanContainedOr : public QueryStageSubplanBase { <nl> class QueryStageSubplanPlanRootedOrNE : public QueryStageSubplanBase { <nl> public : <nl> void run ( ) { <nl> - OldClientWriteContext ctx ( & _txn , ns ( ) ) ; <nl> + OldClientWriteContext ctx ( & _txn , nss . ns ( ) ) ; <nl> addIndex ( BSON ( " a " < < 1 < < " b " < < 1 ) ) ; <nl> addIndex ( BSON ( " a " < < 1 < < " c " < < 1 ) ) ; <nl> <nl> class QueryStageSubplanPlanRootedOrNE : public QueryStageSubplanBase { <nl> BSONObj query = fromjson ( " { $ or : [ { a : 1 } , { a : { $ ne : 1 } } ] } " ) ; <nl> BSONObj sort = BSON ( " d " < < 1 ) ; <nl> BSONObj projection ; <nl> - auto cq = unittest : : assertGet ( CanonicalQuery : : canonicalize ( ns ( ) , query , sort , projection ) ) ; <nl> + auto cq = unittest : : assertGet ( CanonicalQuery : : canonicalize ( nss , query , sort , projection ) ) ; <nl> <nl> Collection * collection = ctx . getCollection ( ) ; <nl> <nl> mmm a / src / mongo / dbtests / query_stage_update . cpp <nl> ppp b / src / mongo / dbtests / query_stage_update . cpp <nl> namespace QueryStageUpdate { <nl> using std : : unique_ptr ; <nl> using std : : vector ; <nl> <nl> + static const NamespaceString nss ( " unittests . QueryStageUpdate " ) ; <nl> + <nl> class QueryStageUpdateBase { <nl> public : <nl> - QueryStageUpdateBase ( ) <nl> - : _client ( & _txn ) , _ns ( " unittests . QueryStageUpdate " ) , _nsString ( StringData ( ns ( ) ) ) { <nl> - OldClientWriteContext ctx ( & _txn , ns ( ) ) ; <nl> - _client . dropCollection ( ns ( ) ) ; <nl> - _client . createCollection ( ns ( ) ) ; <nl> + QueryStageUpdateBase ( ) : _client ( & _txn ) { <nl> + OldClientWriteContext ctx ( & _txn , nss . ns ( ) ) ; <nl> + _client . dropCollection ( nss . ns ( ) ) ; <nl> + _client . createCollection ( nss . ns ( ) ) ; <nl> } <nl> <nl> virtual ~ QueryStageUpdateBase ( ) { <nl> - OldClientWriteContext ctx ( & _txn , ns ( ) ) ; <nl> - _client . dropCollection ( ns ( ) ) ; <nl> + OldClientWriteContext ctx ( & _txn , nss . ns ( ) ) ; <nl> + _client . dropCollection ( nss . 
ns ( ) ) ; <nl> } <nl> <nl> void insert ( const BSONObj & doc ) { <nl> - _client . insert ( ns ( ) , doc ) ; <nl> + _client . insert ( nss . ns ( ) , doc ) ; <nl> } <nl> <nl> void remove ( const BSONObj & obj ) { <nl> - _client . remove ( ns ( ) , obj ) ; <nl> + _client . remove ( nss . ns ( ) , obj ) ; <nl> } <nl> <nl> size_t count ( const BSONObj & query ) { <nl> - return _client . count ( ns ( ) , query , 0 , 0 , 0 ) ; <nl> + return _client . count ( nss . ns ( ) , query , 0 , 0 , 0 ) ; <nl> } <nl> <nl> unique_ptr < CanonicalQuery > canonicalize ( const BSONObj & query ) { <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( ns ( ) , query ) ; <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , query ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> return std : : move ( statusWithCQ . getValue ( ) ) ; <nl> } <nl> class QueryStageUpdateBase { <nl> ASSERT ( foundDoc ) ; <nl> } <nl> <nl> - const char * ns ( ) { <nl> - return _ns . c_str ( ) ; <nl> - } <nl> - <nl> - const NamespaceString & nsString ( ) { <nl> - return _nsString ; <nl> - } <nl> - <nl> protected : <nl> OperationContextImpl _txn ; <nl> <nl> private : <nl> DBDirectClient _client ; <nl> - <nl> - std : : string _ns ; <nl> - NamespaceString _nsString ; <nl> } ; <nl> <nl> / * * <nl> class QueryStageUpdateUpsertEmptyColl : public QueryStageUpdateBase { <nl> void run ( ) { <nl> / / Run the update . <nl> { <nl> - OldClientWriteContext ctx ( & _txn , ns ( ) ) ; <nl> + OldClientWriteContext ctx ( & _txn , nss . ns ( ) ) ; <nl> CurOp & curOp = * CurOp : : get ( _txn ) ; <nl> OpDebug * opDebug = & curOp . debug ( ) ; <nl> UpdateDriver driver ( ( UpdateDriver : : Options ( ) ) ) ; <nl> class QueryStageUpdateUpsertEmptyColl : public QueryStageUpdateBase { <nl> / / Collection should be empty . <nl> ASSERT_EQUALS ( 0U , count ( BSONObj ( ) ) ) ; <nl> <nl> - UpdateRequest request ( nsString ( ) ) ; <nl> - UpdateLifecycleImpl updateLifecycle ( false , nsString ( ) ) ; <nl> + UpdateRequest request ( nss ) ; <nl> + UpdateLifecycleImpl updateLifecycle ( false , nss ) ; <nl> request . setLifecycle ( & updateLifecycle ) ; <nl> <nl> / / Update is the upsert { _id : 0 , x : 1 } , { $ set : { y : 2 } } . <nl> class QueryStageUpdateUpsertEmptyColl : public QueryStageUpdateBase { <nl> <nl> / / Verify the contents of the resulting collection . <nl> { <nl> - AutoGetCollectionForRead ctx ( & _txn , ns ( ) ) ; <nl> + AutoGetCollectionForRead ctx ( & _txn , nss . ns ( ) ) ; <nl> Collection * collection = ctx . getCollection ( ) ; <nl> <nl> vector < BSONObj > objs ; <nl> class QueryStageUpdateSkipInvalidatedDoc : public QueryStageUpdateBase { <nl> void run ( ) { <nl> / / Run the update . <nl> { <nl> - OldClientWriteContext ctx ( & _txn , ns ( ) ) ; <nl> + OldClientWriteContext ctx ( & _txn , nss . ns ( ) ) ; <nl> <nl> / / Populate the collection . <nl> for ( int i = 0 ; i < 10 ; + + i ) { <nl> class QueryStageUpdateSkipInvalidatedDoc : public QueryStageUpdateBase { <nl> OpDebug * opDebug = & curOp . debug ( ) ; <nl> UpdateDriver driver ( ( UpdateDriver : : Options ( ) ) ) ; <nl> Database * db = ctx . db ( ) ; <nl> - Collection * coll = db - > getCollection ( ns ( ) ) ; <nl> + Collection * coll = db - > getCollection ( nss . ns ( ) ) ; <nl> <nl> / / Get the RecordIds that would be returned by an in - order scan . 
<nl> vector < RecordId > locs ; <nl> getLocs ( coll , CollectionScanParams : : FORWARD , & locs ) ; <nl> <nl> - UpdateRequest request ( nsString ( ) ) ; <nl> - UpdateLifecycleImpl updateLifecycle ( false , nsString ( ) ) ; <nl> + UpdateRequest request ( nss ) ; <nl> + UpdateLifecycleImpl updateLifecycle ( false , nss ) ; <nl> request . setLifecycle ( & updateLifecycle ) ; <nl> <nl> / / Update is a multi - update that sets ' bar ' to 3 in every document <nl> class QueryStageUpdateSkipInvalidatedDoc : public QueryStageUpdateBase { <nl> <nl> / / Check the contents of the collection . <nl> { <nl> - AutoGetCollectionForRead ctx ( & _txn , ns ( ) ) ; <nl> + AutoGetCollectionForRead ctx ( & _txn , nss . ns ( ) ) ; <nl> Collection * collection = ctx . getCollection ( ) ; <nl> <nl> vector < BSONObj > objs ; <nl> class QueryStageUpdateReturnOldDoc : public QueryStageUpdateBase { <nl> ASSERT_EQUALS ( 10U , count ( BSONObj ( ) ) ) ; <nl> <nl> / / Various variables we ' ll need . <nl> - OldClientWriteContext ctx ( & _txn , ns ( ) ) ; <nl> + OldClientWriteContext ctx ( & _txn , nss . ns ( ) ) ; <nl> OpDebug * opDebug = & CurOp : : get ( _txn ) - > debug ( ) ; <nl> Collection * coll = ctx . getCollection ( ) ; <nl> - UpdateLifecycleImpl updateLifecycle ( false , nsString ( ) ) ; <nl> - UpdateRequest request ( nsString ( ) ) ; <nl> + UpdateLifecycleImpl updateLifecycle ( false , nss ) ; <nl> + UpdateRequest request ( nss ) ; <nl> UpdateDriver driver ( ( UpdateDriver : : Options ( ) ) ) ; <nl> const int targetDocIndex = 0 ; / / We ' ll be working with the first doc in the collection . <nl> const BSONObj query = BSON ( " foo " < < BSON ( " $ gte " < < targetDocIndex ) ) ; <nl> class QueryStageUpdateReturnNewDoc : public QueryStageUpdateBase { <nl> ASSERT_EQUALS ( 50U , count ( BSONObj ( ) ) ) ; <nl> <nl> / / Various variables we ' ll need . <nl> - OldClientWriteContext ctx ( & _txn , ns ( ) ) ; <nl> + OldClientWriteContext ctx ( & _txn , nss . ns ( ) ) ; <nl> OpDebug * opDebug = & CurOp : : get ( _txn ) - > debug ( ) ; <nl> Collection * coll = ctx . getCollection ( ) ; <nl> - UpdateLifecycleImpl updateLifecycle ( false , nsString ( ) ) ; <nl> - UpdateRequest request ( nsString ( ) ) ; <nl> + UpdateLifecycleImpl updateLifecycle ( false , nss ) ; <nl> + UpdateRequest request ( nss ) ; <nl> UpdateDriver driver ( ( UpdateDriver : : Options ( ) ) ) ; <nl> const int targetDocIndex = 10 ; <nl> const BSONObj query = BSON ( " foo " < < BSON ( " $ gte " < < targetDocIndex ) ) ; <nl> class QueryStageUpdateSkipOwnedObjects : public QueryStageUpdateBase { <nl> public : <nl> void run ( ) { <nl> / / Various variables we ' ll need . <nl> - OldClientWriteContext ctx ( & _txn , ns ( ) ) ; <nl> + OldClientWriteContext ctx ( & _txn , nss . ns ( ) ) ; <nl> OpDebug * opDebug = & CurOp : : get ( _txn ) - > debug ( ) ; <nl> Collection * coll = ctx . getCollection ( ) ; <nl> - UpdateLifecycleImpl updateLifecycle ( false , nsString ( ) ) ; <nl> - UpdateRequest request ( nsString ( ) ) ; <nl> + UpdateLifecycleImpl updateLifecycle ( false , nss ) ; <nl> + UpdateRequest request ( nss ) ; <nl> UpdateDriver driver ( ( UpdateDriver : : Options ( ) ) ) ; <nl> const BSONObj query = BSONObj ( ) ; <nl> const unique_ptr < WorkingSet > ws ( stdx : : make_unique < WorkingSet > ( ) ) ; <nl> mmm a / src / mongo / s / chunk_manager . cpp <nl> ppp b / src / mongo / s / chunk_manager . 
cpp <nl> ChunkPtr ChunkManager : : findIntersectingChunk ( const BSONObj & shardKey ) const { <nl> } <nl> <nl> void ChunkManager : : getShardIdsForQuery ( set < ShardId > & shardIds , const BSONObj & query ) const { <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( _ns , query , WhereCallbackNoop ( ) ) ; <nl> + auto statusWithCQ = <nl> + CanonicalQuery : : canonicalize ( NamespaceString ( _ns ) , query , WhereCallbackNoop ( ) ) ; <nl> <nl> uassertStatusOK ( statusWithCQ . getStatus ( ) ) ; <nl> unique_ptr < CanonicalQuery > cq = std : : move ( statusWithCQ . getValue ( ) ) ; <nl> mmm a / src / mongo / s / chunk_manager_targeter_test . cpp <nl> ppp b / src / mongo / s / chunk_manager_targeter_test . cpp <nl> using std : : make_pair ; <nl> / / Utility function to create a CanonicalQuery <nl> unique_ptr < CanonicalQuery > canonicalize ( const char * queryStr ) { <nl> BSONObj queryObj = fromjson ( queryStr ) ; <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( " test . foo " , queryObj , WhereCallbackNoop ( ) ) ; <nl> + const NamespaceString nss ( " test . foo " ) ; <nl> + auto statusWithCQ = CanonicalQuery : : canonicalize ( nss , queryObj , WhereCallbackNoop ( ) ) ; <nl> ASSERT_OK ( statusWithCQ . getStatus ( ) ) ; <nl> return std : : move ( statusWithCQ . getValue ( ) ) ; <nl> } <nl> mmm a / src / mongo / s / shard_key_pattern . cpp <nl> ppp b / src / mongo / s / shard_key_pattern . cpp <nl> StatusWith < BSONObj > ShardKeyPattern : : extractShardKeyFromQuery ( const BSONObj & bas <nl> return StatusWith < BSONObj > ( BSONObj ( ) ) ; <nl> <nl> / / Extract equalities from query <nl> - auto statusWithCQ = CanonicalQuery : : canonicalize ( " " , basicQuery , WhereCallbackNoop ( ) ) ; <nl> + auto statusWithCQ = <nl> + CanonicalQuery : : canonicalize ( NamespaceString ( " " ) , basicQuery , WhereCallbackNoop ( ) ) ; <nl> if ( ! statusWithCQ . isOK ( ) ) { <nl> return StatusWith < BSONObj > ( statusWithCQ . getStatus ( ) ) ; <nl> } <nl> | SERVER - 19235 CQ : : canonicalize ( ) to use NamespaceString rather than std : : string | mongodb/mongo | 5b759f3b3b704c93c59173789e393fcad6ee48f7 | 2015-07-27T21:12:16Z |
mmm a / aten / src / ATen / native / quantized / cpu / qnnpack_utils . h <nl> ppp b / aten / src / ATen / native / quantized / cpu / qnnpack_utils . h <nl> std : : vector < float > generate_requantization_scales ( <nl> requant_scales . resize ( num_output_channels_padded ) ; <nl> } <nl> for ( int i = 0 ; i < num_output_channels_padded ; + + i ) { <nl> - requant_scales [ i ] = weight_scales_data [ i ] * input_scale / output_scale ; <nl> + auto inverse_output_scale = 1 . f / output_scale ; <nl> + requant_scales [ i ] = ( weight_scales_data [ i ] * input_scale ) * inverse_output_scale ; <nl> TORCH_CHECK ( <nl> ( requant_scales [ i ] > 0 . 0f & & std : : isnormal ( requant_scales [ i ] ) ) , <nl> " failed to create op with requantization scale : " , <nl> | Calculate inverse of output scale first . ( ) | pytorch/pytorch | f0fd1cc873ff9fdca49205347ec0752c6ec66b25 | 2020-08-03T21:45:08Z |
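The qnnpack change in the row above hoists the division by `output_scale` out of the per-channel loop by multiplying with a precomputed reciprocal. A self-contained sketch of that idea, using hypothetical names rather than PyTorch's actual helpers:

```cpp
#include <cmath>
#include <stdexcept>
#include <vector>

// Hypothetical helper: per-channel requantization scales. The single division
// is done once up front; the loop then only multiplies, mirroring the
// inverse_output_scale variable introduced in the diff above.
std::vector<float> RequantizationScales(const std::vector<float>& weight_scales,
                                        float input_scale, float output_scale) {
  const float inverse_output_scale = 1.f / output_scale;
  std::vector<float> scales(weight_scales.size());
  for (size_t i = 0; i < weight_scales.size(); ++i) {
    scales[i] = (weight_scales[i] * input_scale) * inverse_output_scale;
    if (!(scales[i] > 0.0f && std::isnormal(scales[i])))
      throw std::runtime_error("requantization scale out of range");
  }
  return scales;
}
```

Note that `(a * b) * (1/c)` can round slightly differently from `(a * b) / c`; presumably that is acceptable here since the value only seeds a requantization parameter.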
mmm a / src / mongo / db / storage / kv / kv_storage_engine . cpp <nl> ppp b / src / mongo / db / storage / kv / kv_storage_engine . cpp <nl> Status KVStorageEngine : : dropDatabase ( OperationContext * opCtx , StringData db ) { <nl> std : : list < std : : string > toDrop ; <nl> entry - > getCollectionNamespaces ( & toDrop ) ; <nl> <nl> - / / Partition ` toDrop ` into ranges of ` [ untimestampedCollections . . . , <nl> - / / timestampedCollections . . . ] ` . All timestamped collections must have already been renamed to <nl> - / / a drop - pending namespace . Running without replication treats all collections as not <nl> - / / timestamped . <nl> - auto untimestampedDropsEnd = <nl> - std : : partition ( toDrop . begin ( ) , toDrop . end ( ) , [ ] ( const std : : string & dropNs ) { <nl> - return ! NamespaceString ( dropNs ) . isDropPendingNamespace ( ) ; <nl> - } ) ; <nl> - <nl> - / / The primary caller ( ` DatabaseImpl : : dropDatabase ` ) of this method currently <nl> - / / ` transitional_ignore ` s the result . To minimize the impact of that , while also returning a <nl> - / / correct status , attempt to drop every collection , and if there were any errors , return the <nl> - / / first one . <nl> - Status firstError = Status : : OK ( ) ; <nl> - <nl> - / / First drop the " non - timestamped " collections . " Non - timestamped " collections such as user <nl> - / / collections in ` local ` or ` system . profile ` do not get rolled back . This means we also <nl> - / / should not rollback their creation or deletion . To achieve that , the code takes care to <nl> - / / suppress any timestamping state . <nl> - firstError = _dropCollectionsNoTimestamp ( opCtx , entry , toDrop . begin ( ) , untimestampedDropsEnd ) ; <nl> - <nl> - / / Now drop any leftover timestamped collections ( i . e : not already dropped by the reaper ) . On <nl> - / / secondaries there is already a ` commit timestamp ` set and these drops inherit the timestamp <nl> - / / of the ` dropDatabase ` oplog entry . On primaries , these writes are allowed to be processed <nl> - / / without a timestamp as these are , logically , behind the majority commit point . This method <nl> - / / will enforce that all remaining collections were moved to a drop - pending namespace . <nl> - / / <nl> - / / Additionally , before returning , this method will remove the ` KVDatabaseCatalogEntry ` from <nl> - / / the ` _dbs ` map . This action creates a new constraint that this " timestamped drop " method <nl> - / / must happen after the " non - timestamped drops " . <nl> - auto status = <nl> - _dropCollectionsWithTimestamp ( opCtx , entry , toDrop , untimestampedDropsEnd , toDrop . end ( ) ) ; <nl> - if ( firstError . isOK ( ) ) { <nl> - firstError = status ; <nl> - } <nl> - <nl> - return firstError ; <nl> + / / Do not timestamp any of the following writes . This will remove entries from the catalog as <nl> + / / well as drop any underlying tables . It ' s not expected for dropping tables to be reversible <nl> + / / on crash / recoverToStableTimestamp . <nl> + return _dropCollectionsNoTimestamp ( opCtx , entry , toDrop . begin ( ) , toDrop . 
end ( ) ) ; <nl> } <nl> <nl> / * * <nl> Status KVStorageEngine : : _dropCollectionsNoTimestamp ( OperationContext * opCtx , <nl> return firstError ; <nl> } <nl> <nl> - Status KVStorageEngine : : _dropCollectionsWithTimestamp ( OperationContext * opCtx , <nl> - KVDatabaseCatalogEntryBase * dbce , <nl> - std : : list < std : : string > & toDrop , <nl> - CollIter begin , <nl> - CollIter end ) { <nl> - / / This method does not enforce any timestamping rules for the writes that remove collections <nl> - / / from the catalog . <nl> - / / <nl> - / / This is called outside of a WUOW since MMAPv1 has unfortunate behavior around dropping <nl> - / / databases . We need to create one here since we want db dropping to all - or - nothing <nl> - / / wherever possible . Eventually we want to move this up so that it can include the logOp <nl> - / / inside of the WUOW , but that would require making DB dropping happen inside the Dur <nl> - / / system for MMAPv1 . <nl> - WriteUnitOfWork wuow ( opCtx ) ; <nl> - <nl> - Status firstError = Status : : OK ( ) ; <nl> - for ( auto toDropStr = begin ; toDropStr ! = toDrop . end ( ) ; + + toDropStr ) { <nl> - std : : string coll = * toDropStr ; <nl> - NamespaceString nss ( coll ) ; <nl> - <nl> - Status result = dbce - > dropCollection ( opCtx , coll ) ; <nl> - if ( ! result . isOK ( ) & & firstError . isOK ( ) ) { <nl> - firstError = result ; <nl> - } <nl> - } <nl> - <nl> - toDrop . clear ( ) ; <nl> - dbce - > getCollectionNamespaces ( & toDrop ) ; <nl> - invariant ( toDrop . empty ( ) ) ; <nl> - <nl> - { <nl> - stdx : : lock_guard < stdx : : mutex > lk ( _dbsLock ) ; <nl> - opCtx - > recoveryUnit ( ) - > registerChange ( new RemoveDBChange ( this , dbce - > name ( ) , dbce ) ) ; <nl> - _dbs . erase ( dbce - > name ( ) ) ; <nl> - } <nl> - <nl> - wuow . commit ( ) ; <nl> - return firstError ; <nl> - } <nl> - <nl> int KVStorageEngine : : flushAllFiles ( OperationContext * opCtx , bool sync ) { <nl> return _engine - > flushAllFiles ( opCtx , sync ) ; <nl> } <nl> mmm a / src / mongo / dbtests / storage_timestamp_tests . cpp <nl> ppp b / src / mongo / dbtests / storage_timestamp_tests . cpp <nl> class SetMinValidAppliedThrough : public StorageTimestampTest { <nl> * timestamping ` dropDatabase ` side - effects no longer applies . The purpose of this test is to <nl> * exercise the ` KVStorageEngine : : dropDatabase ` method . <nl> * / <nl> + template < bool SimulatePrimary > <nl> class KVDropDatabase : public StorageTimestampTest { <nl> public : <nl> void run ( ) { <nl> class KVDropDatabase : public StorageTimestampTest { <nl> / / The namespace has changed , but the ident still exists as - is after the rename . <nl> assertIdentsExistAtTimestamp ( kvCatalog , collIdent , indexIdent , postRenameTime ) ; <nl> <nl> - ASSERT_OK ( dropDatabase ( _opCtx , nss . db ( ) . toString ( ) ) ) ; <nl> + const Timestamp dropTime = _clock - > reserveTicks ( 1 ) . asTimestamp ( ) ; <nl> + if ( SimulatePrimary ) { <nl> + ASSERT_OK ( dropDatabase ( _opCtx , nss . db ( ) . toString ( ) ) ) ; <nl> + } else { <nl> + repl : : UnreplicatedWritesBlock uwb ( _opCtx ) ; <nl> + TimestampBlock ts ( _opCtx , dropTime ) ; <nl> + ASSERT_OK ( dropDatabase ( _opCtx , nss . db ( ) . toString ( ) ) ) ; <nl> + } <nl> <nl> / / Assert that the idents do not exist . 
<nl> assertIdentsMissingAtTimestamp ( <nl> kvCatalog , sysProfileIdent , sysProfileIndexIdent , Timestamp : : max ( ) ) ; <nl> assertIdentsMissingAtTimestamp ( kvCatalog , collIdent , indexIdent , Timestamp : : max ( ) ) ; <nl> + <nl> + / / dropDatabase must not timestamp the final write . The collection and index should seem <nl> + / / to have never existed . <nl> + assertIdentsMissingAtTimestamp ( kvCatalog , collIdent , indexIdent , syncTime ) ; <nl> } <nl> } ; <nl> <nl> class AllStorageTimestampTests : public unittest : : Suite { <nl> add < SetMinValidInitialSyncFlag > ( ) ; <nl> add < SetMinValidToAtLeast > ( ) ; <nl> add < SetMinValidAppliedThrough > ( ) ; <nl> - add < KVDropDatabase > ( ) ; <nl> + / / KVDropDatabase < SimulatePrimary > <nl> + add < KVDropDatabase < false > > ( ) ; <nl> + add < KVDropDatabase < true > > ( ) ; <nl> / / TimestampIndexBuilds < SimulatePrimary > <nl> add < TimestampIndexBuilds < false > > ( ) ; <nl> add < TimestampIndexBuilds < true > > ( ) ; <nl> | SERVER - 35127 : Do not timestamp any collection drops committed by dropDatabase . | mongodb/mongo | 7730b27795b642ac772411cfba25f165ddb265e7 | 2018-05-23T14:16:03Z |
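The test in the row above is parameterized on a compile-time `SimulatePrimary` flag and registered once per instantiation. A generic miniature of that pattern (illustrative only, not MongoDB's test harness):

```cpp
#include <iostream>

// One test body, compiled twice; the flag selects the code path under test.
template <bool SimulatePrimary>
struct KVDropDatabaseLikeTest {
  void run() const {
    if (SimulatePrimary) {
      std::cout << "primary path: drop issued through the replicated command\n";
    } else {
      std::cout << "secondary path: drop applied as an unreplicated, timestamped write\n";
    }
  }
};

int main() {
  // Mirrors add<KVDropDatabase<false>>(); add<KVDropDatabase<true>>();
  KVDropDatabaseLikeTest<false>{}.run();
  KVDropDatabaseLikeTest<true>{}.run();
}
```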
mmm a / src / Functions / FunctionsConversion . h <nl> ppp b / src / Functions / FunctionsConversion . h <nl> struct ToDateMonotonicity <nl> else if ( <nl> ( which . isUInt ( ) & & ( ( left . isNull ( ) | | left . get < UInt64 > ( ) < 0xFFFF ) & & ( right . isNull ( ) | | right . get < UInt64 > ( ) > = 0xFFFF ) ) ) <nl> | | ( which . isInt ( ) & & ( ( left . isNull ( ) | | left . get < Int64 > ( ) < 0xFFFF ) & & ( right . isNull ( ) | | right . get < Int64 > ( ) > = 0xFFFF ) ) ) <nl> - | | ( which . isFloat ( ) & & ( ( left . isNull ( ) | | left . get < Float64 > ( ) < 0xFFFF ) & & ( right . isNull ( ) | | right . get < Float64 > ( ) > = 0xFFFF ) ) ) ) <nl> + | | ( which . isFloat ( ) & & ( ( left . isNull ( ) | | left . get < Float64 > ( ) < 0xFFFF ) & & ( right . isNull ( ) | | right . get < Float64 > ( ) > = 0xFFFF ) ) ) <nl> + | | ! type . isValueRepresentedByNumber ( ) ) <nl> return { } ; <nl> else <nl> return { true , true , true } ; <nl> } <nl> } ; <nl> <nl> + struct ToDateTimeMonotonicity <nl> + { <nl> + static bool has ( ) { return true ; } <nl> + <nl> + static IFunction : : Monotonicity get ( const IDataType & type , const Field & , const Field & ) <nl> + { <nl> + if ( type . isValueRepresentedByNumber ( ) ) <nl> + return { true , true , true } ; <nl> + else <nl> + return { } ; <nl> + } <nl> + } ; <nl> + <nl> / * * The monotonicity for the ` toString ` function is mainly determined for test purposes . <nl> * It is doubtful that anyone is looking to optimize queries with conditions ` toString ( CounterID ) = 34 ` . <nl> * / <nl> using FunctionToInt64 = FunctionConvert < DataTypeInt64 , NameToInt64 , ToNumberMono <nl> using FunctionToFloat32 = FunctionConvert < DataTypeFloat32 , NameToFloat32 , ToNumberMonotonicity < Float32 > > ; <nl> using FunctionToFloat64 = FunctionConvert < DataTypeFloat64 , NameToFloat64 , ToNumberMonotonicity < Float64 > > ; <nl> using FunctionToDate = FunctionConvert < DataTypeDate , NameToDate , ToDateMonotonicity > ; <nl> - using FunctionToDateTime = FunctionConvert < DataTypeDateTime , NameToDateTime , PositiveMonotonicity > ; <nl> + using FunctionToDateTime = FunctionConvert < DataTypeDateTime , NameToDateTime , ToDateTimeMonotonicity > ; <nl> using FunctionToDateTime64 = FunctionConvert < DataTypeDateTime64 , NameToDateTime64 , UnknownMonotonicity > ; <nl> using FunctionToUUID = FunctionConvert < DataTypeUUID , NameToUUID , ToNumberMonotonicity < UInt128 > > ; <nl> using FunctionToString = FunctionConvert < DataTypeString , NameToString , ToStringMonotonicity > ; <nl> mmm a / tests / queries / 0_stateless / 01440_to_date_monotonicity . reference <nl> ppp b / tests / queries / 0_stateless / 01440_to_date_monotonicity . reference <nl> <nl> 0 <nl> 1970 - 01 - 01 2106 - 02 - 07 1970 - 04 - 11 1970 - 01 - 01 2106 - 02 - 07 <nl> + 1970 - 01 - 01 03 : 00 : 00 2106 - 02 - 07 09 : 28 : 15 1970 - 01 - 01 03 : 16 : 40 <nl> | string to date is not monotonic | ClickHouse/ClickHouse | d9de96716a4b22dd79d6780e4b36e527e8d46ced | 2020-08-08T06:30:50Z |
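The row above only reports `toDate`/`toDateTime` as monotonic when the source type is representable as a number. A stripped-down sketch of that dispatch, with generic stand-ins for the interfaces shown in the diff:

```cpp
struct Monotonicity {
  bool is_monotonic = false;
  bool is_positive = false;
  bool is_always_monotonic = false;
};

// Stand-in for the IDataType::isValueRepresentedByNumber() check in the diff.
struct SourceTypeInfo {
  bool value_represented_by_number;
};

// Conversion to DateTime preserves ordering for numeric inputs (epoch
// seconds), but nothing can be promised for non-numeric sources such as
// strings, so an empty Monotonicity is returned for them.
Monotonicity ToDateTimeMonotonicity(const SourceTypeInfo& type) {
  return type.value_represented_by_number ? Monotonicity{true, true, true}
                                          : Monotonicity{};
}
```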
mmm a / lib / SILOptimizer / Utils / SILInliner . cpp <nl> ppp b / lib / SILOptimizer / Utils / SILInliner . cpp <nl> namespace swift { <nl> / / / Utility class for rewiring control - flow of inlined begin_apply functions . <nl> class BeginApplySite { <nl> SILLocation Loc ; <nl> - SILBuilder & Builder ; <nl> + SILBuilder * Builder ; <nl> BeginApplyInst * BeginApply ; <nl> bool HasYield = false ; <nl> <nl> class BeginApplySite { <nl> <nl> public : <nl> BeginApplySite ( BeginApplyInst * BeginApply , SILLocation Loc , <nl> - SILBuilder & Builder ) <nl> + SILBuilder * Builder ) <nl> : Loc ( Loc ) , Builder ( Builder ) , BeginApply ( BeginApply ) { } <nl> <nl> static Optional < BeginApplySite > get ( FullApplySite AI , SILLocation Loc , <nl> - SILBuilder & Builder ) { <nl> + SILBuilder * Builder ) { <nl> auto * BeginApply = dyn_cast < BeginApplyInst > ( AI ) ; <nl> if ( ! BeginApply ) <nl> return None ; <nl> class BeginApplySite { <nl> auto remappedYield = remapValue ( calleeYields [ i ] ) ; <nl> callerYields [ i ] - > replaceAllUsesWith ( remappedYield ) ; <nl> } <nl> - Builder . createBranch ( Loc , returnToBB ) ; <nl> + Builder - > createBranch ( Loc , returnToBB ) ; <nl> <nl> / / Add branches at the resumption sites to the resume / unwind block . <nl> if ( EndApply ) { <nl> - SavedInsertionPointRAII savedIP ( Builder , EndApplyBB ) ; <nl> + SavedInsertionPointRAII savedIP ( * Builder , EndApplyBB ) ; <nl> auto resumeBB = remapBlock ( yield - > getResumeBB ( ) ) ; <nl> - Builder . createBranch ( EndApply - > getLoc ( ) , resumeBB ) ; <nl> + Builder - > createBranch ( EndApply - > getLoc ( ) , resumeBB ) ; <nl> } <nl> if ( AbortApply ) { <nl> - SavedInsertionPointRAII savedIP ( Builder , AbortApplyBB ) ; <nl> + SavedInsertionPointRAII savedIP ( * Builder , AbortApplyBB ) ; <nl> auto unwindBB = remapBlock ( yield - > getUnwindBB ( ) ) ; <nl> - Builder . createBranch ( AbortApply - > getLoc ( ) , unwindBB ) ; <nl> + Builder - > createBranch ( AbortApply - > getLoc ( ) , unwindBB ) ; <nl> } <nl> return true ; <nl> } <nl> class BeginApplySite { <nl> bool isNormal = isa < ReturnInst > ( terminator ) ; <nl> auto returnBB = isNormal ? EndApplyReturnBB : AbortApplyReturnBB ; <nl> if ( returnBB ) { <nl> - Builder . createBranch ( Loc , returnBB ) ; <nl> + Builder - > createBranch ( Loc , returnBB ) ; <nl> } else { <nl> - Builder . createUnreachable ( Loc ) ; <nl> + Builder - > createUnreachable ( Loc ) ; <nl> } <nl> return true ; <nl> } <nl> class BeginApplySite { <nl> if ( ! HasYield ) { <nl> / / Make sure the split resumption blocks have terminators . <nl> if ( EndApplyBB ) { <nl> - SavedInsertionPointRAII savedIP ( Builder , EndApplyBB ) ; <nl> - Builder . createUnreachable ( Loc ) ; <nl> + SavedInsertionPointRAII savedIP ( * Builder , EndApplyBB ) ; <nl> + Builder - > createUnreachable ( Loc ) ; <nl> } <nl> if ( AbortApplyBB ) { <nl> - SavedInsertionPointRAII savedIP ( Builder , AbortApplyBB ) ; <nl> - Builder . createUnreachable ( Loc ) ; <nl> + SavedInsertionPointRAII savedIP ( * Builder , AbortApplyBB ) ; <nl> + Builder - > createUnreachable ( Loc ) ; <nl> } <nl> <nl> / / Replace all the yielded values in the callee with undef . <nl> for ( auto calleeYield : BeginApply - > getYieldedValues ( ) ) { <nl> - calleeYield - > replaceAllUsesWith ( SILUndef : : get ( calleeYield - > getType ( ) , <nl> - Builder . 
getModule ( ) ) ) ; <nl> + calleeYield - > replaceAllUsesWith ( <nl> + SILUndef : : get ( calleeYield - > getType ( ) , Builder - > getModule ( ) ) ) ; <nl> } <nl> } <nl> <nl> SILInlineCloner : : SILInlineCloner ( <nl> assert ( CallSiteScope - > getParentFunction ( ) = = & F ) ; <nl> <nl> / / Set up the coroutine - specific inliner if applicable . <nl> - BeginApply = BeginApplySite : : get ( apply , Loc . getValue ( ) , getBuilder ( ) ) ; <nl> + BeginApply = BeginApplySite : : get ( apply , Loc . getValue ( ) , & getBuilder ( ) ) ; <nl> } <nl> <nl> / / Clone the entire callee function into the caller function at the apply site . <nl> | Fix SILInliner Compilation on MSVC | apple/swift | 39f17678590b91d085fc52a0acc1cf53f5a48b03 | 2018-10-26T18:44:57Z |
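The commit above only says it fixes MSVC compilation; one plausible reason for switching `BeginApplySite`'s `SILBuilder &` member to a pointer is that a reference member deletes the implicit assignment operators, which matters for a type stored inside an `Optional`, as this one is. A generic illustration of that C++ rule (not Swift's actual types):

```cpp
#include <optional>
#include <type_traits>

struct Builder {};

struct WithRef { Builder& b; };   // reference member: copy assignment is deleted
struct WithPtr { Builder* b; };   // pointer member: assignable as usual

static_assert(!std::is_copy_assignable_v<WithRef>, "reference member blocks assignment");
static_assert(std::is_copy_assignable_v<WithPtr>, "pointer member keeps the type assignable");

int main() {
  Builder b;
  std::optional<WithPtr> site;  // easy to assign and reset, like Optional<BeginApplySite>
  site = WithPtr{&b};
  site.reset();
}
```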
similarity index 100 % <nl> rename from test / expr / postfix / dot / self . swift <nl> rename to test / expr / postfix / dot / dot_keywords . swift <nl> | Rename a testcase to prepare to generalize it . | apple/swift | 973b2f785f3585669b30655c15070700fd7cacf3 | 2016-05-03T04:55:58Z |
mmm a / src / asmjs / asm - parser . cc <nl> ppp b / src / asmjs / asm - parser . cc <nl> uint32_t AsmJsParser : : VarIndex ( VarInfo * info ) { <nl> void AsmJsParser : : AddGlobalImport ( std : : string name , AsmType * type , <nl> ValueType vtype , bool mutable_variable , <nl> VarInfo * info ) { <nl> - / / TODO ( bradnelson ) : Refactor memory management here . <nl> - / / AsmModuleBuilder should really own import names . <nl> - char * name_data = zone ( ) - > NewArray < char > ( name . size ( ) ) ; <nl> - memcpy ( name_data , name . data ( ) , name . size ( ) ) ; <nl> if ( mutable_variable ) { <nl> / / Allocate a separate variable for the import . <nl> DeclareGlobal ( info , true , type , vtype ) ; <nl> / / Record the need to initialize the global from the import . <nl> - global_imports_ . push_back ( { name_data , name . size ( ) , 0 , info - > index , true } ) ; <nl> + global_imports_ . push_back ( { name , 0 , info - > index , true } ) ; <nl> } else { <nl> / / Just use the import directly . <nl> - global_imports_ . push_back ( { name_data , name . size ( ) , 0 , info - > index , false } ) ; <nl> + global_imports_ . push_back ( { name , 0 , info - > index , false } ) ; <nl> } <nl> GlobalImport & gi = global_imports_ . back ( ) ; <nl> / / TODO ( bradnelson ) : Reuse parse buffer memory / make wasm - module - builder <nl> / / managed the memory for the import name ( currently have to keep our <nl> / / own memory for it ) . <nl> gi . import_index = module_builder_ - > AddGlobalImport ( <nl> - name_data , static_cast < int > ( name . size ( ) ) , vtype ) ; <nl> + name . data ( ) , static_cast < int > ( name . size ( ) ) , vtype ) ; <nl> if ( ! mutable_variable ) { <nl> info - > DeclareGlobalImport ( type , gi . import_index ) ; <nl> } <nl> void AsmJsParser : : ValidateModule ( ) { <nl> } <nl> RECURSE ( ValidateExport ( ) ) ; <nl> <nl> - / / Check that all functions were eventually defined . <nl> - for ( auto info : global_var_info_ ) { <nl> - if ( info . kind ! = VarKind : : kFunction ) { <nl> - continue ; <nl> - } <nl> - if ( ! info . function_defined ) { <nl> - FAIL ( " Undefined function " ) ; <nl> - } <nl> - } <nl> - <nl> / / Add start function to init things . <nl> WasmFunctionBuilder * start = module_builder_ - > AddFunction ( ) ; <nl> module_builder_ - > MarkStartFunction ( start ) ; <nl> void AsmJsParser : : ValidateModuleVar ( bool mutable_variable ) { <nl> if ( uvalue > 0x7fffffff ) { <nl> FAIL ( " Numeric literal out of range " ) ; <nl> } <nl> - DeclareGlobal ( info , mutable_variable , <nl> - mutable_variable ? AsmType : : Int ( ) : AsmType : : Signed ( ) , <nl> - kWasmI32 , WasmInitExpr ( static_cast < int32_t > ( uvalue ) ) ) ; <nl> + DeclareGlobal ( info , mutable_variable , AsmType : : Int ( ) , kWasmI32 , <nl> + WasmInitExpr ( static_cast < int32_t > ( uvalue ) ) ) ; <nl> } else if ( Check ( ' - ' ) ) { <nl> if ( CheckForDouble ( & dvalue ) ) { <nl> DeclareGlobal ( info , mutable_variable , AsmType : : Double ( ) , kWasmF64 , <nl> void AsmJsParser : : ValidateModuleVar ( bool mutable_variable ) { <nl> if ( uvalue > 0x7fffffff ) { <nl> FAIL ( " Numeric literal out of range " ) ; <nl> } <nl> - DeclareGlobal ( info , mutable_variable , <nl> - mutable_variable ? 
AsmType : : Int ( ) : AsmType : : Signed ( ) , <nl> - kWasmI32 , WasmInitExpr ( - static_cast < int32_t > ( uvalue ) ) ) ; <nl> + DeclareGlobal ( info , mutable_variable , AsmType : : Int ( ) , kWasmI32 , <nl> + WasmInitExpr ( - static_cast < int32_t > ( uvalue ) ) ) ; <nl> } else { <nl> FAIL ( " Expected numeric literal " ) ; <nl> } <nl> void AsmJsParser : : ValidateModuleVar ( bool mutable_variable ) { <nl> } else if ( ValidateModuleVarImport ( info , mutable_variable ) ) { <nl> / / Handled inside . <nl> } else if ( scanner_ . IsGlobal ( ) ) { <nl> - RECURSE ( ValidateModuleVarFromGlobal ( info , mutable_variable ) ) ; <nl> + RECURSE ( ValidateModuleVarFloat ( info , mutable_variable ) ) ; <nl> } else { <nl> FAIL ( " Bad variable declaration " ) ; <nl> } <nl> } <nl> <nl> / / 6 . 1 ValidateModule - global float declaration <nl> - void AsmJsParser : : ValidateModuleVarFromGlobal ( VarInfo * info , <nl> - bool mutable_variable ) { <nl> - VarInfo * src_info = GetVarInfo ( Consume ( ) ) ; <nl> - if ( ! src_info - > type - > IsA ( stdlib_fround_ ) ) { <nl> - if ( src_info - > mutable_variable ) { <nl> - FAIL ( " Can only use immutable variables in global definition " ) ; <nl> - } <nl> - if ( mutable_variable ) { <nl> - FAIL ( " Can only define immutable variables with other immutables " ) ; <nl> - } <nl> - if ( ! src_info - > type - > IsA ( AsmType : : Int ( ) ) & & <nl> - ! src_info - > type - > IsA ( AsmType : : Float ( ) ) & & <nl> - ! src_info - > type - > IsA ( AsmType : : Double ( ) ) ) { <nl> - FAIL ( " Expected int , float , double , or fround for global definition " ) ; <nl> - } <nl> - info - > kind = VarKind : : kGlobal ; <nl> - info - > type = src_info - > type ; <nl> - info - > index = src_info - > index ; <nl> - info - > mutable_variable = false ; <nl> - return ; <nl> + void AsmJsParser : : ValidateModuleVarFloat ( VarInfo * info , bool mutable_variable ) { <nl> + if ( ! GetVarInfo ( Consume ( ) ) - > type - > IsA ( stdlib_fround_ ) ) { <nl> + FAIL ( " Expected fround " ) ; <nl> } <nl> EXPECT_TOKEN ( ' ( ' ) ; <nl> bool negate = false ; <nl> bool AsmJsParser : : ValidateModuleVarImport ( VarInfo * info , <nl> info - > kind = VarKind : : kImportedFunction ; <nl> function_import_info_ . resize ( function_import_info_ . size ( ) + 1 ) ; <nl> info - > import = & function_import_info_ . back ( ) ; <nl> - / / TODO ( bradnelson ) : Refactor memory management here . <nl> - / / AsmModuleBuilder should really own import names . <nl> - info - > import - > function_name = zone ( ) - > NewArray < char > ( import_name . size ( ) ) ; <nl> - memcpy ( info - > import - > function_name , import_name . data ( ) , import_name . size ( ) ) ; <nl> - info - > import - > function_name_size = import_name . size ( ) ; <nl> + info - > import - > name = import_name ; <nl> return true ; <nl> } <nl> return false ; <nl> void AsmJsParser : : ValidateModuleVarStdlib ( VarInfo * info ) { <nl> case TOK ( name ) : \ <nl> DeclareGlobal ( info , false , AsmType : : Double ( ) , kWasmF64 , \ <nl> WasmInitExpr ( M_ # # name ) ) ; \ <nl> - stdlib_uses_ . insert ( AsmTyper : : kMath # # name ) ; \ <nl> break ; <nl> STDLIB_MATH_VALUE_LIST ( V ) <nl> # undef V <nl> void AsmJsParser : : ValidateModuleVarStdlib ( VarInfo * info ) { <nl> } else if ( Check ( TOK ( Infinity ) ) ) { <nl> DeclareGlobal ( info , false , AsmType : : Double ( ) , kWasmF64 , <nl> WasmInitExpr ( std : : numeric_limits < double > : : infinity ( ) ) ) ; <nl> - stdlib_uses_ . 
insert ( AsmTyper : : kInfinity ) ; <nl> } else if ( Check ( TOK ( NaN ) ) ) { <nl> DeclareGlobal ( info , false , AsmType : : Double ( ) , kWasmF64 , <nl> WasmInitExpr ( std : : numeric_limits < double > : : quiet_NaN ( ) ) ) ; <nl> - stdlib_uses_ . insert ( AsmTyper : : kNaN ) ; <nl> } else { <nl> FAIL ( " Invalid member of stdlib " ) ; <nl> } <nl> void AsmJsParser : : ValidateFunction ( ) { <nl> if ( function_info - > kind = = VarKind : : kUnused ) { <nl> function_info - > kind = VarKind : : kFunction ; <nl> function_info - > function_builder = module_builder_ - > AddFunction ( ) ; <nl> - / / TODO ( bradnelson ) : Cleanup memory management here . <nl> - / / WasmModuleBuilder should own these . <nl> - char * function_name = zone ( ) - > NewArray < char > ( function_name_raw . size ( ) ) ; <nl> - memcpy ( function_name , function_name_raw . data ( ) , function_name_raw . size ( ) ) ; <nl> function_info - > function_builder - > SetName ( <nl> - { function_name , static_cast < int > ( function_name_raw . size ( ) ) } ) ; <nl> + { function_name_raw . c_str ( ) , <nl> + static_cast < int > ( function_name_raw . size ( ) ) } ) ; <nl> function_info - > index = function_info - > function_builder - > func_index ( ) ; <nl> function_info - > function_defined = true ; <nl> } else if ( function_info - > function_defined ) { <nl> FAIL ( " Function redefined " ) ; <nl> - } else { <nl> - function_info - > function_defined = true ; <nl> } <nl> current_function_builder_ = function_info - > function_builder ; <nl> return_type_ = nullptr ; <nl> void AsmJsParser : : ValidateFunctionLocals ( <nl> info - > type = AsmType : : Double ( ) ; <nl> info - > index = static_cast < uint32_t > ( param_count + locals - > size ( ) ) ; <nl> locals - > push_back ( kWasmF64 ) ; <nl> - byte code [ ] = { WASM_F64 ( - dvalue ) } ; <nl> + byte code [ ] = { WASM_F64 ( dvalue ) } ; <nl> current_function_builder_ - > EmitCode ( code , sizeof ( code ) ) ; <nl> current_function_builder_ - > EmitSetLocal ( info - > index ) ; <nl> } else if ( CheckForUnsigned ( & uvalue ) ) { <nl> AsmType * AsmJsParser : : ValidateCall ( ) { <nl> uint32_t index ; <nl> if ( cache_index > = function_info - > import - > cache_index . size ( ) ) { <nl> index = module_builder_ - > AddImport ( <nl> - function_info - > import - > function_name , <nl> - static_cast < uint32_t > ( function_info - > import - > function_name_size ) , <nl> - sig ) ; <nl> + function_info - > import - > name . data ( ) , <nl> + static_cast < uint32_t > ( function_info - > import - > name . size ( ) ) , sig ) ; <nl> function_info - > import - > cache_index . push_back ( index ) ; <nl> } else { <nl> index = function_info - > import - > cache_index [ cache_index ] ; <nl> mmm a / src / asmjs / asm - parser . h <nl> ppp b / src / asmjs / asm - parser . 
h <nl> class AsmJsParser { <nl> / / clang - format on <nl> <nl> struct FunctionImportInfo { <nl> - char * function_name ; <nl> - size_t function_name_size ; <nl> + std : : string name ; <nl> SignatureMap cache ; <nl> std : : vector < uint32_t > cache_index ; <nl> } ; <nl> class AsmJsParser { <nl> } ; <nl> <nl> struct GlobalImport { <nl> - char * import_name ; <nl> - size_t import_name_size ; <nl> + std : : string import_name ; <nl> uint32_t import_index ; <nl> uint32_t global_index ; <nl> bool needs_init ; <nl> class AsmJsParser { <nl> bool ValidateModuleVarImport ( VarInfo * info , bool mutable_variable ) ; <nl> void ValidateModuleVarStdlib ( VarInfo * info ) ; <nl> void ValidateModuleVarNewStdlib ( VarInfo * info ) ; <nl> - void ValidateModuleVarFromGlobal ( VarInfo * info , bool mutable_variable ) ; <nl> + void ValidateModuleVarFloat ( VarInfo * info , bool mutable_variable ) ; <nl> <nl> void ValidateExport ( ) ; / / 6 . 2 ValidateExport <nl> void ValidateFunctionTable ( ) ; / / 6 . 3 ValidateFunctionTable <nl> mmm a / src / asmjs / asm - scanner . cc <nl> ppp b / src / asmjs / asm - scanner . cc <nl> void AsmJsScanner : : Next ( ) { <nl> } <nl> <nl> void AsmJsScanner : : Rewind ( ) { <nl> - / / TODO ( bradnelson ) : Currently rewinding needs to leave in place the <nl> - / / preceding newline state ( in case a | 0 ends a line ) . <nl> - / / This is weird and stateful , fix me . <nl> DCHECK ( ! rewind_ ) ; <nl> next_token_ = token_ ; <nl> token_ = preceding_token_ ; <nl> preceding_token_ = kUninitialized ; <nl> rewind_ = true ; <nl> + preceded_by_newline_ = false ; <nl> identifier_string_ . clear ( ) ; <nl> } <nl> <nl> mmm a / src / wasm / wasm - module - builder . cc <nl> ppp b / src / wasm / wasm - module - builder . cc <nl> void WasmFunctionBuilder : : StashCode ( std : : vector < byte > * dst , size_t position ) { <nl> body_ . resize ( position ) ; <nl> return ; <nl> } <nl> - DCHECK_LE ( position , body_ . size ( ) ) ; <nl> size_t len = body_ . size ( ) - position ; <nl> dst - > resize ( len ) ; <nl> - memcpy ( dst - > data ( ) , body_ . data ( ) + position , len ) ; <nl> + memcpy ( dst - > data ( ) , & body_ [ position ] , len ) ; <nl> body_ . resize ( position ) ; <nl> } <nl> <nl> mmm a / test / mjsunit / asm / asm - validation . js <nl> ppp b / test / mjsunit / asm / asm - validation . js <nl> function assertValidAsm ( func ) { <nl> assertFalse ( % IsAsmWasmCode ( Module ) ) ; <nl> } ) ( ) ; <nl> <nl> + ( function TestBadishBooleanExprAnnotation ( ) { <nl> + function Module ( ) { <nl> + " use asm " ; <nl> + function foo ( x ) { <nl> + x = x | 0 ; <nl> + x = ( x + 1 ) | false ; <nl> + return x | 0 ; <nl> + } <nl> + return { foo : foo } ; <nl> + } <nl> + var m = Module ( ) ; <nl> + / / We all false here because the parser optimizes expressons like : <nl> + / / ! 123 to false . <nl> + assertTrue ( % IsAsmWasmCode ( Module ) ) ; <nl> + assertEquals ( 4 , m . foo ( 3 ) ) ; <nl> + } ) ( ) ; <nl> + <nl> ( function TestBadFroundTrue ( ) { <nl> function Module ( stdlib ) { <nl> " use asm " ; <nl> mmm a / test / mjsunit / mjsunit . status <nl> ppp b / test / mjsunit / mjsunit . status <nl> <nl> [ ' variant = = asm_wasm ' , { <nl> # Issue 6127 : We won ' t fix these in the " old " validator . But we definitely <nl> # need to re - enable these for the " new " validator . 
<nl> + ' code - coverage - precise ' : [ SKIP ] , <nl> + ' object - freeze ' : [ SKIP ] , <nl> + ' asm / asm - validation ' : [ SKIP ] , <nl> + ' asm / infinite - loops ' : [ SKIP ] , <nl> + ' es6 / completion ' : [ SKIP ] , <nl> + ' es6 / function - length - configurable ' : [ SKIP ] , <nl> + ' es6 / object - assign ' : [ SKIP ] , <nl> + ' es6 / proxies - cross - realm - exception ' : [ SKIP ] , <nl> + ' harmony / function - tostring ' : [ SKIP ] , <nl> ' compiler / regress - 445859 ' : [ SKIP ] , <nl> ' compiler / regress - 452427 ' : [ SKIP ] , <nl> + ' regress / regress - 353058 ' : [ SKIP ] , <nl> ' regress / regress - 458987 ' : [ SKIP ] , <nl> + ' regress / regress - 599068 - func - bindings ' : [ SKIP ] , <nl> ' regress / regress - 599719 ' : [ SKIP ] , <nl> ' regress / regress - 618608 ' : [ SKIP ] , <nl> + ' regress / regress - 619382 ' : [ SKIP ] , <nl> + ' regress / regress - 632289 ' : [ SKIP ] , <nl> ' regress / regress - 670808 ' : [ SKIP ] , <nl> + ' regress / regress - crbug - 530598 ' : [ SKIP ] , <nl> + ' wasm / asm - wasm ' : [ SKIP ] , <nl> ' wasm / asm - wasm - exception - in - tonumber ' : [ SKIP ] , <nl> ' wasm / asm - wasm - stack ' : [ SKIP ] , <nl> ' wasm / errors ' : [ SKIP ] , <nl> <nl> ' asm / b5528 - comma ' : [ SKIP ] , <nl> ' asm / if - folding ' : [ SKIP ] , <nl> ' asm / if - reduction ' : [ SKIP ] , <nl> + ' wasm / embenchen / box2d ' : [ SKIP ] , <nl> + ' wasm / embenchen / copy ' : [ SKIP ] , <nl> + ' wasm / embenchen / corrections ' : [ SKIP ] , <nl> + ' wasm / embenchen / fannkuch ' : [ SKIP ] , <nl> + ' wasm / embenchen / fasta ' : [ SKIP ] , <nl> + ' wasm / embenchen / lua_binarytrees ' : [ SKIP ] , <nl> + ' wasm / embenchen / memops ' : [ SKIP ] , <nl> + ' wasm / embenchen / primes ' : [ SKIP ] , <nl> + ' wasm / embenchen / zlib ' : [ SKIP ] , <nl> <nl> # Issue 6127 : Breaks on no - snap bots . <nl> ' es8 / regress / regress - 624300 ' : [ SKIP ] , <nl> | Revert of [ wasm ] [ asm . js ] Fix and enable several asm . js tests with the new parser . ( patchset id : 180001 of https : / / codereview . chromium . org / 2771183002 / ) | v8/v8 | f8973f1caab084beb4ba35c4f4822dff61aae45f | 2017-03-28T17:04:10Z |
mmm a / Source / Common / Include / MPIWrapper . h <nl> ppp b / Source / Common / Include / MPIWrapper . h <nl> extern int operator | | ( int rc , const MpiFail & what ) ; <nl> class MPIWrapper ; <nl> typedef std : : shared_ptr < MPIWrapper > MPIWrapperPtr ; <nl> <nl> + <nl> class MPIWrapper : public std : : enable_shared_from_this < MPIWrapper > <nl> { <nl> public : <nl> class MPIWrapper : public std : : enable_shared_from_this < MPIWrapper > <nl> <nl> / / allreduce of a vector <nl> virtual void AllReduce ( std : : vector < size_t > & accumulator ) const = 0 ; <nl> + virtual void AllReduce ( std : : vector < int > & accumulator ) const = 0 ; <nl> virtual void AllReduce ( std : : vector < double > & accumulator ) const = 0 ; <nl> virtual void AllReduce ( std : : vector < float > & accumulator ) const = 0 ; <nl> <nl> class MPIWrapper : public std : : enable_shared_from_this < MPIWrapper > <nl> virtual void Bcast ( float * pData , size_t nData , size_t srcRank ) = 0 ; <nl> <nl> / / wait for all ranks to reach here <nl> - virtual void WaitAll ( ) = 0 ; <nl> + virtual int WaitAll ( ) = 0 ; <nl> } ; <nl> <nl> <nl> class MPIWrapperMpi : public MPIWrapper <nl> <nl> / / allreduce of a vector <nl> virtual void AllReduce ( std : : vector < size_t > & accumulator ) const ; <nl> + virtual void AllReduce ( std : : vector < int > & accumulator ) const ; <nl> virtual void AllReduce ( std : : vector < double > & accumulator ) const ; <nl> virtual void AllReduce ( std : : vector < float > & accumulator ) const ; <nl> <nl> class MPIWrapperMpi : public MPIWrapper <nl> virtual void Bcast ( float * pData , size_t nData , size_t srcRank ) ; <nl> <nl> / / wait for all ranks to reach here <nl> - void WaitAll ( ) ; <nl> + int WaitAll ( ) ; <nl> } ; <nl> <nl> } } } <nl> mmm a / Source / Common / MPIWrapper . cpp <nl> ppp b / Source / Common / MPIWrapper . cpp <nl> <nl> <nl> namespace Microsoft { namespace MSR { namespace CNTK { <nl> <nl> + / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> + / / Generic MPIWrapper functions ( not related to a specific implementation ) <nl> + / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> <nl> int operator | | ( int rc , const MpiFail & what ) <nl> { <nl> int operator | | ( int rc , const MpiFail & what ) <nl> int MPIWrapperMpi : : s_myRank = - 1 ; <nl> std : : shared_ptr < MPIWrapper > MPIWrapperMpi : : s_mpi = nullptr ; <nl> <nl> - / / MPI_Init ( ) with delay - loading the msmpi . dll ( possibly causing a failure if missing ; we want to catch that ) <nl> - int MPIWrapperMpi : : MPI_Init_DL ( ) <nl> + / / Note that specifically , this function is such that it does not require <nl> + / / MPI initialization . Moreover , it can be used without actually loading any <nl> + / / MPI libs . <nl> + / / TODO : Once we move to dynamic loading for MPI libs on Linux , move it to utilities . <nl> + int MPIWrapper : : GetTotalNumberOfMPINodes ( ) <nl> { <nl> # if ! HAS_OPENMPI <nl> - return MPI_SUCCESS ; <nl> + return 0 ; <nl> # else <nl> # ifdef WIN32 <nl> - __try <nl> + const char * p = std : : getenv ( " PMI_SIZE " ) ; <nl> + # else <nl> + const char * p = std : : getenv ( " OMPI_COMM_WORLD_SIZE " ) ; <nl> # endif <nl> + if ( ! 
p ) <nl> { <nl> - / / don ' t initialize if that has been done already <nl> - int flag = 0 ; <nl> - MPI_Initialized ( & flag ) ; <nl> - if ( flag ) <nl> - return MPI_SUCCESS ; <nl> - <nl> - int argc = 0 ; <nl> - char * * argv = NULL ; <nl> - int requiredThreadLevelSupport = MPI_THREAD_SERIALIZED ; <nl> - int provided ; <nl> - int ret = MPI_Init_thread ( & argc , & argv , requiredThreadLevelSupport , & provided ) ; <nl> - if ( provided ! = requiredThreadLevelSupport ) <nl> - LogicError ( " Failed to initialize MPI with the desired level of thread support " ) ; <nl> - <nl> - return ret ; <nl> + return 0 ; <nl> } <nl> - # ifdef WIN32 <nl> - __except ( EXCEPTION_EXECUTE_HANDLER ) <nl> + else <nl> { <nl> - fprintf ( stderr , " mpihelper : msmpi . dll missing \ n " ) ; <nl> - return MPI_ERR_INTERN ; <nl> + return std : : stoi ( string ( p ) ) ; <nl> } <nl> # endif <nl> - # endif <nl> } <nl> <nl> - / / Workaround for the issue with MPI hanging when we have non - 0 exit codes from CNTK processes <nl> - / / OpenMPI has a confirmed race condition on killing child process vs . handling their non - zero exit statuses , resulting <nl> - / / in a deadlock , where all processes killed but MPI is still waiting . <nl> - / / This happens when several perfectly synchronized processes ( for example on MPI barrier ) <nl> - / / simulatenously exit with non - 0 exit code . <nl> - / / As a workaround , we simply sleep 50 * rank miliseconds , effectively " de - synchronizing processes " at exit , <nl> - / / allowing MPI to sequentially handle terminations <nl> - void MPIWrapperMpi : : MPIWorkaroundAtExit ( ) <nl> + MPIWrapperPtr MPIWrapper : : GetInstance ( bool create ) <nl> { <nl> - Sleep ( s_myRank * 50 ) ; <nl> + static bool initialized = false ; <nl> + if ( create ) <nl> + { <nl> + if ( initialized ) <nl> + LogicError ( " Creating MPIWrapper instance after a GetInstance call has been already made ! " ) ; <nl> + else <nl> + s_mpi = std : : make_shared < MPIWrapperMpi > ( ) ; <nl> + } <nl> + <nl> + initialized = true ; <nl> + return s_mpi ; <nl> } <nl> <nl> + void MPIWrapper : : DeleteInstance ( ) <nl> + { <nl> + s_mpi = nullptr ; <nl> + } <nl> + <nl> + / / helpers to determine the MPI_Datatype of a pointer <nl> + MPI_Datatype MPIWrapper : : GetDataType ( char * ) <nl> + { <nl> + return MPI_CHAR ; <nl> + } <nl> + <nl> + MPI_Datatype MPIWrapper : : GetDataType ( int * ) <nl> + { <nl> + return MPI_INT ; <nl> + } <nl> + <nl> + MPI_Datatype MPIWrapper : : GetDataType ( float * ) <nl> + { <nl> + return MPI_FLOAT ; <nl> + } <nl> + <nl> + MPI_Datatype MPIWrapper : : GetDataType ( double * ) <nl> + { <nl> + return MPI_DOUBLE ; <nl> + } <nl> + <nl> + MPI_Datatype MPIWrapper : : GetDataType ( size_t * ) <nl> + { <nl> + return sizeof ( size_t ) = = 4 ? MPI_UNSIGNED : MPI_LONG_LONG_INT ; <nl> + } <nl> + <nl> + <nl> + / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> + / / MPIWrapper that actually calls into msmpi . dll <nl> + / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> + <nl> MPIWrapperMpi : : MPIWrapperMpi ( ) <nl> : m_currentComm ( MPI_COMM_WORLD ) <nl> { <nl> MPIWrapperMpi : : MPIWrapperMpi ( ) <nl> : : Sleep ( ( DWORD ) ( 500 * CurrentNodeRank ( ) ) ) ; <nl> } <nl> <nl> - / / Note that specifically , this function is such that it does not require <nl> - / / MPI initialization . Moreover , it can be used without actually loading any <nl> - / / MPI libs . <nl> - / / TODO : Once we move to dynamic loading for MPI libs on Linux , move it to utilities . 
<nl> - int MPIWrapper : : GetTotalNumberOfMPINodes ( ) <nl> - { <nl> - # if ! HAS_OPENMPI <nl> - return 0 ; <nl> - # else <nl> - # ifdef WIN32 <nl> - const char * p = std : : getenv ( " PMI_SIZE " ) ; <nl> - # else <nl> - const char * p = std : : getenv ( " OMPI_COMM_WORLD_SIZE " ) ; <nl> - # endif <nl> - if ( ! p ) <nl> - { <nl> - return 0 ; <nl> - } <nl> - else <nl> - { <nl> - return std : : stoi ( string ( p ) ) ; <nl> - } <nl> - # endif <nl> - } <nl> - <nl> / / Note : we don ' t clear the sub - communication here although we should , because in case of a crash , this prevents the EXE from terminating . <nl> / / It ' s OK since this class is a singleton anyway that gets instantiated exactly once at program startup . <nl> MPIWrapperMpi : : ~ MPIWrapperMpi ( ) <nl> MPIWrapperMpi : : ~ MPIWrapperMpi ( ) <nl> } <nl> } <nl> <nl> + / / MPI_Init ( ) with delay - loading the msmpi . dll ( possibly causing a failure if missing ; we want to catch that ) <nl> + int MPIWrapperMpi : : MPI_Init_DL ( ) <nl> + { <nl> + # if ! HAS_OPENMPI <nl> + return MPI_SUCCESS ; <nl> + # else <nl> + # ifdef WIN32 <nl> + __try <nl> + # endif <nl> + { <nl> + / / don ' t initialize if that has been done already <nl> + int flag = 0 ; <nl> + MPI_Initialized ( & flag ) ; <nl> + if ( flag ) <nl> + return MPI_SUCCESS ; <nl> + <nl> + int argc = 0 ; <nl> + char * * argv = NULL ; <nl> + int requiredThreadLevelSupport = MPI_THREAD_SERIALIZED ; <nl> + int provided ; <nl> + int ret = MPI_Init_thread ( & argc , & argv , requiredThreadLevelSupport , & provided ) ; <nl> + if ( provided ! = requiredThreadLevelSupport ) <nl> + LogicError ( " Failed to initialize MPI with the desired level of thread support " ) ; <nl> + <nl> + return ret ; <nl> + } <nl> + # ifdef WIN32 <nl> + __except ( EXCEPTION_EXECUTE_HANDLER ) <nl> + { <nl> + fprintf ( stderr , " mpihelper : msmpi . dll missing \ n " ) ; <nl> + return MPI_ERR_INTERN ; <nl> + } <nl> + # endif <nl> + # endif <nl> + } <nl> + <nl> + / / Workaround for the issue with MPI hanging when we have non - 0 exit codes from CNTK processes <nl> + / / OpenMPI has a confirmed race condition on killing child process vs . handling their non - zero exit statuses , resulting <nl> + / / in a deadlock , where all processes killed but MPI is still waiting . <nl> + / / This happens when several perfectly synchronized processes ( for example on MPI barrier ) <nl> + / / simulatenously exit with non - 0 exit code . <nl> + / / As a workaround , we simply sleep 50 * rank miliseconds , effectively " de - synchronizing processes " at exit , <nl> + / / allowing MPI to sequentially handle terminations <nl> + void MPIWrapperMpi : : MPIWorkaroundAtExit ( ) <nl> + { <nl> + Sleep ( s_myRank * 50 ) ; <nl> + } <nl> + <nl> void MPIWrapperMpi : : Ping ( const char * msg ) const <nl> { <nl> # if HAS_OPENMPI <nl> void MPIWrapperMpi : : RequestNodes ( const char * msg , size_t requestednodes / * defaul <nl> # endif <nl> } <nl> <nl> - MPIWrapperPtr MPIWrapper : : GetInstance ( bool create ) <nl> - { <nl> - static bool initialized = false ; <nl> - if ( create ) <nl> - { <nl> - if ( initialized ) <nl> - LogicError ( " Creating MPIWrapper instance after a GetInstance call has been already made ! 
" ) ; <nl> - else <nl> - s_mpi = std : : make_shared < MPIWrapperMpi > ( ) ; <nl> - } <nl> - <nl> - initialized = true ; <nl> - return s_mpi ; <nl> - } <nl> - <nl> - void MPIWrapper : : DeleteInstance ( ) <nl> - { <nl> - s_mpi = nullptr ; <nl> - } <nl> - <nl> MPI_Comm MPIWrapperMpi : : Communicator ( ) const <nl> { <nl> return m_currentComm ; <nl> int MPIWrapperMpi : : Finalize ( void ) <nl> # endif <nl> } <nl> <nl> + / / wait for all ranks to reach here <nl> + int MPIWrapperMpi : : WaitAll ( ) <nl> + { <nl> + # if HAS_OPENMPI <nl> + return MPI_Barrier ( m_currentComm ) | | MpiFail ( " waitall : MPI_Barrier " ) ; <nl> + # else <nl> + return MPI_UNDEFINED ; <nl> + # endif <nl> + } <nl> + <nl> int MPIWrapperMpi : : Wait ( MPI_Request * request , MPI_Status * status ) <nl> { <nl> # if HAS_OPENMPI <nl> size_t MPIWrapperMpi : : MainNodeRank ( ) const <nl> return 0 ; <nl> } <nl> <nl> - / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> - / / data - exchange functions ( wrappers around MPI functions ) <nl> - / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> - <nl> - / / helpers to determine the MPI_Datatype of a pointer <nl> - MPI_Datatype MPIWrapper : : GetDataType ( char * ) <nl> - { <nl> - return MPI_CHAR ; <nl> - } <nl> - <nl> - MPI_Datatype MPIWrapper : : GetDataType ( int * ) <nl> - { <nl> - return MPI_INT ; <nl> - } <nl> - <nl> - MPI_Datatype MPIWrapper : : GetDataType ( float * ) <nl> - { <nl> - return MPI_FLOAT ; <nl> - } <nl> - <nl> - MPI_Datatype MPIWrapper : : GetDataType ( double * ) <nl> + / / allreduce of a vector <nl> + void MPIWrapperMpi : : AllReduce ( std : : vector < size_t > & accumulator ) const <nl> { <nl> - return MPI_DOUBLE ; <nl> - } <nl> + # if HAS_OPENMPI <nl> + auto * dataptr = accumulator . data ( ) ; <nl> + size_t totalnumelements = accumulator . size ( ) ; <nl> <nl> - MPI_Datatype MPIWrapper : : GetDataType ( size_t * ) <nl> - { <nl> - return sizeof ( size_t ) = = 4 ? MPI_UNSIGNED : MPI_LONG_LONG_INT ; <nl> + / / use MPI to compute the sum over all elements in ( dataptr , totalnumelements ) and redistribute to all nodes <nl> + if ( ( NumNodesInUse ( ) > 1 ) & & ( Communicator ( ) ! = MPI_COMM_NULL ) ) <nl> + { <nl> + MPI_Allreduce ( MPI_IN_PLACE , dataptr , ( int ) totalnumelements , GetDataType ( dataptr ) , MPI_SUM , Communicator ( ) ) | | MpiFail ( " allreduce : MPI_Allreduce " ) ; <nl> + } <nl> + # endif <nl> } <nl> <nl> - / / allreduce of a vector <nl> - void MPIWrapperMpi : : AllReduce ( std : : vector < size_t > & accumulator ) const <nl> + void MPIWrapperMpi : : AllReduce ( std : : vector < int > & accumulator ) const <nl> { <nl> # if HAS_OPENMPI <nl> auto * dataptr = accumulator . 
data ( ) ; <nl> void MPIWrapperMpi : : AllReduce ( std : : vector < size_t > & accumulator ) const <nl> } <nl> # endif <nl> } <nl> + <nl> void MPIWrapperMpi : : AllReduce ( std : : vector < double > & accumulator ) const <nl> { <nl> # if HAS_OPENMPI <nl> void MPIWrapperMpi : : AllReduce ( std : : vector < double > & accumulator ) const <nl> } <nl> # endif <nl> } <nl> + <nl> void MPIWrapperMpi : : AllReduce ( std : : vector < float > & accumulator ) const <nl> { <nl> # if HAS_OPENMPI <nl> void MPIWrapperMpi : : AllReduce ( size_t * pData , size_t nData ) <nl> } <nl> # endif <nl> } <nl> + <nl> void MPIWrapperMpi : : AllReduce ( int * pData , size_t nData ) <nl> { <nl> # if HAS_OPENMPI <nl> void MPIWrapperMpi : : AllReduce ( int * pData , size_t nData ) <nl> } <nl> # endif <nl> } <nl> + <nl> void MPIWrapperMpi : : AllReduce ( double * pData , size_t nData ) <nl> { <nl> # if HAS_OPENMPI <nl> void MPIWrapperMpi : : AllReduce ( double * pData , size_t nData ) <nl> } <nl> # endif <nl> } <nl> + <nl> void MPIWrapperMpi : : AllReduce ( float * pData , size_t nData ) <nl> { <nl> # if HAS_OPENMPI <nl> void MPIWrapperMpi : : Bcast ( size_t * pData , size_t nData , size_t srcRank ) <nl> } <nl> # endif <nl> } <nl> + <nl> void MPIWrapperMpi : : Bcast ( double * pData , size_t nData , size_t srcRank ) <nl> { <nl> # if HAS_OPENMPI <nl> void MPIWrapperMpi : : Bcast ( double * pData , size_t nData , size_t srcRank ) <nl> } <nl> # endif <nl> } <nl> + <nl> void MPIWrapperMpi : : Bcast ( float * pData , size_t nData , size_t srcRank ) <nl> { <nl> # if HAS_OPENMPI <nl> void MPIWrapperMpi : : Bcast ( float * pData , size_t nData , size_t srcRank ) <nl> # endif <nl> } <nl> <nl> - / / wait for all ranks to reach here <nl> - void MPIWrapperMpi : : WaitAll ( ) <nl> - { <nl> - # if HAS_OPENMPI <nl> - MPI_Barrier ( m_currentComm ) | | MpiFail ( " waitall : MPI_Barrier " ) ; <nl> - # endif <nl> - } <nl> - <nl> } } } <nl> | shuffling code for restucturing | microsoft/CNTK | 47bcaa953ec9c1f494218f05b61121b063bd55bd | 2016-10-13T09:32:01Z |
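The wrapper in the row above determines the MPI world size from the launcher's environment so it can run before (or without) MPI initialization, reading `PMI_SIZE` under MS-MPI on Windows and `OMPI_COMM_WORLD_SIZE` under OpenMPI. A condensed version of that check:

```cpp
#include <cstdlib>
#include <string>

// Returns the number of MPI processes as reported by the launcher's
// environment, or 0 when not running under mpiexec/mpirun. No MPI calls
// are made, so this is safe before MPI_Init.
int DetectMpiWorldSize() {
#ifdef _WIN32
  const char* p = std::getenv("PMI_SIZE");             // MS-MPI
#else
  const char* p = std::getenv("OMPI_COMM_WORLD_SIZE"); // OpenMPI
#endif
  return p ? std::stoi(std::string(p)) : 0;
}
```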
mmm a / . ci / test_r_package . sh <nl> ppp b / . ci / test_r_package . sh <nl> if grep - q - R " WARNING " " $ LOG_FILE_NAME " ; then <nl> exit - 1 <nl> fi <nl> <nl> - ALLOWED_CHECK_NOTES = 2 <nl> + ALLOWED_CHECK_NOTES = 3 <nl> NUM_CHECK_NOTES = $ ( <nl> cat $ { LOG_FILE_NAME } \ <nl> | grep - e ' ^ Status : . * NOTE . * ' \ <nl> mmm a / docs / Parameters - Tuning . rst <nl> ppp b / docs / Parameters - Tuning . rst <nl> Deal with Over - fitting <nl> <nl> - Try ` ` extra_trees ` ` <nl> <nl> + - Try increasing ` ` path_smooth ` ` <nl> + <nl> . . _Optuna : https : / / medium . com / optuna / lightgbm - tuner - new - optuna - integration - for - hyperparameter - optimization - 8b7095e99258 <nl> mmm a / docs / Parameters . rst <nl> ppp b / docs / Parameters . rst <nl> Learning Control Parameters <nl> <nl> - applied once per forest <nl> <nl> + - ` ` path_smooth ` ` : raw - html : ` < a id = " path_smooth " title = " Permalink to this parameter " href = " # path_smooth " > & # x1F517 ; & # xFE0E ; < / a > ` , default = ` ` 0 ` ` , type = double , constraints : ` ` path_smooth > = 0 . 0 ` ` <nl> + <nl> + - controls smoothing applied to tree nodes <nl> + <nl> + - helps prevent overfitting on leaves with few samples <nl> + <nl> + - if set to zero , no smoothing is applied <nl> + <nl> + - if ` ` path_smooth > 0 ` ` then ` ` min_data_in_leaf ` ` must be at least ` ` 2 ` ` <nl> + <nl> + - larger values give stronger regularisation <nl> + <nl> + - the weight of each node is ` ` ( n / path_smooth ) * w + w_p / ( n / path_smooth + 1 ) ` ` , where ` ` n ` ` is the number of samples in the node , ` ` w ` ` is the optimal node weight to minimise the loss ( approximately ` ` - sum_gradients / sum_hessians ` ` ) , and ` ` w_p ` ` is the weight of the parent node <nl> + <nl> + - note that the parent output ` ` w_p ` ` itself has smoothing applied , unless it is the root node , so that the smoothing effect accumulates with the tree depth <nl> + <nl> - ` ` verbosity ` ` : raw - html : ` < a id = " verbosity " title = " Permalink to this parameter " href = " # verbosity " > & # x1F517 ; & # xFE0E ; < / a > ` , default = ` ` 1 ` ` , type = int , aliases : ` ` verbose ` ` <nl> <nl> - controls the level of LightGBM ' s verbosity <nl> mmm a / include / LightGBM / config . h <nl> ppp b / include / LightGBM / config . h <nl> struct Config { <nl> / / desc = applied once per forest <nl> std : : vector < double > cegb_penalty_feature_coupled ; <nl> <nl> + / / check = > = 0 . 
0 <nl> + / / desc = controls smoothing applied to tree nodes <nl> + / / desc = helps prevent overfitting on leaves with few samples <nl> + / / desc = if set to zero , no smoothing is applied <nl> + / / desc = if ` ` path_smooth > 0 ` ` then ` ` min_data_in_leaf ` ` must be at least ` ` 2 ` ` <nl> + / / desc = larger values give stronger regularisation <nl> + / / descl2 = the weight of each node is ` ` ( n / path_smooth ) * w + w_p / ( n / path_smooth + 1 ) ` ` , where ` ` n ` ` is the number of samples in the node , ` ` w ` ` is the optimal node weight to minimise the loss ( approximately ` ` - sum_gradients / sum_hessians ` ` ) , and ` ` w_p ` ` is the weight of the parent node <nl> + / / descl2 = note that the parent output ` ` w_p ` ` itself has smoothing applied , unless it is the root node , so that the smoothing effect accumulates with the tree depth <nl> + double path_smooth = 0 ; <nl> + <nl> / / alias = verbose <nl> / / desc = controls the level of LightGBM ' s verbosity <nl> / / desc = ` ` < 0 ` ` : Fatal , ` ` = 0 ` ` : Error ( Warning ) , ` ` = 1 ` ` : Info , ` ` > 1 ` ` : Debug <nl> mmm a / include / LightGBM / tree . h <nl> ppp b / include / LightGBM / tree . h <nl> class Tree { <nl> / * ! \ brief Get depth of specific leaf * / <nl> inline int leaf_depth ( int leaf_idx ) const { return leaf_depth_ [ leaf_idx ] ; } <nl> <nl> + / * ! \ brief Get parent of specific leaf * / <nl> + inline int leaf_parent ( int leaf_idx ) const { return leaf_parent_ [ leaf_idx ] ; } <nl> + <nl> / * ! \ brief Get feature of specific split * / <nl> inline int split_feature ( int split_idx ) const { return split_feature_ [ split_idx ] ; } <nl> <nl> class Tree { <nl> return split_feature_inner_ [ node_idx ] ; <nl> } <nl> <nl> - inline int leaf_parent ( int leaf_idx ) const { return leaf_parent_ [ leaf_idx ] ; } <nl> - <nl> inline uint32_t threshold_in_bin ( int node_idx ) const { <nl> return threshold_in_bin_ [ node_idx ] ; <nl> } <nl> mmm a / src / io / config . cpp <nl> ppp b / src / io / config . cpp <nl> void Config : : CheckParamConflict ( ) { <nl> force_col_wise = true ; <nl> force_row_wise = false ; <nl> } <nl> + / / min_data_in_leaf must be at least 2 if path smoothing is active . This is because when the split is calculated <nl> + / / the count is calculated using the proportion of hessian in the leaf which is rounded up to nearest int , so it can <nl> + / / be 1 when there is actually no data in the leaf . In rare cases this can cause a bug because with path smoothing the <nl> + / / calculated split gain can be positive even with zero gradient and hessian . <nl> + if ( path_smooth > kEpsilon & & min_data_in_leaf < 2 ) { <nl> + min_data_in_leaf = 2 ; <nl> + Log : : Warning ( " min_data_in_leaf has been increased to 2 because this is required when path smoothing is active . " ) ; <nl> + } <nl> if ( is_parallel & & monotone_constraints_method = = std : : string ( " intermediate " ) ) { <nl> / / In distributed mode , local node doesn ' t have histograms on all features , cannot perform " intermediate " monotone constraints . <nl> Log : : Warning ( " Cannot use \ " intermediate \ " monotone constraints in parallel learning , auto set to \ " basic \ " method . " ) ; <nl> mmm a / src / io / config_auto . cpp <nl> ppp b / src / io / config_auto . 
cpp <nl> const std : : unordered_set < std : : string > & Config : : parameter_set ( ) { <nl> " cegb_penalty_split " , <nl> " cegb_penalty_feature_lazy " , <nl> " cegb_penalty_feature_coupled " , <nl> + " path_smooth " , <nl> " verbosity " , <nl> " input_model " , <nl> " output_model " , <nl> void Config : : GetMembersFromString ( const std : : unordered_map < std : : string , std : : str <nl> cegb_penalty_feature_coupled = Common : : StringToArray < double > ( tmp_str , ' , ' ) ; <nl> } <nl> <nl> + GetDouble ( params , " path_smooth " , & path_smooth ) ; <nl> + CHECK_GE ( path_smooth , 0 . 0 ) ; <nl> + <nl> GetInt ( params , " verbosity " , & verbosity ) ; <nl> <nl> GetString ( params , " input_model " , & input_model ) ; <nl> std : : string Config : : SaveMembersToString ( ) const { <nl> str_buf < < " [ cegb_penalty_split : " < < cegb_penalty_split < < " ] \ n " ; <nl> str_buf < < " [ cegb_penalty_feature_lazy : " < < Common : : Join ( cegb_penalty_feature_lazy , " , " ) < < " ] \ n " ; <nl> str_buf < < " [ cegb_penalty_feature_coupled : " < < Common : : Join ( cegb_penalty_feature_coupled , " , " ) < < " ] \ n " ; <nl> + str_buf < < " [ path_smooth : " < < path_smooth < < " ] \ n " ; <nl> str_buf < < " [ verbosity : " < < verbosity < < " ] \ n " ; <nl> str_buf < < " [ max_bin : " < < max_bin < < " ] \ n " ; <nl> str_buf < < " [ max_bin_by_feature : " < < Common : : Join ( max_bin_by_feature , " , " ) < < " ] \ n " ; <nl> mmm a / src / treelearner / feature_histogram . hpp <nl> ppp b / src / treelearner / feature_histogram . hpp <nl> class FeatureHistogram { <nl> void FindBestThreshold ( double sum_gradient , double sum_hessian , <nl> data_size_t num_data , <nl> const ConstraintEntry & constraints , <nl> + double parent_output , <nl> SplitInfo * output ) { <nl> output - > default_left = true ; <nl> output - > gain = kMinScore ; <nl> find_best_threshold_fun_ ( sum_gradient , sum_hessian + 2 * kEpsilon , num_data , <nl> - constraints , output ) ; <nl> + constraints , parent_output , output ) ; <nl> output - > gain * = meta_ - > penalty ; <nl> } <nl> <nl> - template < bool USE_RAND , bool USE_L1 , bool USE_MAX_OUTPUT > <nl> - double BeforeNumercal ( double sum_gradient , double sum_hessian , <nl> + template < bool USE_RAND , bool USE_L1 , bool USE_MAX_OUTPUT , bool USE_SMOOTHING > <nl> + double BeforeNumercal ( double sum_gradient , double sum_hessian , double parent_output , data_size_t num_data , <nl> SplitInfo * output , int * rand_threshold ) { <nl> is_splittable_ = false ; <nl> output - > monotone_type = meta_ - > monotone_type ; <nl> - double gain_shift = GetLeafGain < USE_L1 , USE_MAX_OUTPUT > ( <nl> - sum_gradient , sum_hessian , meta_ - > config - > lambda_l1 , <nl> - meta_ - > config - > lambda_l2 , meta_ - > config - > max_delta_step ) ; <nl> + <nl> + double gain_shift = GetLeafGain < USE_L1 , USE_MAX_OUTPUT , USE_SMOOTHING > ( <nl> + sum_gradient , sum_hessian , meta_ - > config - > lambda_l1 , meta_ - > config - > lambda_l2 , <nl> + meta_ - > config - > max_delta_step , meta_ - > config - > path_smooth , num_data , parent_output ) ; <nl> * rand_threshold = 0 ; <nl> if ( USE_RAND ) { <nl> if ( meta_ - > num_bin - 2 > 0 ) { <nl> class FeatureHistogram { <nl> <nl> template < bool USE_RAND , bool USE_MC , bool USE_L1 , bool USE_MAX_OUTPUT > <nl> void FuncForNumricalL2 ( ) { <nl> - # define TEMPLATE_PREFIX USE_RAND , USE_MC , USE_L1 , USE_MAX_OUTPUT <nl> + if ( meta_ - > config - > path_smooth > kEpsilon ) { <nl> + FuncForNumricalL3 < USE_RAND , USE_MC , USE_L1 , USE_MAX_OUTPUT , 
true > ( ) ; <nl> + } else { <nl> + FuncForNumricalL3 < USE_RAND , USE_MC , USE_L1 , USE_MAX_OUTPUT , false > ( ) ; <nl> + } <nl> + } <nl> + <nl> + template < bool USE_RAND , bool USE_MC , bool USE_L1 , bool USE_MAX_OUTPUT , bool USE_SMOOTHING > <nl> + void FuncForNumricalL3 ( ) { <nl> + # define TEMPLATE_PREFIX USE_RAND , USE_MC , USE_L1 , USE_MAX_OUTPUT , USE_SMOOTHING <nl> # define LAMBDA_ARGUMENTS \ <nl> double sum_gradient , double sum_hessian , data_size_t num_data , \ <nl> - const ConstraintEntry & constraints , SplitInfo * output <nl> - # define BEFORE_ARGUMENTS sum_gradient , sum_hessian , output , & rand_threshold <nl> + const ConstraintEntry & constraints , double parent_output , SplitInfo * output <nl> + # define BEFORE_ARGUMENTS sum_gradient , sum_hessian , parent_output , num_data , output , & rand_threshold <nl> # define FUNC_ARGUMENTS \ <nl> - sum_gradient , sum_hessian , num_data , constraints , min_gain_shift , output , \ <nl> - rand_threshold <nl> + sum_gradient , sum_hessian , num_data , constraints , min_gain_shift , \ <nl> + output , rand_threshold , parent_output <nl> <nl> if ( meta_ - > num_bin > 2 & & meta_ - > missing_type ! = MissingType : : None ) { <nl> if ( meta_ - > missing_type = = MissingType : : Zero ) { <nl> find_best_threshold_fun_ = [ = ] ( LAMBDA_ARGUMENTS ) { <nl> int rand_threshold = 0 ; <nl> double min_gain_shift = <nl> - BeforeNumercal < USE_RAND , USE_L1 , USE_MAX_OUTPUT > ( <nl> + BeforeNumercal < USE_RAND , USE_L1 , USE_MAX_OUTPUT , USE_SMOOTHING > ( <nl> BEFORE_ARGUMENTS ) ; <nl> FindBestThresholdSequentially < TEMPLATE_PREFIX , true , true , false > ( <nl> FUNC_ARGUMENTS ) ; <nl> class FeatureHistogram { <nl> find_best_threshold_fun_ = [ = ] ( LAMBDA_ARGUMENTS ) { <nl> int rand_threshold = 0 ; <nl> double min_gain_shift = <nl> - BeforeNumercal < USE_RAND , USE_L1 , USE_MAX_OUTPUT > ( <nl> + BeforeNumercal < USE_RAND , USE_L1 , USE_MAX_OUTPUT , USE_SMOOTHING > ( <nl> BEFORE_ARGUMENTS ) ; <nl> FindBestThresholdSequentially < TEMPLATE_PREFIX , true , false , true > ( <nl> FUNC_ARGUMENTS ) ; <nl> class FeatureHistogram { <nl> find_best_threshold_fun_ = [ = ] ( LAMBDA_ARGUMENTS ) { <nl> int rand_threshold = 0 ; <nl> double min_gain_shift = <nl> - BeforeNumercal < USE_RAND , USE_L1 , USE_MAX_OUTPUT > ( <nl> + BeforeNumercal < USE_RAND , USE_L1 , USE_MAX_OUTPUT , USE_SMOOTHING > ( <nl> BEFORE_ARGUMENTS ) ; <nl> FindBestThresholdSequentially < TEMPLATE_PREFIX , true , false , false > ( <nl> FUNC_ARGUMENTS ) ; <nl> class FeatureHistogram { <nl> find_best_threshold_fun_ = [ = ] ( LAMBDA_ARGUMENTS ) { <nl> int rand_threshold = 0 ; <nl> double min_gain_shift = <nl> - BeforeNumercal < USE_RAND , USE_L1 , USE_MAX_OUTPUT > ( <nl> + BeforeNumercal < USE_RAND , USE_L1 , USE_MAX_OUTPUT , USE_SMOOTHING > ( <nl> BEFORE_ARGUMENTS ) ; <nl> - FindBestThresholdSequentially < USE_RAND , USE_MC , USE_L1 , <nl> - USE_MAX_OUTPUT , true , false , false > ( <nl> + FindBestThresholdSequentially < TEMPLATE_PREFIX , true , false , false > ( <nl> FUNC_ARGUMENTS ) ; <nl> output - > default_left = false ; <nl> } ; <nl> class FeatureHistogram { <nl> <nl> template < bool USE_RAND , bool USE_MC > <nl> void FuncForCategoricalL1 ( ) { <nl> + if ( meta_ - > config - > path_smooth > kEpsilon ) { <nl> + FuncForCategoricalL2 < USE_RAND , USE_MC , true > ( ) ; <nl> + } else { <nl> + FuncForCategoricalL2 < USE_RAND , USE_MC , false > ( ) ; <nl> + } <nl> + } <nl> + <nl> + template < bool USE_RAND , bool USE_MC , bool USE_SMOOTHING > <nl> + void FuncForCategoricalL2 ( ) { <nl> # define 
ARGUMENTS \ <nl> std : : placeholders : : _1 , std : : placeholders : : _2 , std : : placeholders : : _3 , \ <nl> - std : : placeholders : : _4 , std : : placeholders : : _5 <nl> + std : : placeholders : : _4 , std : : placeholders : : _5 , std : : placeholders : : _6 <nl> if ( meta_ - > config - > lambda_l1 > 0 ) { <nl> if ( meta_ - > config - > max_delta_step > 0 ) { <nl> find_best_threshold_fun_ = <nl> std : : bind ( & FeatureHistogram : : FindBestThresholdCategoricalInner < <nl> - USE_RAND , USE_MC , true , true > , <nl> + USE_RAND , USE_MC , true , true , USE_SMOOTHING > , <nl> this , ARGUMENTS ) ; <nl> } else { <nl> find_best_threshold_fun_ = <nl> std : : bind ( & FeatureHistogram : : FindBestThresholdCategoricalInner < <nl> - USE_RAND , USE_MC , true , false > , <nl> + USE_RAND , USE_MC , true , false , USE_SMOOTHING > , <nl> this , ARGUMENTS ) ; <nl> } <nl> } else { <nl> if ( meta_ - > config - > max_delta_step > 0 ) { <nl> find_best_threshold_fun_ = <nl> std : : bind ( & FeatureHistogram : : FindBestThresholdCategoricalInner < <nl> - USE_RAND , USE_MC , false , true > , <nl> + USE_RAND , USE_MC , false , true , USE_SMOOTHING > , <nl> this , ARGUMENTS ) ; <nl> } else { <nl> find_best_threshold_fun_ = <nl> std : : bind ( & FeatureHistogram : : FindBestThresholdCategoricalInner < <nl> - USE_RAND , USE_MC , false , false > , <nl> + USE_RAND , USE_MC , false , false , USE_SMOOTHING > , <nl> this , ARGUMENTS ) ; <nl> } <nl> } <nl> # undef ARGUMENTS <nl> } <nl> <nl> - template < bool USE_RAND , bool USE_MC , bool USE_L1 , bool USE_MAX_OUTPUT > <nl> + template < bool USE_RAND , bool USE_MC , bool USE_L1 , bool USE_MAX_OUTPUT , bool USE_SMOOTHING > <nl> void FindBestThresholdCategoricalInner ( double sum_gradient , <nl> double sum_hessian , <nl> data_size_t num_data , <nl> const ConstraintEntry & constraints , <nl> + double parent_output , <nl> SplitInfo * output ) { <nl> is_splittable_ = false ; <nl> output - > default_left = false ; <nl> class FeatureHistogram { <nl> data_size_t best_left_count = 0 ; <nl> double best_sum_left_gradient = 0 ; <nl> double best_sum_left_hessian = 0 ; <nl> - double gain_shift = GetLeafGain < USE_L1 , USE_MAX_OUTPUT > ( <nl> - sum_gradient , sum_hessian , meta_ - > config - > lambda_l1 , <nl> - meta_ - > config - > lambda_l2 , meta_ - > config - > max_delta_step ) ; <nl> + double gain_shift ; <nl> + if ( USE_SMOOTHING ) { <nl> + gain_shift = GetLeafGainGivenOutput < USE_L1 > ( <nl> + sum_gradient , sum_hessian , meta_ - > config - > lambda_l1 , meta_ - > config - > lambda_l2 , parent_output ) ; <nl> + } else { <nl> + / / Need special case for no smoothing to preserve existing behaviour . If no smoothing , the parent output is calculated <nl> + / / with the larger categorical l2 , whereas min_split_gain uses the original l2 . 
<nl> + gain_shift = GetLeafGain < USE_L1 , USE_MAX_OUTPUT , false > ( sum_gradient , sum_hessian , <nl> + meta_ - > config - > lambda_l1 , meta_ - > config - > lambda_l2 , meta_ - > config - > max_delta_step , 0 , <nl> + num_data , 0 ) ; <nl> + } <nl> <nl> double min_gain_shift = gain_shift + meta_ - > config - > min_gain_to_split ; <nl> bool is_full_categorical = meta_ - > missing_type = = MissingType : : None ; <nl> class FeatureHistogram { <nl> } <nl> } <nl> / / current split gain <nl> - double current_gain = GetSplitGains < USE_MC , USE_L1 , USE_MAX_OUTPUT > ( <nl> + double current_gain = GetSplitGains < USE_MC , USE_L1 , USE_MAX_OUTPUT , USE_SMOOTHING > ( <nl> sum_other_gradient , sum_other_hessian , grad , hess + kEpsilon , <nl> meta_ - > config - > lambda_l1 , l2 , meta_ - > config - > max_delta_step , <nl> - constraints , 0 ) ; <nl> + constraints , 0 , meta_ - > config - > path_smooth , other_count , cnt , parent_output ) ; <nl> / / gain with split is worse than without split <nl> if ( current_gain < = min_gain_shift ) { <nl> continue ; <nl> class FeatureHistogram { <nl> continue ; <nl> } <nl> } <nl> - double current_gain = GetSplitGains < USE_MC , USE_L1 , USE_MAX_OUTPUT > ( <nl> + double current_gain = GetSplitGains < USE_MC , USE_L1 , USE_MAX_OUTPUT , USE_SMOOTHING > ( <nl> sum_left_gradient , sum_left_hessian , sum_right_gradient , <nl> sum_right_hessian , meta_ - > config - > lambda_l1 , l2 , <nl> - meta_ - > config - > max_delta_step , constraints , 0 ) ; <nl> + meta_ - > config - > max_delta_step , constraints , 0 , meta_ - > config - > path_smooth , <nl> + left_count , right_count , parent_output ) ; <nl> if ( current_gain < = min_gain_shift ) { <nl> continue ; <nl> } <nl> class FeatureHistogram { <nl> } <nl> <nl> if ( is_splittable_ ) { <nl> - output - > left_output = <nl> - CalculateSplittedLeafOutput < USE_MC , USE_L1 , USE_MAX_OUTPUT > ( <nl> - best_sum_left_gradient , best_sum_left_hessian , <nl> - meta_ - > config - > lambda_l1 , l2 , meta_ - > config - > max_delta_step , <nl> - constraints ) ; <nl> + output - > left_output = CalculateSplittedLeafOutput < USE_MC , USE_L1 , USE_MAX_OUTPUT , USE_SMOOTHING > ( <nl> + best_sum_left_gradient , best_sum_left_hessian , <nl> + meta_ - > config - > lambda_l1 , l2 , meta_ - > config - > max_delta_step , <nl> + constraints , meta_ - > config - > path_smooth , best_left_count , parent_output ) ; <nl> output - > left_count = best_left_count ; <nl> output - > left_sum_gradient = best_sum_left_gradient ; <nl> output - > left_sum_hessian = best_sum_left_hessian - kEpsilon ; <nl> - output - > right_output = <nl> - CalculateSplittedLeafOutput < USE_MC , USE_L1 , USE_MAX_OUTPUT > ( <nl> - sum_gradient - best_sum_left_gradient , <nl> - sum_hessian - best_sum_left_hessian , meta_ - > config - > lambda_l1 , l2 , <nl> - meta_ - > config - > max_delta_step , constraints ) ; <nl> + output - > right_output = CalculateSplittedLeafOutput < USE_MC , USE_L1 , USE_MAX_OUTPUT , USE_SMOOTHING > ( <nl> + sum_gradient - best_sum_left_gradient , <nl> + sum_hessian - best_sum_left_hessian , meta_ - > config - > lambda_l1 , l2 , <nl> + meta_ - > config - > max_delta_step , constraints , meta_ - > config - > path_smooth , <nl> + num_data - best_left_count , parent_output ) ; <nl> output - > right_count = num_data - best_left_count ; <nl> output - > right_sum_gradient = sum_gradient - best_sum_left_gradient ; <nl> output - > right_sum_hessian = <nl> class FeatureHistogram { <nl> <nl> void GatherInfoForThreshold ( double sum_gradient , double sum_hessian , 
<nl> uint32_t threshold , data_size_t num_data , <nl> - SplitInfo * output ) { <nl> + double parent_output , SplitInfo * output ) { <nl> if ( meta_ - > bin_type = = BinType : : NumericalBin ) { <nl> GatherInfoForThresholdNumerical ( sum_gradient , sum_hessian , threshold , <nl> - num_data , output ) ; <nl> + num_data , parent_output , output ) ; <nl> } else { <nl> GatherInfoForThresholdCategorical ( sum_gradient , sum_hessian , threshold , <nl> - num_data , output ) ; <nl> + num_data , parent_output , output ) ; <nl> } <nl> } <nl> <nl> void GatherInfoForThresholdNumerical ( double sum_gradient , double sum_hessian , <nl> uint32_t threshold , data_size_t num_data , <nl> - SplitInfo * output ) { <nl> - double gain_shift = GetLeafGain < true , true > ( <nl> + double parent_output , SplitInfo * output ) { <nl> + bool use_smoothing = meta_ - > config - > path_smooth > kEpsilon ; <nl> + if ( use_smoothing ) { <nl> + GatherInfoForThresholdNumericalInner < true > ( sum_gradient , sum_hessian , <nl> + threshold , num_data , <nl> + parent_output , output ) ; <nl> + } else { <nl> + GatherInfoForThresholdNumericalInner < false > ( sum_gradient , sum_hessian , <nl> + threshold , num_data , <nl> + parent_output , output ) ; <nl> + } <nl> + } <nl> + <nl> + template < bool USE_SMOOTHING > <nl> + void GatherInfoForThresholdNumericalInner ( double sum_gradient , double sum_hessian , <nl> + uint32_t threshold , data_size_t num_data , <nl> + double parent_output , SplitInfo * output ) { <nl> + double gain_shift = GetLeafGainGivenOutput < true > ( <nl> sum_gradient , sum_hessian , meta_ - > config - > lambda_l1 , <nl> - meta_ - > config - > lambda_l2 , meta_ - > config - > max_delta_step ) ; <nl> + meta_ - > config - > lambda_l2 , parent_output ) ; <nl> double min_gain_shift = gain_shift + meta_ - > config - > min_gain_to_split ; <nl> <nl> / / do stuff here <nl> class FeatureHistogram { <nl> double sum_left_hessian = sum_hessian - sum_right_hessian ; <nl> data_size_t left_count = num_data - right_count ; <nl> double current_gain = <nl> - GetLeafGain < true , true > ( <nl> + GetLeafGain < true , true , USE_SMOOTHING > ( <nl> sum_left_gradient , sum_left_hessian , meta_ - > config - > lambda_l1 , <nl> - meta_ - > config - > lambda_l2 , meta_ - > config - > max_delta_step ) + <nl> - GetLeafGain < true , true > ( <nl> + meta_ - > config - > lambda_l2 , meta_ - > config - > max_delta_step , <nl> + meta_ - > config - > path_smooth , left_count , parent_output ) + <nl> + GetLeafGain < true , true , USE_SMOOTHING > ( <nl> sum_right_gradient , sum_right_hessian , meta_ - > config - > lambda_l1 , <nl> - meta_ - > config - > lambda_l2 , meta_ - > config - > max_delta_step ) ; <nl> + meta_ - > config - > lambda_l2 , meta_ - > config - > max_delta_step , <nl> + meta_ - > config - > path_smooth , right_count , parent_output ) ; <nl> <nl> / / gain with split is worse than without split <nl> if ( std : : isnan ( current_gain ) | | current_gain < = min_gain_shift ) { <nl> class FeatureHistogram { <nl> <nl> / / update split information <nl> output - > threshold = threshold ; <nl> - output - > left_output = CalculateSplittedLeafOutput < true , true > ( <nl> + output - > left_output = CalculateSplittedLeafOutput < true , true , USE_SMOOTHING > ( <nl> sum_left_gradient , sum_left_hessian , meta_ - > config - > lambda_l1 , <nl> - meta_ - > config - > lambda_l2 , meta_ - > config - > max_delta_step ) ; <nl> + meta_ - > config - > lambda_l2 , meta_ - > config - > max_delta_step , <nl> + meta_ - > config - > path_smooth , left_count , 
parent_output ) ; <nl> output - > left_count = left_count ; <nl> output - > left_sum_gradient = sum_left_gradient ; <nl> output - > left_sum_hessian = sum_left_hessian - kEpsilon ; <nl> - output - > right_output = CalculateSplittedLeafOutput < true , true > ( <nl> + output - > right_output = CalculateSplittedLeafOutput < true , true , USE_SMOOTHING > ( <nl> sum_gradient - sum_left_gradient , sum_hessian - sum_left_hessian , <nl> meta_ - > config - > lambda_l1 , meta_ - > config - > lambda_l2 , <nl> - meta_ - > config - > max_delta_step ) ; <nl> + meta_ - > config - > max_delta_step , meta_ - > config - > path_smooth , <nl> + right_count , parent_output ) ; <nl> output - > right_count = num_data - left_count ; <nl> output - > right_sum_gradient = sum_gradient - sum_left_gradient ; <nl> output - > right_sum_hessian = sum_hessian - sum_left_hessian - kEpsilon ; <nl> class FeatureHistogram { <nl> output - > default_left = true ; <nl> } <nl> <nl> - void GatherInfoForThresholdCategorical ( double sum_gradient , <nl> - double sum_hessian , uint32_t threshold , <nl> - data_size_t num_data , <nl> - SplitInfo * output ) { <nl> + void GatherInfoForThresholdCategorical ( double sum_gradient , double sum_hessian , <nl> + uint32_t threshold , data_size_t num_data , <nl> + double parent_output , SplitInfo * output ) { <nl> + bool use_smoothing = meta_ - > config - > path_smooth > kEpsilon ; <nl> + if ( use_smoothing ) { <nl> + GatherInfoForThresholdCategoricalInner < true > ( sum_gradient , sum_hessian , threshold , <nl> + num_data , parent_output , output ) ; <nl> + } else { <nl> + GatherInfoForThresholdCategoricalInner < false > ( sum_gradient , sum_hessian , threshold , <nl> + num_data , parent_output , output ) ; <nl> + } <nl> + } <nl> + <nl> + template < bool USE_SMOOTHING > <nl> + void GatherInfoForThresholdCategoricalInner ( double sum_gradient , <nl> + double sum_hessian , uint32_t threshold , <nl> + data_size_t num_data , double parent_output , <nl> + SplitInfo * output ) { <nl> / / get SplitInfo for a given one - hot categorical split . 
<nl> output - > default_left = false ; <nl> - double gain_shift = GetLeafGain < true , true > ( <nl> - sum_gradient , sum_hessian , meta_ - > config - > lambda_l1 , <nl> - meta_ - > config - > lambda_l2 , meta_ - > config - > max_delta_step ) ; <nl> + double gain_shift = GetLeafGainGivenOutput < true > ( <nl> + sum_gradient , sum_hessian , meta_ - > config - > lambda_l1 , meta_ - > config - > lambda_l2 , parent_output ) ; <nl> double min_gain_shift = gain_shift + meta_ - > config - > min_gain_to_split ; <nl> bool is_full_categorical = meta_ - > missing_type = = MissingType : : None ; <nl> int used_bin = meta_ - > num_bin - 1 + is_full_categorical ; <nl> class FeatureHistogram { <nl> double sum_right_gradient = sum_gradient - sum_left_gradient ; <nl> / / current split gain <nl> double current_gain = <nl> - GetLeafGain < true , true > ( sum_right_gradient , sum_right_hessian , <nl> - meta_ - > config - > lambda_l1 , l2 , <nl> - meta_ - > config - > max_delta_step ) + <nl> - GetLeafGain < true , true > ( sum_left_gradient , sum_left_hessian , <nl> - meta_ - > config - > lambda_l1 , l2 , <nl> - meta_ - > config - > max_delta_step ) ; <nl> + GetLeafGain < true , true , USE_SMOOTHING > ( sum_right_gradient , sum_right_hessian , <nl> + meta_ - > config - > lambda_l1 , l2 , <nl> + meta_ - > config - > max_delta_step , <nl> + meta_ - > config - > path_smooth , right_count , <nl> + parent_output ) + <nl> + GetLeafGain < true , true , USE_SMOOTHING > ( sum_left_gradient , sum_left_hessian , <nl> + meta_ - > config - > lambda_l1 , l2 , <nl> + meta_ - > config - > max_delta_step , <nl> + meta_ - > config - > path_smooth , left_count , <nl> + parent_output ) ; <nl> if ( std : : isnan ( current_gain ) | | current_gain < = min_gain_shift ) { <nl> output - > gain = kMinScore ; <nl> Log : : Warning ( <nl> " ' Forced Split ' will be ignored since the gain getting worse . 
" ) ; <nl> return ; <nl> } <nl> - <nl> - output - > left_output = CalculateSplittedLeafOutput < true , true > ( <nl> + output - > left_output = CalculateSplittedLeafOutput < true , true , USE_SMOOTHING > ( <nl> sum_left_gradient , sum_left_hessian , meta_ - > config - > lambda_l1 , l2 , <nl> - meta_ - > config - > max_delta_step ) ; <nl> + meta_ - > config - > max_delta_step , meta_ - > config - > path_smooth , left_count , <nl> + parent_output ) ; <nl> output - > left_count = left_count ; <nl> output - > left_sum_gradient = sum_left_gradient ; <nl> output - > left_sum_hessian = sum_left_hessian - kEpsilon ; <nl> - output - > right_output = CalculateSplittedLeafOutput < true , true > ( <nl> + output - > right_output = CalculateSplittedLeafOutput < true , true , USE_SMOOTHING > ( <nl> sum_right_gradient , sum_right_hessian , meta_ - > config - > lambda_l1 , l2 , <nl> - meta_ - > config - > max_delta_step ) ; <nl> + meta_ - > config - > max_delta_step , meta_ - > config - > path_smooth , right_count , <nl> + parent_output ) ; <nl> output - > right_count = right_count ; <nl> output - > right_sum_gradient = sum_gradient - sum_left_gradient ; <nl> output - > right_sum_hessian = sum_right_hessian - kEpsilon ; <nl> class FeatureHistogram { <nl> return Common : : Sign ( s ) * reg_s ; <nl> } <nl> <nl> - template < bool USE_L1 , bool USE_MAX_OUTPUT > <nl> + template < bool USE_L1 , bool USE_MAX_OUTPUT , bool USE_SMOOTHING > <nl> static double CalculateSplittedLeafOutput ( double sum_gradients , <nl> double sum_hessians , double l1 , <nl> - double l2 , double max_delta_step ) { <nl> + double l2 , double max_delta_step , <nl> + double smoothing , data_size_t num_data , <nl> + double parent_output ) { <nl> + double ret ; <nl> if ( USE_L1 ) { <nl> - double ret = - ThresholdL1 ( sum_gradients , l1 ) / ( sum_hessians + l2 ) ; <nl> - if ( USE_MAX_OUTPUT ) { <nl> - if ( max_delta_step > 0 & & std : : fabs ( ret ) > max_delta_step ) { <nl> - return Common : : Sign ( ret ) * max_delta_step ; <nl> - } <nl> - } <nl> - return ret ; <nl> + ret = - ThresholdL1 ( sum_gradients , l1 ) / ( sum_hessians + l2 ) ; <nl> } else { <nl> - double ret = - sum_gradients / ( sum_hessians + l2 ) ; <nl> - if ( USE_MAX_OUTPUT ) { <nl> - if ( max_delta_step > 0 & & std : : fabs ( ret ) > max_delta_step ) { <nl> - return Common : : Sign ( ret ) * max_delta_step ; <nl> - } <nl> + ret = - sum_gradients / ( sum_hessians + l2 ) ; <nl> + } <nl> + if ( USE_MAX_OUTPUT ) { <nl> + if ( max_delta_step > 0 & & std : : fabs ( ret ) > max_delta_step ) { <nl> + ret = Common : : Sign ( ret ) * max_delta_step ; <nl> } <nl> - return ret ; <nl> } <nl> + if ( USE_SMOOTHING ) { <nl> + ret = ret * ( num_data / smoothing ) / ( num_data / smoothing + 1 ) \ <nl> + + parent_output / ( num_data / smoothing + 1 ) ; <nl> + } <nl> + return ret ; <nl> } <nl> <nl> - template < bool USE_MC , bool USE_L1 , bool USE_MAX_OUTPUT > <nl> + template < bool USE_MC , bool USE_L1 , bool USE_MAX_OUTPUT , bool USE_SMOOTHING > <nl> static double CalculateSplittedLeafOutput ( <nl> double sum_gradients , double sum_hessians , double l1 , double l2 , <nl> - double max_delta_step , const ConstraintEntry & constraints ) { <nl> - double ret = CalculateSplittedLeafOutput < USE_L1 , USE_MAX_OUTPUT > ( <nl> - sum_gradients , sum_hessians , l1 , l2 , max_delta_step ) ; <nl> + double max_delta_step , const ConstraintEntry & constraints , <nl> + double smoothing , data_size_t num_data , double parent_output ) { <nl> + double ret = CalculateSplittedLeafOutput < USE_L1 , USE_MAX_OUTPUT , 
USE_SMOOTHING > ( <nl> + sum_gradients , sum_hessians , l1 , l2 , max_delta_step , smoothing , num_data , parent_output ) ; <nl> if ( USE_MC ) { <nl> if ( ret < constraints . min ) { <nl> ret = constraints . min ; <nl> class FeatureHistogram { <nl> } <nl> <nl> private : <nl> - template < bool USE_MC , bool USE_L1 , bool USE_MAX_OUTPUT > <nl> + template < bool USE_MC , bool USE_L1 , bool USE_MAX_OUTPUT , bool USE_SMOOTHING > <nl> static double GetSplitGains ( double sum_left_gradients , <nl> double sum_left_hessians , <nl> double sum_right_gradients , <nl> double sum_right_hessians , double l1 , double l2 , <nl> double max_delta_step , <nl> const ConstraintEntry & constraints , <nl> - int8_t monotone_constraint ) { <nl> + int8_t monotone_constraint , <nl> + double smoothing , <nl> + data_size_t left_count , <nl> + data_size_t right_count , <nl> + double parent_output ) { <nl> if ( ! USE_MC ) { <nl> - return GetLeafGain < USE_L1 , USE_MAX_OUTPUT > ( sum_left_gradients , <nl> - sum_left_hessians , l1 , l2 , <nl> - max_delta_step ) + <nl> - GetLeafGain < USE_L1 , USE_MAX_OUTPUT > ( sum_right_gradients , <nl> - sum_right_hessians , l1 , l2 , <nl> - max_delta_step ) ; <nl> + return GetLeafGain < USE_L1 , USE_MAX_OUTPUT , USE_SMOOTHING > ( sum_left_gradients , <nl> + sum_left_hessians , l1 , l2 , <nl> + max_delta_step , smoothing , <nl> + left_count , parent_output ) + <nl> + GetLeafGain < USE_L1 , USE_MAX_OUTPUT , USE_SMOOTHING > ( sum_right_gradients , <nl> + sum_right_hessians , l1 , l2 , <nl> + max_delta_step , smoothing , <nl> + right_count , parent_output ) ; <nl> } else { <nl> double left_output = <nl> - CalculateSplittedLeafOutput < USE_MC , USE_L1 , USE_MAX_OUTPUT > ( <nl> + CalculateSplittedLeafOutput < USE_MC , USE_L1 , USE_MAX_OUTPUT , USE_SMOOTHING > ( <nl> sum_left_gradients , sum_left_hessians , l1 , l2 , max_delta_step , <nl> - constraints ) ; <nl> + constraints , smoothing , left_count , parent_output ) ; <nl> double right_output = <nl> - CalculateSplittedLeafOutput < USE_MC , USE_L1 , USE_MAX_OUTPUT > ( <nl> + CalculateSplittedLeafOutput < USE_MC , USE_L1 , USE_MAX_OUTPUT , USE_SMOOTHING > ( <nl> sum_right_gradients , sum_right_hessians , l1 , l2 , max_delta_step , <nl> - constraints ) ; <nl> + constraints , smoothing , right_count , parent_output ) ; <nl> if ( ( ( monotone_constraint > 0 ) & & ( left_output > right_output ) ) | | <nl> ( ( monotone_constraint < 0 ) & & ( left_output < right_output ) ) ) { <nl> return 0 ; <nl> class FeatureHistogram { <nl> } <nl> } <nl> <nl> - template < bool USE_L1 , bool USE_MAX_OUTPUT > <nl> + template < bool USE_L1 , bool USE_MAX_OUTPUT , bool USE_SMOOTHING > <nl> static double GetLeafGain ( double sum_gradients , double sum_hessians , <nl> - double l1 , double l2 , double max_delta_step ) { <nl> - if ( ! USE_MAX_OUTPUT ) { <nl> + double l1 , double l2 , double max_delta_step , <nl> + double smoothing , data_size_t num_data , double parent_output ) { <nl> + if ( ! USE_MAX_OUTPUT & & ! 
USE_SMOOTHING ) { <nl> if ( USE_L1 ) { <nl> const double sg_l1 = ThresholdL1 ( sum_gradients , l1 ) ; <nl> return ( sg_l1 * sg_l1 ) / ( sum_hessians + l2 ) ; <nl> class FeatureHistogram { <nl> return ( sum_gradients * sum_gradients ) / ( sum_hessians + l2 ) ; <nl> } <nl> } else { <nl> - double output = CalculateSplittedLeafOutput < USE_L1 , USE_MAX_OUTPUT > ( <nl> - sum_gradients , sum_hessians , l1 , l2 , max_delta_step ) ; <nl> - return GetLeafGainGivenOutput < USE_L1 > ( sum_gradients , sum_hessians , l1 , l2 , <nl> - output ) ; <nl> + double output = CalculateSplittedLeafOutput < USE_L1 , USE_MAX_OUTPUT , USE_SMOOTHING > ( <nl> + sum_gradients , sum_hessians , l1 , l2 , max_delta_step , smoothing , num_data , parent_output ) ; <nl> + return GetLeafGainGivenOutput < USE_L1 > ( sum_gradients , sum_hessians , l1 , l2 , output ) ; <nl> } <nl> } <nl> <nl> class FeatureHistogram { <nl> } <nl> } <nl> <nl> - template < bool USE_RAND , bool USE_MC , bool USE_L1 , bool USE_MAX_OUTPUT , <nl> + template < bool USE_RAND , bool USE_MC , bool USE_L1 , bool USE_MAX_OUTPUT , bool USE_SMOOTHING , <nl> bool REVERSE , bool SKIP_DEFAULT_BIN , bool NA_AS_MISSING > <nl> void FindBestThresholdSequentially ( double sum_gradient , double sum_hessian , <nl> data_size_t num_data , <nl> const ConstraintEntry & constraints , <nl> double min_gain_shift , SplitInfo * output , <nl> - int rand_threshold ) { <nl> + int rand_threshold , double parent_output ) { <nl> const int8_t offset = meta_ - > offset ; <nl> double best_sum_left_gradient = NAN ; <nl> double best_sum_left_hessian = NAN ; <nl> class FeatureHistogram { <nl> } <nl> } <nl> / / current split gain <nl> - double current_gain = GetSplitGains < USE_MC , USE_L1 , USE_MAX_OUTPUT > ( <nl> + double current_gain = GetSplitGains < USE_MC , USE_L1 , USE_MAX_OUTPUT , USE_SMOOTHING > ( <nl> sum_left_gradient , sum_left_hessian , sum_right_gradient , <nl> sum_right_hessian , meta_ - > config - > lambda_l1 , <nl> meta_ - > config - > lambda_l2 , meta_ - > config - > max_delta_step , <nl> - constraints , meta_ - > monotone_type ) ; <nl> + constraints , meta_ - > monotone_type , meta_ - > config - > path_smooth , <nl> + left_count , right_count , parent_output ) ; <nl> / / gain with split is worse than without split <nl> if ( current_gain < = min_gain_shift ) { <nl> continue ; <nl> class FeatureHistogram { <nl> } <nl> } <nl> / / current split gain <nl> - double current_gain = GetSplitGains < USE_MC , USE_L1 , USE_MAX_OUTPUT > ( <nl> + double current_gain = GetSplitGains < USE_MC , USE_L1 , USE_MAX_OUTPUT , USE_SMOOTHING > ( <nl> sum_left_gradient , sum_left_hessian , sum_right_gradient , <nl> sum_right_hessian , meta_ - > config - > lambda_l1 , <nl> meta_ - > config - > lambda_l2 , meta_ - > config - > max_delta_step , <nl> - constraints , meta_ - > monotone_type ) ; <nl> + constraints , meta_ - > monotone_type , meta_ - > config - > path_smooth , left_count , <nl> + right_count , parent_output ) ; <nl> / / gain with split is worse than without split <nl> if ( current_gain < = min_gain_shift ) { <nl> continue ; <nl> class FeatureHistogram { <nl> / / update split information <nl> output - > threshold = best_threshold ; <nl> output - > left_output = <nl> - CalculateSplittedLeafOutput < USE_MC , USE_L1 , USE_MAX_OUTPUT > ( <nl> + CalculateSplittedLeafOutput < USE_MC , USE_L1 , USE_MAX_OUTPUT , USE_SMOOTHING > ( <nl> best_sum_left_gradient , best_sum_left_hessian , <nl> meta_ - > config - > lambda_l1 , meta_ - > config - > lambda_l2 , <nl> - meta_ - > config - > max_delta_step 
, constraints ) ; <nl> + meta_ - > config - > max_delta_step , constraints , meta_ - > config - > path_smooth , <nl> + best_left_count , parent_output ) ; <nl> output - > left_count = best_left_count ; <nl> output - > left_sum_gradient = best_sum_left_gradient ; <nl> output - > left_sum_hessian = best_sum_left_hessian - kEpsilon ; <nl> output - > right_output = <nl> - CalculateSplittedLeafOutput < USE_MC , USE_L1 , USE_MAX_OUTPUT > ( <nl> + CalculateSplittedLeafOutput < USE_MC , USE_L1 , USE_MAX_OUTPUT , USE_SMOOTHING > ( <nl> sum_gradient - best_sum_left_gradient , <nl> sum_hessian - best_sum_left_hessian , meta_ - > config - > lambda_l1 , <nl> meta_ - > config - > lambda_l2 , meta_ - > config - > max_delta_step , <nl> - constraints ) ; <nl> + constraints , meta_ - > config - > path_smooth , num_data - best_left_count , <nl> + parent_output ) ; <nl> output - > right_count = num_data - best_left_count ; <nl> output - > right_sum_gradient = sum_gradient - best_sum_left_gradient ; <nl> output - > right_sum_hessian = <nl> class FeatureHistogram { <nl> bool is_splittable_ = true ; <nl> <nl> std : : function < void ( double , double , data_size_t , const ConstraintEntry & , <nl> - SplitInfo * ) > <nl> + double , SplitInfo * ) > <nl> find_best_threshold_fun_ ; <nl> } ; <nl> <nl> class HistogramPool { <nl> if ( old_config - > lambda_l1 ! = config - > lambda_l1 | | <nl> old_config - > monotone_constraints ! = config - > monotone_constraints | | <nl> old_config - > extra_trees ! = config - > extra_trees | | <nl> - old_config - > max_delta_step ! = config - > max_delta_step ) { <nl> + old_config - > max_delta_step ! = config - > max_delta_step | | <nl> + old_config - > path_smooth ! = config - > path_smooth ) { <nl> # pragma omp parallel for schedule ( static ) <nl> for ( int i = 0 ; i < cache_size_ ; + + i ) { <nl> for ( int j = 0 ; j < train_data - > num_features ( ) ; + + j ) { <nl> mmm a / src / treelearner / gpu_tree_learner . cpp <nl> ppp b / src / treelearner / gpu_tree_learner . cpp <nl> void GPUTreeLearner : : Split ( Tree * tree , int best_Leaf , int * left_leaf , int * right <nl> Log : : Fatal ( " Bug in GPU histogram ! split % d : % d , smaller_leaf : % d , larger_leaf : % d \ n " , best_split_info . left_count , best_split_info . right_count , smaller_leaf_splits_ - > num_data_in_leaf ( ) , larger_leaf_splits_ - > num_data_in_leaf ( ) ) ; <nl> } <nl> } else { <nl> - smaller_leaf_splits_ - > Init ( * right_leaf , data_partition_ . get ( ) , best_split_info . right_sum_gradient , best_split_info . right_sum_hessian ) ; <nl> - larger_leaf_splits_ - > Init ( * left_leaf , data_partition_ . get ( ) , best_split_info . left_sum_gradient , best_split_info . left_sum_hessian ) ; <nl> + smaller_leaf_splits_ - > Init ( * right_leaf , data_partition_ . get ( ) , best_split_info . right_sum_gradient , best_split_info . right_sum_hessian , best_split_info . right_output ) ; <nl> + larger_leaf_splits_ - > Init ( * left_leaf , data_partition_ . get ( ) , best_split_info . left_sum_gradient , best_split_info . left_sum_hessian , best_split_info . left_output ) ; <nl> if ( ( best_split_info . left_count ! = larger_leaf_splits_ - > num_data_in_leaf ( ) ) | | <nl> ( best_split_info . right_count ! = smaller_leaf_splits_ - > num_data_in_leaf ( ) ) ) { <nl> Log : : Fatal ( " Bug in GPU histogram ! split % d : % d , smaller_leaf : % d , larger_leaf : % d \ n " , best_split_info . left_count , best_split_info . 
right_count , smaller_leaf_splits_ - > num_data_in_leaf ( ) , larger_leaf_splits_ - > num_data_in_leaf ( ) ) ; <nl> mmm a / src / treelearner / leaf_splits . hpp <nl> ppp b / src / treelearner / leaf_splits . hpp <nl> class LeafSplits { <nl> public : <nl> explicit LeafSplits ( data_size_t num_data ) <nl> : num_data_in_leaf_ ( num_data ) , num_data_ ( num_data ) , <nl> - data_indices_ ( nullptr ) { <nl> + data_indices_ ( nullptr ) , weight_ ( 0 ) { <nl> } <nl> void ResetNumData ( data_size_t num_data ) { <nl> num_data_ = num_data ; <nl> class LeafSplits { <nl> * \ param sum_gradients <nl> * \ param sum_hessians <nl> * / <nl> - void Init ( int leaf , const DataPartition * data_partition , double sum_gradients , double sum_hessians ) { <nl> + void Init ( int leaf , const DataPartition * data_partition , double sum_gradients , <nl> + double sum_hessians , double weight ) { <nl> leaf_index_ = leaf ; <nl> data_indices_ = data_partition - > GetIndexOnLeaf ( leaf , & num_data_in_leaf_ ) ; <nl> sum_gradients_ = sum_gradients ; <nl> sum_hessians_ = sum_hessians ; <nl> + weight_ = weight ; <nl> } <nl> <nl> / * ! <nl> class LeafSplits { <nl> / * ! \ brief Get indices of data of current leaf * / <nl> const data_size_t * data_indices ( ) const { return data_indices_ ; } <nl> <nl> + / * ! \ brief Get weight of current leaf * / <nl> + double weight ( ) const { return weight_ ; } <nl> + <nl> + <nl> <nl> private : <nl> / * ! \ brief current leaf index * / <nl> class LeafSplits { <nl> double sum_hessians_ ; <nl> / * ! \ brief indices of data of current leaf * / <nl> const data_size_t * data_indices_ ; <nl> + / * ! \ brief weight of current leaf * / <nl> + double weight_ ; <nl> } ; <nl> <nl> } / / namespace LightGBM <nl> mmm a / src / treelearner / serial_tree_learner . cpp <nl> ppp b / src / treelearner / serial_tree_learner . cpp <nl> Tree * SerialTreeLearner : : FitByExistingTree ( const Tree * old_tree , const score_t * <nl> sum_grad + = gradients [ idx ] ; <nl> sum_hess + = hessians [ idx ] ; <nl> } <nl> - double output = FeatureHistogram : : CalculateSplittedLeafOutput < true , true > ( <nl> - sum_grad , sum_hess , config_ - > lambda_l1 , config_ - > lambda_l2 , <nl> - config_ - > max_delta_step ) ; <nl> + double output ; <nl> + if ( ( config_ - > path_smooth > kEpsilon ) & ( i > 0 ) ) { <nl> + output = FeatureHistogram : : CalculateSplittedLeafOutput < true , true , true > ( <nl> + sum_grad , sum_hess , config_ - > lambda_l1 , config_ - > lambda_l2 , <nl> + config_ - > max_delta_step , config_ - > path_smooth , cnt_leaf_data , tree - > leaf_parent ( i ) ) ; <nl> + } else { <nl> + output = FeatureHistogram : : CalculateSplittedLeafOutput < true , true , false > ( <nl> + sum_grad , sum_hess , config_ - > lambda_l1 , config_ - > lambda_l2 , <nl> + config_ - > max_delta_step , config_ - > path_smooth , cnt_leaf_data , 0 ) ; <nl> + } <nl> auto old_leaf_output = tree - > LeafOutput ( i ) ; <nl> auto new_leaf_output = output * tree - > shrinkage ( ) ; <nl> tree - > SetLeafOutput ( i , config_ - > refit_decay_rate * old_leaf_output + ( 1 . 0 - config_ - > refit_decay_rate ) * new_leaf_output ) ; <nl> int32_t SerialTreeLearner : : ForceSplits ( Tree * tree , int * left_leaf , <nl> left_leaf_splits - > sum_hessians ( ) , <nl> left_threshold , <nl> left_leaf_splits - > num_data_in_leaf ( ) , <nl> + left_leaf_splits - > weight ( ) , <nl> & left_split ) ; <nl> left_split . 
feature = left_feature ; <nl> forceSplitMap [ * left_leaf ] = left_split ; <nl> int32_t SerialTreeLearner : : ForceSplits ( Tree * tree , int * left_leaf , <nl> right_leaf_splits - > sum_hessians ( ) , <nl> right_threshold , <nl> right_leaf_splits - > num_data_in_leaf ( ) , <nl> + right_leaf_splits - > weight ( ) , <nl> & right_split ) ; <nl> right_split . feature = right_feature ; <nl> forceSplitMap [ * right_leaf ] = right_split ; <nl> void SerialTreeLearner : : SplitInner ( Tree * tree , int best_leaf , int * left_leaf , <nl> CHECK_GT ( best_split_info . left_count , 0 ) ; <nl> smaller_leaf_splits_ - > Init ( * left_leaf , data_partition_ . get ( ) , <nl> best_split_info . left_sum_gradient , <nl> - best_split_info . left_sum_hessian ) ; <nl> + best_split_info . left_sum_hessian , <nl> + best_split_info . left_output ) ; <nl> larger_leaf_splits_ - > Init ( * right_leaf , data_partition_ . get ( ) , <nl> best_split_info . right_sum_gradient , <nl> - best_split_info . right_sum_hessian ) ; <nl> + best_split_info . right_sum_hessian , <nl> + best_split_info . right_output ) ; <nl> } else { <nl> CHECK_GT ( best_split_info . right_count , 0 ) ; <nl> smaller_leaf_splits_ - > Init ( * right_leaf , data_partition_ . get ( ) , <nl> best_split_info . right_sum_gradient , <nl> - best_split_info . right_sum_hessian ) ; <nl> + best_split_info . right_sum_hessian , <nl> + best_split_info . right_output ) ; <nl> larger_leaf_splits_ - > Init ( * left_leaf , data_partition_ . get ( ) , <nl> best_split_info . left_sum_gradient , <nl> - best_split_info . left_sum_hessian ) ; <nl> + best_split_info . left_sum_hessian , <nl> + best_split_info . left_output ) ; <nl> } <nl> auto leaves_need_update = constraints_ - > Update ( <nl> tree , is_numerical_split , * left_leaf , * right_leaf , <nl> void SerialTreeLearner : : ComputeBestSplitForFeature ( <nl> return ; <nl> } <nl> SplitInfo new_split ; <nl> + double parent_output ; <nl> + if ( leaf_splits - > leaf_index ( ) = = 0 ) { <nl> + / / for root leaf the " parent " output is its own output because we don ' t apply any smoothing to the root <nl> + parent_output = FeatureHistogram : : CalculateSplittedLeafOutput < true , true , true , false > ( <nl> + leaf_splits - > sum_gradients ( ) , leaf_splits - > sum_hessians ( ) , config_ - > lambda_l1 , <nl> + config_ - > lambda_l2 , config_ - > max_delta_step , constraints_ - > Get ( leaf_splits - > leaf_index ( ) ) , <nl> + config_ - > path_smooth , static_cast < data_size_t > ( num_data ) , 0 ) ; <nl> + } else { <nl> + parent_output = leaf_splits - > weight ( ) ; <nl> + } <nl> histogram_array_ [ feature_index ] . FindBestThreshold ( <nl> leaf_splits - > sum_gradients ( ) , leaf_splits - > sum_hessians ( ) , num_data , <nl> - constraints_ - > Get ( leaf_splits - > leaf_index ( ) ) , & new_split ) ; <nl> + constraints_ - > Get ( leaf_splits - > leaf_index ( ) ) , parent_output , & new_split ) ; <nl> new_split . feature = real_fidx ; <nl> if ( cegb_ ! = nullptr ) { <nl> new_split . gain - = <nl> mmm a / src / treelearner / voting_parallel_tree_learner . cpp <nl> ppp b / src / treelearner / voting_parallel_tree_learner . cpp <nl> void VotingParallelTreeLearner < TREELEARNER_T > : : Split ( Tree * tree , int best_Leaf , <nl> if ( best_split_info . left_count < best_split_info . right_count ) { <nl> smaller_leaf_splits_global_ - > Init ( * left_leaf , this - > data_partition_ . get ( ) , <nl> best_split_info . left_sum_gradient , <nl> - best_split_info . left_sum_hessian ) ; <nl> + best_split_info . 
left_sum_hessian , <nl> + best_split_info . left_output ) ; <nl> larger_leaf_splits_global_ - > Init ( * right_leaf , this - > data_partition_ . get ( ) , <nl> best_split_info . right_sum_gradient , <nl> - best_split_info . right_sum_hessian ) ; <nl> + best_split_info . right_sum_hessian , <nl> + best_split_info . right_output ) ; <nl> } else { <nl> smaller_leaf_splits_global_ - > Init ( * right_leaf , this - > data_partition_ . get ( ) , <nl> best_split_info . right_sum_gradient , <nl> - best_split_info . right_sum_hessian ) ; <nl> + best_split_info . right_sum_hessian , <nl> + best_split_info . right_output ) ; <nl> larger_leaf_splits_global_ - > Init ( * left_leaf , this - > data_partition_ . get ( ) , <nl> best_split_info . left_sum_gradient , <nl> - best_split_info . left_sum_hessian ) ; <nl> + best_split_info . left_sum_hessian , <nl> + best_split_info . left_output ) ; <nl> } <nl> } <nl> <nl> mmm a / tests / python_package_test / test_engine . py <nl> ppp b / tests / python_package_test / test_engine . py <nl> def test_extra_trees ( self ) : <nl> err_new = mean_squared_error ( y , predicted_new ) <nl> self . assertLess ( err , err_new ) <nl> <nl> + def test_path_smoothing ( self ) : <nl> + # check path smoothing increases regularization <nl> + X , y = load_boston ( True ) <nl> + lgb_x = lgb . Dataset ( X , label = y ) <nl> + params = { ' objective ' : ' regression ' , <nl> + ' num_leaves ' : 32 , <nl> + ' verbose ' : - 1 , <nl> + ' seed ' : 0 } <nl> + est = lgb . train ( params , lgb_x , num_boost_round = 10 ) <nl> + predicted = est . predict ( X ) <nl> + err = mean_squared_error ( y , predicted ) <nl> + params [ ' path_smooth ' ] = 1 <nl> + est = lgb . train ( params , lgb_x , num_boost_round = 10 ) <nl> + predicted_new = est . predict ( X ) <nl> + err_new = mean_squared_error ( y , predicted_new ) <nl> + self . assertLess ( err , err_new ) <nl> + <nl> @ unittest . skipIf ( not lgb . compat . PANDAS_INSTALLED , ' pandas is not installed ' ) <nl> def test_trees_to_dataframe ( self ) : <nl> <nl> | Path smoothing ( ) | microsoft/LightGBM | e50a9151ed0409183bee0dc8272e83b56f1f9e5a | 2020-05-03T05:53:15Z |
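
A note on the LightGBM path-smoothing entry above: the expression in docs/Parameters.rst is meant to be read with the whole numerator over n / path_smooth + 1, i.e. the templated CalculateSplittedLeafOutput in feature_histogram.hpp computes w * k / (k + 1) + w_p / (k + 1) with k = n / path_smooth. The minimal C++ sketch below restates just that blend; the function name and the sample numbers are made up for illustration and are not LightGBM code.

    // Illustrative sketch only: restates the node-weight smoothing that
    // CalculateSplittedLeafOutput applies when USE_SMOOTHING is true.
    // The function and the sample numbers are hypothetical, not LightGBM code.
    #include <cstdio>

    // Blend the raw optimal leaf output w with the parent output w_p.
    // k = n / path_smooth, so more samples in the leaf means less pull toward
    // the parent; path_smooth = 0 (no smoothing) goes through a separate
    // non-smoothing template path in the real code.
    double SmoothedLeafOutput(double raw_output,     // w, roughly -sum_gradients / (sum_hessians + lambda_l2)
                              double parent_output,  // w_p, itself smoothed unless the parent is the root
                              double num_data,       // n, number of samples reaching the leaf
                              double path_smooth)    // the new path_smooth parameter (> 0 here)
    {
        const double k = num_data / path_smooth;
        return raw_output * k / (k + 1.0) + parent_output / (k + 1.0);
    }

    int main()
    {
        // A sparsely populated leaf is pulled strongly toward its parent's output ...
        std::printf("n = 3:   %.3f\n", SmoothedLeafOutput(0.8, 0.1, 3.0, 10.0));   // ~0.262
        // ... while a well-populated leaf keeps nearly its raw output.
        std::printf("n = 300: %.3f\n", SmoothedLeafOutput(0.8, 0.1, 300.0, 10.0)); // ~0.777
        return 0;
    }

As the comment added in src/io/config.cpp explains, the per-leaf count used during split finding comes from the hessian proportion rounded up, so it can be 1 for an effectively empty leaf; forcing min_data_in_leaf to at least 2 keeps the parent-output term from yielding a spurious positive split gain when smoothing is active.
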
mmm a / ports / libraw / CONTROL <nl> ppp b / ports / libraw / CONTROL <nl> <nl> Source : libraw <nl> - Version : 0 . 19 . 0 <nl> + Version : 0 . 19 . 0 - 1 <nl> Build - Depends : lcms , jasper <nl> Description : raw image decoder library <nl> mmm a / ports / libraw / portfile . cmake <nl> ppp b / ports / libraw / portfile . cmake <nl> else ( ) <nl> endif ( ) <nl> file ( WRITE $ { CURRENT_PACKAGES_DIR } / include / libraw / libraw_types . h " $ { LIBRAW_H } " ) <nl> <nl> - # Rename thread - safe version to be " raw . lib " . This is unfortunately needed <nl> - # because otherwise libraries that build on top of libraw have to choose . <nl> - file ( REMOVE $ { CURRENT_PACKAGES_DIR } / lib / raw . lib $ { CURRENT_PACKAGES_DIR } / debug / lib / rawd . lib ) <nl> - file ( RENAME $ { CURRENT_PACKAGES_DIR } / lib / raw_r . lib $ { CURRENT_PACKAGES_DIR } / lib / raw . lib ) <nl> - file ( RENAME $ { CURRENT_PACKAGES_DIR } / debug / lib / raw_rd . lib $ { CURRENT_PACKAGES_DIR } / debug / lib / rawd . lib ) <nl> + if ( NOT VCPKG_CMAKE_SYSTEM_NAME OR VCPKG_CMAKE_SYSTEM_NAME STREQUAL " WindowsStore " ) <nl> + # Rename thread - safe version to be " raw . lib " . This is unfortunately needed <nl> + # because otherwise libraries that build on top of libraw have to choose . <nl> + file ( REMOVE $ { CURRENT_PACKAGES_DIR } / lib / raw . lib $ { CURRENT_PACKAGES_DIR } / debug / lib / rawd . lib ) <nl> + file ( RENAME $ { CURRENT_PACKAGES_DIR } / lib / raw_r . lib $ { CURRENT_PACKAGES_DIR } / lib / raw . lib ) <nl> + file ( RENAME $ { CURRENT_PACKAGES_DIR } / debug / lib / raw_rd . lib $ { CURRENT_PACKAGES_DIR } / debug / lib / rawd . lib ) <nl> + <nl> + # Cleanup <nl> + file ( GLOB RELEASE_EXECUTABLES $ { CURRENT_PACKAGES_DIR } / bin / * . exe ) <nl> + file ( REMOVE $ { RELEASE_EXECUTABLES } ) <nl> + file ( GLOB DEBUG_EXECUTABLES $ { CURRENT_PACKAGES_DIR } / debug / bin / * . exe ) <nl> + file ( REMOVE $ { DEBUG_EXECUTABLES } ) <nl> + endif ( ) <nl> <nl> - # Cleanup <nl> - file ( REMOVE_RECURSE $ { CURRENT_PACKAGES_DIR } / debug / include ) <nl> - file ( GLOB RELEASE_EXECUTABLES $ { CURRENT_PACKAGES_DIR } / bin / * . exe ) <nl> - file ( REMOVE $ { RELEASE_EXECUTABLES } ) <nl> - file ( GLOB DEBUG_EXECUTABLES $ { CURRENT_PACKAGES_DIR } / debug / bin / * . exe ) <nl> - file ( REMOVE $ { DEBUG_EXECUTABLES } ) <nl> if ( VCPKG_LIBRARY_LINKAGE STREQUAL static ) <nl> file ( REMOVE_RECURSE $ { CURRENT_PACKAGES_DIR } / bin $ { CURRENT_PACKAGES_DIR } / debug / bin ) <nl> else ( ) <nl> - file ( REMOVE $ { CURRENT_PACKAGES_DIR } / bin / raw . dll $ { CURRENT_PACKAGES_DIR } / debug / bin / rawd . dll ) <nl> + if ( VCPKG_CMAKE_SYSTEM_NAME AND NOT VCPKG_CMAKE_SYSTEM_NAME STREQUAL " WindowsStore " ) <nl> + file ( REMOVE $ { CURRENT_PACKAGES_DIR } / bin / raw . dll $ { CURRENT_PACKAGES_DIR } / debug / bin / rawd . dll ) <nl> + endif ( ) <nl> endif ( ) <nl> <nl> + file ( REMOVE_RECURSE $ { CURRENT_PACKAGES_DIR } / debug / include ) <nl> + file ( REMOVE_RECURSE $ { CURRENT_PACKAGES_DIR } / debug / share ) <nl> + <nl> # Rename cmake module into a config in order to allow more flexible lookup rules <nl> file ( RENAME $ { CURRENT_PACKAGES_DIR } / share / libraw / FindLibRaw . cmake $ { CURRENT_PACKAGES_DIR } / share / libraw / LibRaw - config . cmake ) <nl> <nl> | Merge pull request from Voskrese / patch - 1 | microsoft/vcpkg | 0c5b3ae2257da1f329e32a2260a1d11b03d9fd72 | 2018-08-29T01:26:37Z |
mmm a / hphp / doc / ir . specification <nl> ppp b / hphp / doc / ir . specification <nl> To array conversions : <nl> <nl> | ConvDictToArr , D ( Arr ) , S ( Dict ) , PRc | CRc <nl> <nl> - | ConvShapeToArr , D ( Arr ) , S ( Shape ) , PRc | CRc <nl> - <nl> | ConvKeysetToArr , D ( Arr ) , S ( Keyset ) , PRc | CRc <nl> <nl> | ConvFuncToArr , D ( Arr ) , S ( Func ) , PRc <nl> To vec conversions : <nl> <nl> | ConvDictToVec , D ( Vec ) , S ( Dict ) , PRc | CRc <nl> <nl> - | ConvShapeToVec , D ( Vec ) , S ( Shape ) , PRc | CRc <nl> - <nl> | ConvKeysetToVec , D ( Vec ) , S ( Keyset ) , PRc | CRc <nl> <nl> | ConvClsMethToVec , D ( Vec ) , S ( ClsMeth ) , PRc | CRc <nl> To dict conversions : <nl> <nl> | ConvArrToDict , D ( Dict ) , S ( Arr ) , PRc | CRc <nl> <nl> - | ConvShapeToDict , D ( Dict ) , S ( Shape ) , PRc | CRc <nl> - <nl> | ConvVecToDict , D ( Dict ) , S ( Vec ) , PRc | CRc <nl> <nl> | ConvKeysetToDict , D ( Dict ) , S ( Keyset ) , PRc | CRc <nl> To keyset conversions : <nl> <nl> | ConvDictToKeyset , D ( Keyset ) , S ( Dict ) , PRc | CRc <nl> <nl> - | ConvShapeToKeyset , D ( Keyset ) , S ( Shape ) , PRc | CRc <nl> - <nl> | ConvClsMethToKeyset , D ( Keyset ) , S ( ClsMeth ) , PRc | CRc <nl> <nl> | ConvObjToKeyset , D ( Keyset ) , S ( Obj ) , PRc | CRc <nl> To varray conversions : <nl> <nl> | ConvDictToVArr , DArrPacked , S ( Dict ) , PRc | CRc <nl> <nl> - | ConvShapeToVArr , DArrPacked , S ( Shape ) , PRc | CRc <nl> - <nl> | ConvKeysetToVArr , DArrPacked , S ( Keyset ) , PRc | CRc <nl> <nl> | ConvClsMethToVArr , DArrPacked , S ( ClsMeth ) , PRc | CRc <nl> To darray conversion : <nl> <nl> | ConvDictToDArr , DArrMixed , S ( Dict ) , PRc | CRc <nl> <nl> - | ConvShapeToDArr , DArrMixed , S ( Shape ) , PRc | CRc <nl> - <nl> | ConvKeysetToDArr , DArrMixed , S ( Keyset ) , PRc | CRc <nl> <nl> | ConvClsMethToDArr , DArrMixed , S ( ClsMeth ) , PRc | CRc <nl> To string conversions : <nl> SameArr and NSameArr may re - enter the VM and therefore may throw <nl> exceptions . SameArr and NSameArr never re - enter or throw . <nl> <nl> - | GtShape , D ( Bool ) , S ( Shape , Arr , Dict ) S ( Shape , Arr , Dict ) , NF <nl> - <nl> - | GteShape , D ( Bool ) , S ( Shape , Arr , Dict ) S ( Shape , Arr , Dict ) , NF <nl> - <nl> - | LtShape , D ( Bool ) , S ( Shape , Arr , Dict ) S ( Shape , Arr , Dict ) , NF <nl> - <nl> - | LteShape , D ( Bool ) , S ( Shape , Arr , Dict ) S ( Shape , Arr , Dict ) , NF <nl> - <nl> - | EqShape , D ( Bool ) , S ( Shape , Arr , Dict ) S ( Shape , Arr , Dict ) , NF <nl> - <nl> - | NeqShape , D ( Bool ) , S ( Shape , Arr , Dict ) S ( Shape , Arr , Dict ) , NF <nl> - <nl> - | SameShape , D ( Bool ) , S ( Shape , Arr , Dict ) S ( Shape , Arr , Dict ) , NF <nl> - <nl> - | NSameShape , D ( Bool ) , S ( Shape , Arr , Dict ) S ( Shape , Arr , Dict ) , NF <nl> - <nl> - | CmpShape , D ( Int ) , S ( Shape , Arr , Dict ) S ( Shape , Arr , Dict ) , NF <nl> - <nl> - Perform comparison of Shapes using PHP semantics . All versions except for <nl> - SameShape and NSameShape may re - enter the VM and therefore may throw <nl> - exceptions . SameShape and NSameShape never re - enter or throw . <nl> - <nl> | GtVec , D ( Bool ) , S ( Vec ) S ( Vec ) , NF <nl> <nl> | GteVec , D ( Bool ) , S ( Vec ) S ( Vec ) , NF <nl> To string conversions : <nl> <nl> | CountDict , D ( Int ) , S ( Dict ) , NF <nl> <nl> - | CountShape , D ( Int ) , S ( Shape ) , NF <nl> - <nl> | CountKeyset , D ( Int ) , S ( Keyset ) , NF <nl> <nl> | CountCollection , D ( Int ) , S ( Obj ) , NF <nl> mmm a / hphp / hhbbc / index . 
cpp <nl> ppp b / hphp / hhbbc / index . cpp <nl> Index : : ConstraintResolution Index : : get_type_for_annotated_type ( <nl> case KindOfDict : return TDict ; <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : return TKeyset ; <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : not_implemented ( ) ; <nl> case KindOfPersistentArray : <nl> case KindOfArray : return TPArr ; <nl> case KindOfResource : return TRes ; <nl> mmm a / hphp / hhbbc / optimize . cpp <nl> ppp b / hphp / hhbbc / optimize . cpp <nl> Bytecode gen_constant ( const Cell & cell ) { <nl> case KindOfPersistentKeyset : <nl> assert ( cell . m_data . parr - > isKeyset ( ) ) ; <nl> return bc : : Keyset { cell . m_data . parr } ; <nl> - case KindOfShape : <nl> - case KindOfPersistentShape : <nl> - not_implemented ( ) ; <nl> case KindOfArray : <nl> assert ( cell . m_data . parr - > isStatic ( ) ) ; <nl> case KindOfPersistentArray : <nl> mmm a / hphp / hhbbc / type - system . cpp <nl> ppp b / hphp / hhbbc / type - system . cpp <nl> Type from_cell ( Cell cell ) { <nl> always_assert ( cell . m_data . parr - > isKeyset ( ) ) ; <nl> return keyset_val ( cell . m_data . parr ) ; <nl> <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfRecord : / / TODO ( arnabde ) <nl> not_implemented ( ) ; <nl> <nl> Type from_DataType ( DataType dt ) { <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : return TKeyset ; <nl> case KindOfRecord : return TRecord ; <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : not_implemented ( ) ; <nl> case KindOfPersistentArray : <nl> case KindOfArray : return TArr ; <nl> case KindOfRef : return TRef ; <nl> mmm a / hphp / runtime / base / annot - type . h <nl> ppp b / hphp / runtime / base / annot - type . h <nl> annotCompat ( DataType dt , AnnotType at , const StringData * annotClsName ) { <nl> case KindOfString : <nl> return interface_supports_string ( annotClsName ) <nl> ? AnnotAction : : Pass : AnnotAction : : Fail ; <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> - / / TODO ( T31025155 ) : Emit a warning . <nl> - return interface_supports_shape ( annotClsName ) <nl> - ? AnnotAction : : Pass : AnnotAction : : Fail ; <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> return interface_supports_array ( annotClsName ) <nl> mmm a / hphp / runtime / base / apc - array . cpp <nl> ppp b / hphp / runtime / base / apc - array . 
cpp <nl> APCArray : : MakeSharedDict ( ArrayData * dict , APCHandleLevel level , <nl> ) ; <nl> } <nl> <nl> - APCHandle : : Pair <nl> - APCArray : : MakeSharedShape ( ArrayData * shape , APCHandleLevel level , <nl> - bool unserializeObj ) { <nl> - assertx ( shape - > isShape ( ) ) ; <nl> - if ( auto const value = APCTypedValue : : HandlePersistent ( <nl> - APCTypedValue : : StaticShape { } , APCTypedValue : : UncountedShape { } , shape ) ) { <nl> - return value ; <nl> - } <nl> - return MakeSharedImpl ( <nl> - shape , <nl> - level , <nl> - [ & ] ( ) { return MakeHash ( shape , APCKind : : SharedShape , unserializeObj ) ; } , <nl> - [ & ] ( DataWalker : : PointerMap * m ) { return MakeUncountedShape ( shape , m ) ; } , <nl> - [ & ] ( StringData * s ) { return APCString : : MakeSerializedShape ( s ) ; } <nl> - ) ; <nl> - } <nl> - <nl> APCHandle : : Pair <nl> APCArray : : MakeSharedKeyset ( ArrayData * keyset , APCHandleLevel level , <nl> bool unserializeObj ) { <nl> APCHandle * APCArray : : MakeUncountedDict ( <nl> return value - > getHandle ( ) ; <nl> } <nl> <nl> - APCHandle * APCArray : : MakeUncountedShape ( <nl> - ArrayData * shape , <nl> - DataWalker : : PointerMap * m ) { <nl> - assertx ( apcExtension : : UseUncounted ) ; <nl> - assertx ( shape - > isShape ( ) ) ; <nl> - auto data = MixedArray : : MakeUncounted ( shape , true , m ) ; <nl> - auto mem = reinterpret_cast < APCTypedValue * > ( data ) - 1 ; <nl> - auto value = new ( mem ) APCTypedValue ( APCTypedValue : : UncountedShape { } , data ) ; <nl> - return value - > getHandle ( ) ; <nl> - } <nl> - <nl> APCHandle * APCArray : : MakeUncountedKeyset ( ArrayData * keyset ) { <nl> assertx ( apcExtension : : UseUncounted ) ; <nl> assertx ( keyset - > isKeyset ( ) ) ; <nl> mmm a / hphp / runtime / base / apc - array . h <nl> ppp b / hphp / runtime / base / apc - array . h <nl> struct APCArray { <nl> static APCHandle : : Pair MakeSharedDict ( ArrayData * data , <nl> APCHandleLevel level , <nl> bool unserializeObj ) ; <nl> - static APCHandle : : Pair MakeSharedShape ( ArrayData * data , <nl> - APCHandleLevel level , <nl> - bool unserializeObj ) ; <nl> static APCHandle : : Pair MakeSharedKeyset ( ArrayData * data , <nl> APCHandleLevel level , <nl> bool unserializeObj ) ; <nl> struct APCArray { <nl> DataWalker : : PointerMap * m = nullptr ) ; <nl> static APCHandle * MakeUncountedDict ( ArrayData * dict , <nl> DataWalker : : PointerMap * m = nullptr ) ; <nl> - static APCHandle * MakeUncountedShape ( ArrayData * shape , <nl> - DataWalker : : PointerMap * m = nullptr ) ; <nl> static APCHandle * MakeUncountedKeyset ( ArrayData * dict ) ; <nl> <nl> static APCHandle : : Pair MakeSharedEmptyArray ( ) ; <nl> struct APCArray { <nl> } <nl> ArrayData * toLocalVec ( ) const { return PackedArray : : MakeVecFromAPC ( this ) ; } <nl> ArrayData * toLocalDict ( ) const { return MixedArray : : MakeDictFromAPC ( this ) ; } <nl> - ArrayData * toLocalShape ( ) const { return MixedArray : : MakeShapeFromAPC ( this ) ; } <nl> ArrayData * toLocalKeyset ( ) const { return SetArray : : MakeSetFromAPC ( this ) ; } <nl> <nl> / / <nl> struct APCArray { <nl> return m_handle . kind ( ) = = APCKind : : SharedDict ; <nl> } <nl> <nl> - bool isShape ( ) const { <nl> - return m_handle . kind ( ) = = APCKind : : SharedShape ; <nl> - } <nl> - <nl> bool isKeyset ( ) const { <nl> return m_handle . kind ( ) = = APCKind : : SharedKeyset ; <nl> } <nl> mmm a / hphp / runtime / base / apc - handle . cpp <nl> ppp b / hphp / runtime / base / apc - handle . 
cpp <nl> APCHandle : : Pair APCHandle : : Create ( const_variant_ref source , <nl> return APCArray : : MakeSharedKeyset ( ad , level , unserializeObj ) ; <nl> } <nl> <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : { <nl> - auto ad = source . getArrayData ( ) ; <nl> - assertx ( ad - > isShape ( ) ) ; <nl> - return APCArray : : MakeSharedShape ( ad , level , unserializeObj ) ; <nl> - } <nl> - <nl> case KindOfPersistentArray : <nl> case KindOfArray : { <nl> auto const ad = val ( cell ) . parr ; <nl> Variant APCHandle : : toLocalHelper ( ) const { <nl> case APCKind : : UncountedVec : <nl> case APCKind : : StaticDict : <nl> case APCKind : : UncountedDict : <nl> - case APCKind : : StaticShape : <nl> - case APCKind : : UncountedShape : <nl> case APCKind : : StaticKeyset : <nl> case APCKind : : UncountedKeyset : <nl> not_reached ( ) ; <nl> Variant APCHandle : : toLocalHelper ( ) const { <nl> assertx ( v . isDict ( ) ) ; <nl> return v ; <nl> } <nl> - case APCKind : : SerializedShape : { <nl> - auto const serShape = APCString : : fromHandle ( this ) - > getStringData ( ) ; <nl> - auto const v = apc_unserialize ( serShape - > data ( ) , serShape - > size ( ) ) ; <nl> - assertx ( v . isShape ( ) ) ; <nl> - return v ; <nl> - } <nl> case APCKind : : SerializedKeyset : { <nl> auto const serKeyset = APCString : : fromHandle ( this ) - > getStringData ( ) ; <nl> auto const v = apc_unserialize ( serKeyset - > data ( ) , serKeyset - > size ( ) ) ; <nl> Variant APCHandle : : toLocalHelper ( ) const { <nl> return Variant : : attach ( <nl> APCArray : : fromHandle ( this ) - > toLocalDict ( ) <nl> ) ; <nl> - case APCKind : : SharedShape : <nl> - return Variant : : attach ( <nl> - APCArray : : fromHandle ( this ) - > toLocalShape ( ) <nl> - ) ; <nl> case APCKind : : SharedKeyset : <nl> return Variant : : attach ( <nl> APCArray : : fromHandle ( this ) - > toLocalKeyset ( ) <nl> void APCHandle : : deleteShared ( ) { <nl> case APCKind : : StaticArray : <nl> case APCKind : : StaticVec : <nl> case APCKind : : StaticDict : <nl> - case APCKind : : StaticShape : <nl> case APCKind : : StaticKeyset : <nl> case APCKind : : PersistentFunc : <nl> delete APCTypedValue : : fromHandle ( this ) ; <nl> void APCHandle : : deleteShared ( ) { <nl> case APCKind : : SerializedArray : <nl> case APCKind : : SerializedVec : <nl> case APCKind : : SerializedDict : <nl> - case APCKind : : SerializedShape : <nl> case APCKind : : SerializedKeyset : <nl> case APCKind : : SerializedObject : <nl> APCString : : Delete ( APCString : : fromHandle ( this ) ) ; <nl> void APCHandle : : deleteShared ( ) { <nl> case APCKind : : SharedArray : <nl> case APCKind : : SharedVec : <nl> case APCKind : : SharedDict : <nl> - case APCKind : : SharedShape : <nl> case APCKind : : SharedKeyset : <nl> APCArray : : Delete ( this ) ; <nl> return ; <nl> void APCHandle : : deleteShared ( ) { <nl> case APCKind : : UncountedArray : <nl> case APCKind : : UncountedVec : <nl> case APCKind : : UncountedDict : <nl> - case APCKind : : UncountedShape : <nl> case APCKind : : UncountedKeyset : <nl> case APCKind : : UncountedString : <nl> assertx ( false ) ; <nl> bool APCHandle : : checkInvariants ( ) const { <nl> case APCKind : : UncountedDict : <nl> assertx ( m_type = = KindOfPersistentDict ) ; <nl> return true ; <nl> - case APCKind : : StaticShape : <nl> - case APCKind : : UncountedShape : <nl> - assertx ( m_type = = KindOfPersistentShape ) ; <nl> - return true ; <nl> case APCKind : : StaticKeyset : <nl> case APCKind : : UncountedKeyset : <nl> assertx ( m_type = = 
KindOfPersistentKeyset ) ; <nl> bool APCHandle : : checkInvariants ( ) const { <nl> case APCKind : : SharedPackedArray : <nl> case APCKind : : SharedVec : <nl> case APCKind : : SharedDict : <nl> - case APCKind : : SharedShape : <nl> case APCKind : : SharedKeyset : <nl> case APCKind : : SharedObject : <nl> case APCKind : : SharedCollection : <nl> case APCKind : : SerializedArray : <nl> case APCKind : : SerializedVec : <nl> case APCKind : : SerializedDict : <nl> - case APCKind : : SerializedShape : <nl> case APCKind : : SerializedKeyset : <nl> case APCKind : : SerializedObject : <nl> assertx ( m_type = = kInvalidDataType ) ; <nl> mmm a / hphp / runtime / base / apc - handle . h <nl> ppp b / hphp / runtime / base / apc - handle . h <nl> enum class APCKind : uint8_t { <nl> UncountedArray , <nl> UncountedVec , <nl> UncountedDict , <nl> - UncountedShape , <nl> UncountedKeyset , <nl> StaticString , <nl> StaticArray , <nl> StaticVec , <nl> StaticDict , <nl> - StaticShape , <nl> StaticKeyset , <nl> SharedString , SharedArray , <nl> SharedPackedArray , SharedVec , <nl> SharedDict , SharedKeyset , <nl> - SharedShape , <nl> SharedVArray , SharedDArray , <nl> SharedObject , SharedCollection , <nl> SerializedArray , SerializedVec , <nl> - SerializedDict , SerializedShape , <nl> + SerializedDict , <nl> SerializedKeyset , <nl> SerializedObject , <nl> FuncEntity <nl> enum class APCKind : uint8_t { <nl> * UncountedVec APCTypedValue KindOfPersistentVec <nl> * StaticDict APCTypedValue KindOfPersistentDict <nl> * UncountedDict APCTypedValue KindOfPersistentDict <nl> - * StaticShape APCTypedValue KindOfPersistentShape <nl> - * UncountedShape APCTypedValue KindOfPersistentShape <nl> * StaticKeyset APCTypedValue KindOfPersistentKeyset <nl> * UncountedKeyset APCTypedValue KindOfPersistentKeyset <nl> * SharedString APCString kInvalidDataType <nl> enum class APCKind : uint8_t { <nl> * SharedPackedArray APCArray kInvalidDataType <nl> * SharedVec APCArray kInvalidDataType <nl> * SharedDict APCArray kInvalidDataType <nl> - * SharedShape APCArray kInvalidDataType <nl> * SharedKeyset APCArray kInvalidDataType <nl> * SharedDArray APCArray kInvalidDataType <nl> * SharedVArray APCArray kInvalidDataType <nl> struct APCHandle { <nl> static_assert ( APCKind : : UncountedString < APCKind : : UncountedArray & & <nl> APCKind : : UncountedArray < APCKind : : UncountedVec & & <nl> APCKind : : UncountedVec < APCKind : : UncountedDict & & <nl> - APCKind : : UncountedDict < APCKind : : UncountedShape & & <nl> - APCKind : : UncountedShape < APCKind : : UncountedKeyset & & <nl> + APCKind : : UncountedDict < APCKind : : UncountedKeyset & & <nl> static_cast < int > ( APCKind : : UncountedKeyset ) - <nl> - static_cast < int > ( APCKind : : UncountedString ) = = 5 , <nl> + static_cast < int > ( APCKind : : UncountedString ) = = 4 , <nl> " The Uncounted APCKinds must be consecutive , and " <nl> " in the following order so that gcc can optimize " <nl> " this to a range check . " ) ; <nl> struct APCHandle { <nl> m_kind = = APCKind : : UncountedArray | | <nl> m_kind = = APCKind : : UncountedVec | | <nl> m_kind = = APCKind : : UncountedDict | | <nl> - m_kind = = APCKind : : UncountedShape | | <nl> m_kind = = APCKind : : UncountedKeyset ; <nl> } <nl> <nl> mmm a / hphp / runtime / base / apc - local - array . h <nl> ppp b / hphp / runtime / base / apc - local - array . 
h <nl> struct APCLocalArray final : ArrayData , <nl> static constexpr auto ToKeyset = & ArrayCommon : : ToKeyset ; <nl> static constexpr auto ToVArray = & ArrayCommon : : ToVArray ; <nl> static constexpr auto ToDArray = & ArrayCommon : : ToDArray ; <nl> - static constexpr auto ToShape = & ArrayCommon : : ToShape ; <nl> <nl> public : <nl> using ArrayData : : decRefCount ; <nl> mmm a / hphp / runtime / base / apc - stats . cpp <nl> ppp b / hphp / runtime / base / apc - stats . cpp <nl> size_t getMemSize ( const APCHandle * handle ) { <nl> case APCKind : : StaticArray : <nl> case APCKind : : StaticVec : <nl> case APCKind : : StaticDict : <nl> - case APCKind : : StaticShape : <nl> case APCKind : : StaticKeyset : <nl> return sizeof ( APCHandle ) ; <nl> <nl> size_t getMemSize ( const APCHandle * handle ) { <nl> case APCKind : : SerializedArray : <nl> case APCKind : : SerializedVec : <nl> case APCKind : : SerializedDict : <nl> - case APCKind : : SerializedShape : <nl> case APCKind : : SerializedKeyset : <nl> return getMemSize ( APCString : : fromHandle ( handle ) ) ; <nl> <nl> size_t getMemSize ( const APCHandle * handle ) { <nl> case APCKind : : UncountedDict : <nl> return sizeof ( APCTypedValue ) + <nl> getMemSize ( APCTypedValue : : fromHandle ( handle ) - > getDictData ( ) ) ; <nl> - case APCKind : : UncountedShape : <nl> - return sizeof ( APCTypedValue ) + <nl> - getMemSize ( APCTypedValue : : fromHandle ( handle ) - > getShapeData ( ) ) ; <nl> case APCKind : : UncountedKeyset : <nl> return sizeof ( APCTypedValue ) + <nl> getMemSize ( APCTypedValue : : fromHandle ( handle ) - > getKeysetData ( ) ) ; <nl> size_t getMemSize ( const APCHandle * handle ) { <nl> case APCKind : : SharedPackedArray : <nl> case APCKind : : SharedVec : <nl> case APCKind : : SharedDict : <nl> - case APCKind : : SharedShape : <nl> case APCKind : : SharedKeyset : <nl> case APCKind : : SharedVArray : <nl> case APCKind : : SharedDArray : <nl> size_t getMemSize ( const ArrayData * arr , bool recurse ) { <nl> } <nl> return size ; <nl> } <nl> - case ArrayData : : ArrayKind : : kShapeKind : <nl> case ArrayData : : ArrayKind : : kDictKind : <nl> case ArrayData : : ArrayKind : : kMixedKind : { <nl> auto const mixed = MixedArray : : asMixed ( arr ) ; <nl> APCDetailedStats : : APCDetailedStats ( ) : m_uncounted ( nullptr ) <nl> , m_serArray ( nullptr ) <nl> , m_serVec ( nullptr ) <nl> , m_serDict ( nullptr ) <nl> - , m_serShape ( nullptr ) <nl> , m_serKeyset ( nullptr ) <nl> , m_apcArray ( nullptr ) <nl> , m_apcVec ( nullptr ) <nl> APCDetailedStats : : APCDetailedStats ( ) : m_uncounted ( nullptr ) <nl> m_serArray = ServiceData : : createCounter ( " apc . type_ser_array " ) ; <nl> m_serVec = ServiceData : : createCounter ( " apc . type_ser_vec " ) ; <nl> m_serDict = ServiceData : : createCounter ( " apc . type_ser_dict " ) ; <nl> - m_serShape = ServiceData : : createCounter ( " apc . type_ser_shape " ) ; <nl> m_serKeyset = ServiceData : : createCounter ( " apc . type_ser_keyset " ) ; <nl> m_apcArray = ServiceData : : createCounter ( " apc . type_apc_array " ) ; <nl> m_apcVec = ServiceData : : createCounter ( " apc . type_apc_vec " ) ; <nl> m_apcDict = ServiceData : : createCounter ( " apc . type_apc_dict " ) ; <nl> - m_apcShape = ServiceData : : createCounter ( " apc . type_apc_shape " ) ; <nl> m_apcKeyset = ServiceData : : createCounter ( " apc . type_apc_keyset " ) ; <nl> m_uncArray = ServiceData : : createCounter ( " apc . type_unc_array " ) ; <nl> m_uncVec = ServiceData : : createCounter ( " apc . 
type_unc_vec " ) ; <nl> m_uncDict = ServiceData : : createCounter ( " apc . type_unc_dict " ) ; <nl> - m_uncShape = ServiceData : : createCounter ( " apc . type_unc_shape " ) ; <nl> m_uncKeyset = ServiceData : : createCounter ( " apc . type_unc_keyset " ) ; <nl> m_serObject = ServiceData : : createCounter ( " apc . type_ser_object " ) ; <nl> m_apcObject = ServiceData : : createCounter ( " apc . type_apc_object " ) ; <nl> const StaticString s_typeUncountedString ( " type_unc_string " ) ; <nl> const StaticString s_typeSerArray ( " type_ser_array " ) ; <nl> const StaticString s_typeSerVec ( " type_ser_vec " ) ; <nl> const StaticString s_typeSerDict ( " type_ser_dict " ) ; <nl> - const StaticString s_typeSerShape ( " type_ser_shape " ) ; <nl> const StaticString s_typeSerKeyset ( " type_ser_keyset " ) ; <nl> const StaticString s_typeAPCArray ( " type_apc_array " ) ; <nl> const StaticString s_typeAPCVec ( " type_apc_vec " ) ; <nl> const StaticString s_typeAPCDict ( " type_apc_dict " ) ; <nl> - const StaticString s_typeAPCShape ( " type_apc_shape " ) ; <nl> const StaticString s_typeAPCKeyset ( " type_apc_keyset " ) ; <nl> const StaticString s_typUncountedArray ( " type_unc_array " ) ; <nl> const StaticString s_typUncountedVec ( " type_unc_vec " ) ; <nl> const StaticString s_typUncountedDict ( " type_unc_dict " ) ; <nl> - const StaticString s_typUncountedShape ( " type_unc_shape " ) ; <nl> const StaticString s_typUncountedKeyset ( " type_unc_keyset " ) ; <nl> const StaticString s_typeSerObject ( " type_ser_object " ) ; <nl> const StaticString s_typeAPCObject ( " type_apc_object " ) ; <nl> std : : string APCDetailedStats : : getStatsInfo ( ) const { <nl> std : : to_string ( m_serVec - > getValue ( ) ) + <nl> " \ nSerialized dict count : " + <nl> std : : to_string ( m_serDict - > getValue ( ) ) + <nl> - " \ nSerialized shape count : " + <nl> - std : : to_string ( m_serShape - > getValue ( ) ) + <nl> " \ nSerialized keyset count : " + <nl> std : : to_string ( m_serKeyset - > getValue ( ) ) + <nl> " \ nAPC array count : " + <nl> std : : string APCDetailedStats : : getStatsInfo ( ) const { <nl> std : : to_string ( m_apcVec - > getValue ( ) ) + <nl> " \ nAPC dict count : " + <nl> std : : to_string ( m_apcDict - > getValue ( ) ) + <nl> - " \ nAPC shape count : " + <nl> - std : : to_string ( m_apcShape - > getValue ( ) ) + <nl> " \ nAPC keyset count : " + <nl> std : : to_string ( m_apcKeyset - > getValue ( ) ) + <nl> " \ nUncounted array count : " + <nl> std : : string APCDetailedStats : : getStatsInfo ( ) const { <nl> std : : to_string ( m_uncVec - > getValue ( ) ) + <nl> " \ nUncounted dict count : " + <nl> std : : to_string ( m_uncDict - > getValue ( ) ) + <nl> - " \ nUncounted shape count : " + <nl> - std : : to_string ( m_uncShape - > getValue ( ) ) + <nl> " \ nUncounted keyset count : " + <nl> std : : to_string ( m_uncKeyset - > getValue ( ) ) + <nl> " \ nSerialized object count : " + <nl> void APCDetailedStats : : collectStats ( <nl> stats . insert ( <nl> std : : pair < const StringData * , int64_t > ( s_typeSerDict . get ( ) , <nl> m_serDict - > getValue ( ) ) ) ; <nl> - stats . insert ( <nl> - std : : pair < const StringData * , int64_t > ( s_typeSerShape . get ( ) , <nl> - m_serShape - > getValue ( ) ) ) ; <nl> stats . insert ( <nl> std : : pair < const StringData * , int64_t > ( s_typeSerKeyset . get ( ) , <nl> m_serKeyset - > getValue ( ) ) ) ; <nl> void APCDetailedStats : : collectStats ( <nl> stats . insert ( <nl> std : : pair < const StringData * , int64_t > ( s_typeAPCDict . 
get ( ) , <nl> m_apcDict - > getValue ( ) ) ) ; <nl> - stats . insert ( <nl> - std : : pair < const StringData * , int64_t > ( s_typeAPCShape . get ( ) , <nl> - m_apcShape - > getValue ( ) ) ) ; <nl> stats . insert ( <nl> std : : pair < const StringData * , int64_t > ( s_typeAPCKeyset . get ( ) , <nl> m_apcKeyset - > getValue ( ) ) ) ; <nl> void APCDetailedStats : : collectStats ( <nl> stats . insert ( <nl> std : : pair < const StringData * , int64_t > ( s_typUncountedDict . get ( ) , <nl> m_uncDict - > getValue ( ) ) ) ; <nl> - stats . insert ( <nl> - std : : pair < const StringData * , int64_t > ( s_typUncountedShape . get ( ) , <nl> - m_uncShape - > getValue ( ) ) ) ; <nl> stats . insert ( <nl> std : : pair < const StringData * , int64_t > ( s_typUncountedKeyset . get ( ) , <nl> m_uncKeyset - > getValue ( ) ) ) ; <nl> APCDetailedStats : : counterFor ( const APCHandle * handle ) { <nl> case APCKind : : StaticArray : <nl> case APCKind : : StaticVec : <nl> case APCKind : : StaticDict : <nl> - case APCKind : : StaticShape : <nl> case APCKind : : StaticKeyset : <nl> return m_uncounted ; <nl> <nl> APCDetailedStats : : counterFor ( const APCHandle * handle ) { <nl> case APCKind : : UncountedDict : <nl> return m_uncDict ; <nl> <nl> - case APCKind : : UncountedShape : <nl> - return m_uncShape ; <nl> - <nl> case APCKind : : UncountedKeyset : <nl> return m_uncKeyset ; <nl> <nl> APCDetailedStats : : counterFor ( const APCHandle * handle ) { <nl> case APCKind : : SerializedDict : <nl> return m_serDict ; <nl> <nl> - case APCKind : : SerializedShape : <nl> - return m_serShape ; <nl> - <nl> case APCKind : : SerializedKeyset : <nl> return m_serKeyset ; <nl> <nl> APCDetailedStats : : counterFor ( const APCHandle * handle ) { <nl> case APCKind : : SharedDict : <nl> return m_apcDict ; <nl> <nl> - case APCKind : : SharedShape : <nl> - return m_apcShape ; <nl> - <nl> case APCKind : : SharedKeyset : <nl> return m_apcKeyset ; <nl> <nl> mmm a / hphp / runtime / base / apc - stats . h <nl> ppp b / hphp / runtime / base / apc - stats . h <nl> struct APCDetailedStats { <nl> ServiceData : : ExportedCounter * m_serVec ; <nl> / / Number of serialized dicts <nl> ServiceData : : ExportedCounter * m_serDict ; <nl> - / / Number of serialized shapes <nl> - ServiceData : : ExportedCounter * m_serShape ; <nl> / / Number of serialized keysets <nl> ServiceData : : ExportedCounter * m_serKeyset ; <nl> / / Number of APC arrays <nl> struct APCDetailedStats { <nl> ServiceData : : ExportedCounter * m_apcVec ; <nl> / / Number of APC dicts <nl> ServiceData : : ExportedCounter * m_apcDict ; <nl> - / / Number of APC shapes <nl> - ServiceData : : ExportedCounter * m_apcShape ; <nl> / / Number of APC keysets <nl> ServiceData : : ExportedCounter * m_apcKeyset ; <nl> / / Number of uncounted arrays . Uncounted arrays are kind of <nl> struct APCDetailedStats { <nl> / / Number of uncounted dicts . Uncounted dicts are kind of <nl> / / static dicts whose lifetime is controlled by the treadmill <nl> ServiceData : : ExportedCounter * m_uncDict ; <nl> - / / Number of uncounted shapes . Uncounted shapes are kind of <nl> - / / static shapes whose lifetime is controlled by the treadmill <nl> - ServiceData : : ExportedCounter * m_uncShape ; <nl> / / Number of uncounted keysets . Uncounted keysets are kind of <nl> / / static keysets whose lifetime is controlled by the treadmill <nl> ServiceData : : ExportedCounter * m_uncKeyset ; <nl> mmm a / hphp / runtime / base / apc - string . h <nl> ppp b / hphp / runtime / base / apc - string . 
h <nl> struct APCString { <nl> return MakeSharedString ( APCKind : : SerializedDict , str ) ; <nl> } <nl> <nl> - static APCHandle : : Pair MakeSerializedShape ( StringData * str ) { <nl> - return MakeSharedString ( APCKind : : SerializedShape , str ) ; <nl> - } <nl> - <nl> static APCHandle : : Pair MakeSerializedKeyset ( StringData * str ) { <nl> return MakeSharedString ( APCKind : : SerializedKeyset , str ) ; <nl> } <nl> struct APCString { <nl> handle - > kind ( ) = = APCKind : : SerializedArray | | <nl> handle - > kind ( ) = = APCKind : : SerializedVec | | <nl> handle - > kind ( ) = = APCKind : : SerializedDict | | <nl> - handle - > kind ( ) = = APCKind : : SerializedShape | | <nl> handle - > kind ( ) = = APCKind : : SerializedKeyset | | <nl> handle - > kind ( ) = = APCKind : : SerializedObject ) ; <nl> static_assert ( <nl> struct APCString { <nl> handle - > kind ( ) = = APCKind : : SerializedArray | | <nl> handle - > kind ( ) = = APCKind : : SerializedVec | | <nl> handle - > kind ( ) = = APCKind : : SerializedDict | | <nl> - handle - > kind ( ) = = APCKind : : SerializedShape | | <nl> handle - > kind ( ) = = APCKind : : SerializedKeyset | | <nl> handle - > kind ( ) = = APCKind : : SerializedObject ) ; <nl> static_assert ( <nl> mmm a / hphp / runtime / base / apc - typed - value . cpp <nl> ppp b / hphp / runtime / base / apc - typed - value . cpp <nl> bool APCTypedValue : : checkInvariants ( ) const { <nl> assertx ( m_data . dict - > isDict ( ) ) ; <nl> assertx ( m_data . dict - > isStatic ( ) ) ; <nl> break ; <nl> - case APCKind : : StaticShape : <nl> - assertx ( m_data . shape - > isShape ( ) ) ; <nl> - assertx ( m_data . shape - > isStatic ( ) ) ; <nl> - break ; <nl> case APCKind : : StaticKeyset : <nl> assertx ( m_data . keyset - > isKeyset ( ) ) ; <nl> assertx ( m_data . keyset - > isStatic ( ) ) ; <nl> bool APCTypedValue : : checkInvariants ( ) const { <nl> assertx ( m_data . dict - > isDict ( ) ) ; <nl> assertx ( m_data . dict - > isUncounted ( ) ) ; <nl> break ; <nl> - case APCKind : : UncountedShape : <nl> - assertx ( m_data . shape - > isShape ( ) ) ; <nl> - assertx ( m_data . shape - > isUncounted ( ) ) ; <nl> - break ; <nl> case APCKind : : UncountedKeyset : <nl> assertx ( m_data . keyset - > isKeyset ( ) ) ; <nl> assertx ( m_data . keyset - > isUncounted ( ) ) ; <nl> bool APCTypedValue : : checkInvariants ( ) const { <nl> case APCKind : : SharedCollection : <nl> case APCKind : : SharedVec : <nl> case APCKind : : SharedDict : <nl> - case APCKind : : SharedShape : <nl> case APCKind : : SharedKeyset : <nl> case APCKind : : SharedVArray : <nl> case APCKind : : SharedDArray : <nl> bool APCTypedValue : : checkInvariants ( ) const { <nl> case APCKind : : SerializedObject : <nl> case APCKind : : SerializedVec : <nl> case APCKind : : SerializedDict : <nl> - case APCKind : : SerializedShape : <nl> case APCKind : : SerializedKeyset : <nl> assertx ( false ) ; <nl> break ; <nl> mmm a / hphp / runtime / base / apc - typed - value . h <nl> ppp b / hphp / runtime / base / apc - typed - value . h <nl> struct APCTypedValue { <nl> assertx ( checkInvariants ( ) ) ; <nl> } <nl> <nl> - enum class StaticShape { } ; <nl> - APCTypedValue ( StaticShape , ArrayData * data ) <nl> - : m_handle ( APCKind : : StaticShape , KindOfPersistentShape ) { <nl> - assertx ( data - > isShape ( ) ) ; <nl> - assertx ( data - > isStatic ( ) ) ; <nl> - m_data . 
shape = data ; <nl> - assertx ( checkInvariants ( ) ) ; <nl> - } <nl> - <nl> - enum class UncountedShape { } ; <nl> - APCTypedValue ( UncountedShape , ArrayData * data ) <nl> - : m_handle ( APCKind : : UncountedShape , KindOfPersistentShape ) { <nl> - assertx ( data - > isShape ( ) ) ; <nl> - assertx ( data - > isUncounted ( ) ) ; <nl> - m_data . shape = data ; <nl> - assertx ( checkInvariants ( ) ) ; <nl> - } <nl> - <nl> enum class StaticKeyset { } ; <nl> APCTypedValue ( StaticKeyset , ArrayData * data ) <nl> : m_handle ( APCKind : : StaticKeyset , KindOfPersistentKeyset ) { <nl> struct APCTypedValue { <nl> return m_data . dict ; <nl> } <nl> <nl> - ArrayData * getShapeData ( ) const { <nl> - assertx ( checkInvariants ( ) ) ; <nl> - assertx ( m_handle . kind ( ) = = APCKind : : StaticShape | | <nl> - m_handle . kind ( ) = = APCKind : : UncountedShape ) ; <nl> - return m_data . shape ; <nl> - } <nl> - <nl> ArrayData * getKeysetData ( ) const { <nl> assertx ( checkInvariants ( ) ) ; <nl> assertx ( m_handle . kind ( ) = = APCKind : : StaticKeyset | | <nl> mmm a / hphp / runtime / base / array - common . cpp <nl> ppp b / hphp / runtime / base / array - common . cpp <nl> ArrayData * ArrayCommon : : ToDArray ( ArrayData * a , bool ) { <nl> return init . create ( ) ; <nl> } <nl> <nl> - ArrayData * ArrayCommon : : ToShape ( ArrayData * a , bool copy ) { <nl> - auto arr = RuntimeOption : : EvalHackArrDVArrs <nl> - ? ArrayCommon : : ToDict ( a , copy ) <nl> - : ArrayCommon : : ToDArray ( a , copy ) ; <nl> - arr = arr - > toShapeInPlaceIfCompatible ( ) ; <nl> - return arr ; <nl> - } <nl> - <nl> ArrayCommon : : RefCheckResult <nl> ArrayCommon : : CheckForRefs ( const ArrayData * ad ) { <nl> auto result = RefCheckResult : : Pass ; <nl> mmm a / hphp / runtime / base / array - common . h <nl> ppp b / hphp / runtime / base / array - common . h <nl> struct ArrayCommon { <nl> static ArrayData * ToVArray ( ArrayData * , bool ) ; <nl> static ArrayData * ToDArray ( ArrayData * , bool ) ; <nl> <nl> - static ArrayData * ToShape ( ArrayData * , bool ) ; <nl> - <nl> enum class RefCheckResult { <nl> Pass , / / No refs <nl> Fail , / / Referenced ref <nl> mmm a / hphp / runtime / base / array - data - defs . h <nl> ppp b / hphp / runtime / base / array - data - defs . h <nl> inline ArrayData * ArrayData : : toPHPArrayIntishCast ( bool copy ) { <nl> return g_array_funcs . toPHPArrayIntishCast [ kind ( ) ] ( this , copy ) ; <nl> } <nl> <nl> - inline ArrayData * ArrayData : : toShape ( bool copy ) { <nl> - return g_array_funcs . toShape [ kind ( ) ] ( this , copy ) ; <nl> - } <nl> - <nl> inline ArrayData * ArrayData : : toDict ( bool copy ) { <nl> return g_array_funcs . toDict [ kind ( ) ] ( this , copy ) ; <nl> } <nl> mmm a / hphp / runtime / base / array - data - inl . h <nl> ppp b / hphp / runtime / base / array - data - inl . h <nl> ALWAYS_INLINE ArrayData * staticEmptyDictArray ( ) { <nl> return static_cast < ArrayData * > ( vp ) ; <nl> } <nl> <nl> - ALWAYS_INLINE ArrayData * staticEmptyShapeArray ( ) { <nl> - void * vp = RuntimeOption : : EvalHackArrDVArrs <nl> - ? 
& s_theEmptyShapeDict : & s_theEmptyShapeDArray ; <nl> - return static_cast < ArrayData * > ( vp ) ; <nl> - } <nl> - <nl> ALWAYS_INLINE ArrayData * staticEmptyKeysetArray ( ) { <nl> void * vp = & s_theEmptySetArray ; <nl> return static_cast < ArrayData * > ( vp ) ; <nl> ALWAYS_INLINE ArrayData * ArrayData : : CreateDict ( ) { <nl> : staticEmptyDictArray ( ) ; <nl> } <nl> <nl> - ALWAYS_INLINE ArrayData * ArrayData : : CreateShape ( ) { <nl> - return staticEmptyShapeArray ( ) ; <nl> - } <nl> - <nl> ALWAYS_INLINE ArrayData * ArrayData : : CreateKeyset ( ) { <nl> return staticEmptyKeysetArray ( ) ; <nl> } <nl> inline bool ArrayData : : noCopyOnWrite ( ) const { <nl> <nl> inline bool ArrayData : : isPacked ( ) const { return kind ( ) = = kPackedKind ; } <nl> inline bool ArrayData : : isMixed ( ) const { return kind ( ) = = kMixedKind ; } <nl> - <nl> - / * <nl> - * isMixedOrShape checks whether the ArrayData is kMixedKind or a Shape that <nl> - * behaves like a mixed PHP Array . This is important because this check is <nl> - * often used to check that a piece of code is only operating on <nl> - * mixed PHP array - like objects and not dict - like objects . <nl> - * / <nl> - inline bool ArrayData : : isMixedOrShape ( ) const { <nl> - return kind ( ) = = kMixedKind | | <nl> - ( ! RuntimeOption : : EvalHackArrDVArrs & & kind ( ) = = kShapeKind ) ; <nl> - } <nl> inline bool ArrayData : : isApcArray ( ) const { return kind ( ) = = kApcKind ; } <nl> inline bool ArrayData : : isGlobalsArray ( ) const { return kind ( ) = = kGlobalsKind ; } <nl> inline bool ArrayData : : isEmptyArray ( ) const { return kind ( ) = = kEmptyKind ; } <nl> inline bool ArrayData : : isDict ( ) const { return kind ( ) = = kDictKind ; } <nl> <nl> - / * <nl> - * isDictOrShape checks whether the ArrayData is a dict or a Shape that <nl> - * behaves like a dict . This is important because this check is often used <nl> - * to check that a piece of code is only operating on dict - like objects and <nl> - * not array - like objects . <nl> - * / <nl> - inline bool ArrayData : : isDictOrShape ( ) const { <nl> - return kind ( ) = = kDictKind | | <nl> - ( RuntimeOption : : EvalHackArrDVArrs & & kind ( ) = = kShapeKind ) ; <nl> - } <nl> inline bool ArrayData : : isVecArray ( ) const { return kind ( ) = = kVecKind ; } <nl> inline bool ArrayData : : isKeyset ( ) const { return kind ( ) = = kKeysetKind ; } <nl> - inline bool ArrayData : : isShape ( ) const { return kind ( ) = = kShapeKind ; } <nl> inline bool ArrayData : : isRecordArray ( ) const { return kind ( ) = = kRecordKind ; } <nl> <nl> inline bool ArrayData : : hasPackedLayout ( ) const { <nl> return isPacked ( ) | | isVecArray ( ) ; <nl> } <nl> inline bool ArrayData : : hasMixedLayout ( ) const { <nl> - return isMixed ( ) | | isDict ( ) | | isShape ( ) ; <nl> + return isMixed ( ) | | isDict ( ) ; <nl> } <nl> <nl> inline bool ArrayData : : isPHPArray ( ) const { <nl> - return RuntimeOption : : EvalHackArrDVArrs <nl> - ? kind ( ) < kShapeKind <nl> - : kind ( ) < = kShapeKind ; <nl> + return kind ( ) < = kRecordKind ; <nl> } <nl> <nl> inline bool ArrayData : : isHackArray ( ) const { <nl> - return RuntimeOption : : EvalHackArrDVArrs <nl> - ? kind ( ) > = kShapeKind <nl> - : kind ( ) > = kDictKind ; <nl> + return kind ( ) > = kDictKind ; <nl> } <nl> <nl> inline ArrayData : : DVArray ArrayData : : dvArray ( ) const { <nl> inline bool ArrayData : : isDictOrDArray ( ) const { <nl> return RuntimeOption : : EvalHackArrDVArrs ? 
isDict ( ) : isDArray ( ) ; <nl> } <nl> <nl> - inline bool ArrayData : : isDictOrDArrayOrShape ( ) const { <nl> - return isShape ( ) | | isDictOrDArray ( ) ; <nl> - } <nl> - <nl> / / gcc doesn ' t optimize ( a & 3 ) = = ( b & 3 ) very well ; help it a little . <nl> inline bool ArrayData : : dvArrayEqual ( const ArrayData * a , const ArrayData * b ) { <nl> return ( ( a - > m_aux16 ^ b - > m_aux16 ) & kDVArrayMask ) = = 0 ; <nl> inline bool ArrayData : : dvArraySanityCheck ( ) const { <nl> if ( ! RuntimeOption : : EvalHackArrDVArrs ) { <nl> if ( isPacked ( ) ) return ! ( dv & kDArray ) ; <nl> if ( isMixed ( ) ) return ! ( dv & kVArray ) ; <nl> - if ( isShape ( ) ) return dv = = kDArray ; <nl> } <nl> return dv = = kNotDVArray ; <nl> } <nl> inline bool ArrayData : : useWeakKeys ( ) const { return isPHPArray ( ) ; } <nl> <nl> inline DataType ArrayData : : toDataType ( ) const { <nl> auto const k = kind ( ) ; <nl> - if ( k < kShapeKind ) return KindOfArray ; <nl> + if ( k < kRecordKind ) return KindOfArray ; <nl> if ( k = = kVecKind ) return KindOfVec ; <nl> if ( k = = kDictKind ) return KindOfDict ; <nl> - if ( k = = kShapeKind ) return KindOfShape ; <nl> assertx ( k = = kKeysetKind ) ; <nl> return KindOfKeyset ; <nl> } <nl> <nl> inline DataType ArrayData : : toPersistentDataType ( ) const { <nl> auto const k = kind ( ) ; <nl> - if ( k < kShapeKind ) return KindOfPersistentArray ; <nl> + if ( k < kRecordKind ) return KindOfPersistentArray ; <nl> if ( k = = kVecKind ) return KindOfPersistentVec ; <nl> if ( k = = kDictKind ) return KindOfPersistentDict ; <nl> - if ( k = = kShapeKind ) return KindOfPersistentShape ; <nl> assertx ( k = = kKeysetKind ) ; <nl> return KindOfPersistentKeyset ; <nl> } <nl> mmm a / hphp / runtime / base / array - data . cpp <nl> ppp b / hphp / runtime / base / array - data . cpp <nl> struct ScalarHash { <nl> case KindOfInt64 : <nl> case KindOfDouble : <nl> case KindOfPersistentString : <nl> - case KindOfPersistentShape : <nl> case KindOfPersistentArray : <nl> case KindOfPersistentVec : <nl> case KindOfPersistentDict : <nl> struct ScalarHash { <nl> ret = folly : : hash : : hash_combine ( ret , v . m_data . num ) ; <nl> break ; <nl> case KindOfString : <nl> - case KindOfShape : <nl> case KindOfArray : <nl> case KindOfVec : <nl> case KindOfDict : <nl> struct ScalarHash { <nl> if ( ad1 = = ad2 ) return true ; <nl> if ( ad1 - > size ( ) ! = ad2 - > size ( ) ) return false ; <nl> if ( ! ArrayData : : dvArrayEqual ( ad1 , ad2 ) ) return false ; <nl> - if ( ad1 - > isHackArray ( ) | | ad1 - > isShape ( ) ) { <nl> - if ( ! ad2 - > isHackArray ( ) & & ! ad2 - > isShape ( ) ) return false ; <nl> + if ( ad1 - > isHackArray ( ) ) { <nl> + if ( ! ad2 - > isHackArray ( ) ) return false ; <nl> if ( ad1 - > kind ( ) ! = ad2 - > kind ( ) ) return false ; <nl> - } else if ( ad2 - > isHackArray ( ) | | ad2 - > isShape ( ) ) { <nl> + } else if ( ad2 - > isHackArray ( ) ) { <nl> return false ; <nl> } <nl> <nl> void ArrayData : : GetScalarArray ( ArrayData * * parr ) { <nl> arr - > hasProvenanceData ( ) ; <nl> <nl> if ( arr - > empty ( ) & & LIKELY ( ! 
provenanceEnabled ) ) { <nl> - if ( arr - > isShape ( ) ) return replace ( staticEmptyShapeArray ( ) ) ; <nl> if ( arr - > isKeyset ( ) ) return replace ( staticEmptyKeysetArray ( ) ) ; <nl> if ( arr - > isVArray ( ) ) return replace ( staticEmptyVArray ( ) ) ; <nl> if ( arr - > isDArray ( ) ) return replace ( staticEmptyDArray ( ) ) ; <nl> static_assert ( ArrayFunctions : : NK = = ArrayData : : ArrayKind : : kNumKinds , <nl> APCLocalArray : : entry , \ <nl> GlobalsArray : : entry , \ <nl> RecordArray : : entry , \ <nl> - MixedArray : : entry # # Shape , / * Shape * / \ <nl> MixedArray : : entry # # Dict , / * Dict * / \ <nl> PackedArray : : entry # # Vec , / * Vec * / \ <nl> SetArray : : entry , / * Keyset * / \ <nl> const ArrayFunctions g_array_funcs = { <nl> DISPATCH ( ToPHPArray ) <nl> DISPATCH ( ToPHPArrayIntishCast ) <nl> <nl> - / * <nl> - * ArrayData * ToShape ( ArrayData * , bool ) <nl> - * <nl> - * Convert to a shape . If already a shape , it will be returned unchange <nl> - * ( without copying ) . If copy is false , it may be converted in place . If the <nl> - * input array contains references , an exception will be thrown . <nl> - * / <nl> - DISPATCH ( ToShape ) <nl> - <nl> / * <nl> * ArrayData * ToDict ( ArrayData * , bool ) <nl> * <nl> ArrayData * ArrayData : : Create ( TypedValue name , TypedValue value ) { <nl> return init . create ( ) ; <nl> } <nl> <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> - ArrayData * ArrayData : : toShapeInPlaceIfCompatible ( ) { <nl> - if ( size ( ) = = 0 & & isStatic ( ) ) { <nl> - return ArrayData : : CreateShape ( ) ; <nl> - } <nl> - assertx ( ( RuntimeOption : : EvalHackArrDVArrs & & isDict ( ) ) | | <nl> - ( ! RuntimeOption : : EvalHackArrDVArrs & & isMixed ( ) & & isDArray ( ) ) ) ; <nl> - if ( ! isRefCounted ( ) ) { <nl> - auto ad = MixedArray : : Copy ( this ) ; <nl> - ad - > m_kind = HeaderKind : : Shape ; <nl> - return ad ; <nl> - } <nl> - assertx ( ! cowCheck ( ) ) ; <nl> - m_kind = HeaderKind : : Shape ; <nl> - return this ; <nl> - } <nl> - <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / reads <nl> <nl> int ArrayData : : compare ( const ArrayData * v2 ) const { <nl> raiseHackArrCompatArrHackArrCmp ( ) ; <nl> } <nl> if ( v2 - > isVecArray ( ) ) throw_vec_compare_exception ( ) ; <nl> - if ( v2 - > isDictOrShape ( ) ) throw_dict_compare_exception ( ) ; <nl> + if ( v2 - > isDict ( ) ) throw_dict_compare_exception ( ) ; <nl> if ( v2 - > isKeyset ( ) ) throw_keyset_compare_exception ( ) ; <nl> not_reached ( ) ; <nl> } <nl> bool ArrayData : : equal ( const ArrayData * v2 , bool strict ) const { <nl> return false ; <nl> } ; <nl> <nl> - if ( isShape ( ) ) { <nl> - if ( UNLIKELY ( ! v2 - > isDictOrDArrayOrShape ( ) ) ) return mixed ( ) ; <nl> - return strict <nl> - ? MixedArray : : ShapeSame ( this , v2 ) : MixedArray : : ShapeEqual ( this , v2 ) ; <nl> - } <nl> - <nl> - if ( v2 - > isShape ( ) ) { <nl> - if ( UNLIKELY ( ! isDictOrDArrayOrShape ( ) ) ) return mixed ( ) ; <nl> - return strict <nl> - ? MixedArray : : ShapeSame ( this , v2 ) : MixedArray : : ShapeEqual ( this , v2 ) ; <nl> - } <nl> - <nl> if ( isPHPArray ( ) ) { <nl> if ( UNLIKELY ( ! v2 - > isPHPArray ( ) ) ) return mixed ( ) ; <nl> return strict ? 
Same ( this , v2 ) : Equal ( this , v2 ) ; <nl> tv_rval ArrayData : : getNotFound ( const StringData * k , bool error ) const { <nl> } <nl> <nl> const char * ArrayData : : kindToString ( ArrayKind kind ) { <nl> - std : : array < const char * , 10 > names = { { <nl> + std : : array < const char * , 9 > names = { { <nl> " PackedKind " , <nl> " MixedKind " , <nl> " EmptyKind " , <nl> " ApcKind " , <nl> " GlobalsKind " , <nl> " RecordKind " , <nl> - " ShapeKind " , <nl> " DictKind " , <nl> " VecKind " , <nl> " KeysetKind " <nl> std : : string describeKeyType ( const TypedValue * tv ) { <nl> case KindOfDict : return " dict " ; <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : return " keyset " ; <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> - return RuntimeOption : : EvalHackArrDVArrs ? " dict " : " array " ; <nl> case KindOfPersistentArray : <nl> case KindOfArray : return " array " ; <nl> case KindOfResource : <nl> std : : string describeKeyValue ( TypedValue tv ) { <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> case KindOfResource : <nl> mmm a / hphp / runtime / base / array - data . h <nl> ppp b / hphp / runtime / base / array - data . h <nl> struct ArrayData : MaybeCountable { <nl> kApcKind = 3 , / / APCLocalArray <nl> kGlobalsKind = 4 , / / GlobalsArray <nl> kRecordKind = 5 , / / RecordArray <nl> - kShapeKind = 6 , / / Shape <nl> - kDictKind = 7 , / / Hack dict <nl> - kVecKind = 8 , / / Hack vec <nl> - kKeysetKind = 9 , / / Hack keyset <nl> - kNumKinds = 10 / / insert new values before kNumKinds . <nl> + kDictKind = 6 , / / Hack dict <nl> + kVecKind = 7 , / / Hack vec <nl> + kKeysetKind = 8 , / / Hack keyset <nl> + kNumKinds = 9 / / insert new values before kNumKinds . <nl> } ; <nl> <nl> / * <nl> struct ArrayData : MaybeCountable { <nl> static ArrayData * Create ( ) ; <nl> static ArrayData * CreateVec ( ) ; <nl> static ArrayData * CreateDict ( ) ; <nl> - static ArrayData * CreateShape ( ) ; <nl> static ArrayData * CreateKeyset ( ) ; <nl> static ArrayData * CreateVArray ( ) ; <nl> static ArrayData * CreateDArray ( ) ; <nl> struct ArrayData : MaybeCountable { <nl> * / <nl> ArrayData * toPHPArray ( bool copy ) ; <nl> ArrayData * toPHPArrayIntishCast ( bool copy ) ; <nl> - ArrayData * toShape ( bool copy ) ; <nl> ArrayData * toDict ( bool copy ) ; <nl> ArrayData * toVec ( bool copy ) ; <nl> ArrayData * toKeyset ( bool copy ) ; <nl> ArrayData * toVArray ( bool copy ) ; <nl> ArrayData * toDArray ( bool copy ) ; <nl> <nl> - / * <nl> - * Converts this to a Shape in place if this is a compatible type with Shapes . <nl> - * If this is not refCounted it will instead make a copy before converting <nl> - * to a Shape . <nl> - * <nl> - * Dicts are compatible when RuntimeOption : : EvalHackArrDVArrs is set and <nl> - * DArrays are compatible when it is not set . Empty arrays are always <nl> - * compatible . No other types are compatible . <nl> - * <nl> - * This function will return ArrayData : : CreateShape when this is empty and it <nl> - * will return this otherwise . <nl> - * / <nl> - ArrayData * toShapeInPlaceIfCompatible ( ) ; <nl> - <nl> / * <nl> * Return an array with identical contents to this array , but of an array <nl> * kind which can handle all array operations . 
<nl> struct ArrayData : MaybeCountable { <nl> * / <nl> bool isPacked ( ) const ; <nl> bool isMixed ( ) const ; <nl> - bool isMixedOrShape ( ) const ; <nl> bool isApcArray ( ) const ; <nl> bool isGlobalsArray ( ) const ; <nl> bool isEmptyArray ( ) const ; <nl> bool isDict ( ) const ; <nl> - bool isDictOrShape ( ) const ; <nl> - bool isShape ( ) const ; <nl> bool isVecArray ( ) const ; <nl> bool isKeyset ( ) const ; <nl> bool isRecordArray ( ) const ; <nl> struct ArrayData : MaybeCountable { <nl> bool isNotDVArray ( ) const ; <nl> bool isVecOrVArray ( ) const ; <nl> bool isDictOrDArray ( ) const ; <nl> - bool isDictOrDArrayOrShape ( ) const ; <nl> <nl> static bool dvArrayEqual ( const ArrayData * a , const ArrayData * b ) ; <nl> <nl> static_assert ( ArrayData : : kMixedKind = = uint8_t ( HeaderKind : : Mixed ) , " " ) ; <nl> static_assert ( ArrayData : : kEmptyKind = = uint8_t ( HeaderKind : : Empty ) , " " ) ; <nl> static_assert ( ArrayData : : kApcKind = = uint8_t ( HeaderKind : : Apc ) , " " ) ; <nl> static_assert ( ArrayData : : kGlobalsKind = = uint8_t ( HeaderKind : : Globals ) , " " ) ; <nl> - static_assert ( ArrayData : : kShapeKind = = uint8_t ( HeaderKind : : Shape ) , " " ) ; <nl> static_assert ( ArrayData : : kDictKind = = uint8_t ( HeaderKind : : Dict ) , " " ) ; <nl> static_assert ( ArrayData : : kVecKind = = uint8_t ( HeaderKind : : VecArray ) , " " ) ; <nl> static_assert ( ArrayData : : kRecordKind = = uint8_t ( HeaderKind : : RecordArray ) , " " ) ; <nl> extern std : : aligned_storage < sizeof ( ArrayData ) , 16 > : : type s_theEmptyVecArray ; <nl> extern std : : aligned_storage < sizeof ( ArrayData ) , 16 > : : type s_theEmptyVArray ; <nl> extern std : : aligned_storage < kEmptyMixedArraySize , 16 > : : type s_theEmptyDictArray ; <nl> extern std : : aligned_storage < kEmptyMixedArraySize , 16 > : : type s_theEmptyDArray ; <nl> - extern std : : aligned_storage < kEmptyMixedArraySize , 16 > : : type <nl> - s_theEmptyShapeDArray ; <nl> - extern std : : aligned_storage < kEmptyMixedArraySize , 16 > : : type s_theEmptyShapeDict ; <nl> extern std : : aligned_storage < kEmptySetArraySize , 16 > : : type s_theEmptySetArray ; <nl> <nl> / * <nl> ArrayData * staticEmptyVArray ( ) ; <nl> ArrayData * staticEmptyDArray ( ) ; <nl> ArrayData * staticEmptyVecArray ( ) ; <nl> ArrayData * staticEmptyDictArray ( ) ; <nl> - ArrayData * staticEmptyShapeArray ( ) ; <nl> ArrayData * staticEmptyKeysetArray ( ) ; <nl> <nl> / * <nl> struct ArrayFunctions { <nl> ArrayData * ( * escalate [ NK ] ) ( const ArrayData * ) ; <nl> ArrayData * ( * toPHPArray [ NK ] ) ( ArrayData * , bool ) ; <nl> ArrayData * ( * toPHPArrayIntishCast [ NK ] ) ( ArrayData * , bool ) ; <nl> - ArrayData * ( * toShape [ NK ] ) ( ArrayData * , bool ) ; <nl> ArrayData * ( * toDict [ NK ] ) ( ArrayData * , bool ) ; <nl> ArrayData * ( * toVec [ NK ] ) ( ArrayData * , bool ) ; <nl> ArrayData * ( * toKeyset [ NK ] ) ( ArrayData * , bool ) ; <nl> mmm a / hphp / runtime / base / builtin - functions . h <nl> ppp b / hphp / runtime / base / builtin - functions . 
h <nl> inline bool is_array ( const Cell * c ) { <nl> } <nl> return false ; <nl> } <nl> - return tvIsArrayOrShape ( c ) ; <nl> + return tvIsArray ( c ) ; <nl> } <nl> <nl> inline bool is_vec ( const Cell * c ) { <nl> inline bool is_vec ( const Cell * c ) { <nl> <nl> inline bool is_dict ( const Cell * c ) { <nl> assertx ( cellIsPlausible ( * c ) ) ; <nl> - return tvIsDictOrShape ( c ) ; <nl> + return tvIsDict ( c ) ; <nl> } <nl> <nl> inline bool is_keyset ( const Cell * c ) { <nl> inline bool is_varray ( const Cell * c ) { <nl> <nl> inline bool is_darray ( const Cell * c ) { <nl> return RuntimeOption : : EvalHackArrDVArrs <nl> - ? tvIsDictOrShape ( c ) <nl> + ? tvIsDict ( c ) <nl> : ( tvIsArray ( c ) & & c - > m_data . parr - > isDArray ( ) ) ; <nl> } <nl> <nl> mmm a / hphp / runtime / base / collections . cpp <nl> ppp b / hphp / runtime / base / collections . cpp <nl> ArrayData * deepCopyVecArray ( ArrayData * arr ) { <nl> } <nl> <nl> ArrayData * deepCopyDict ( ArrayData * arr ) { <nl> - assertx ( arr - > isDictOrShape ( ) ) ; <nl> + assertx ( arr - > isDict ( ) ) ; <nl> Array ar ( arr ) ; <nl> MixedArray : : IterateKV ( <nl> MixedArray : : asMixed ( arr ) , <nl> void deepCopy ( tv_lval lval ) { <nl> return ; <nl> } <nl> <nl> - case KindOfShape : { <nl> - auto & original = val ( lval ) . parr ; <nl> - auto arr = RuntimeOption : : EvalHackArrDVArrs ? <nl> - deepCopyDict ( original ) : deepCopyArray ( original ) ; <nl> - decRefArr ( original ) ; <nl> - original = arr ; <nl> - return ; <nl> - } <nl> - <nl> case KindOfDict : { <nl> auto & original = val ( lval ) . parr ; <nl> auto arr = deepCopyDict ( original ) ; <nl> mmm a / hphp / runtime / base / concurrent - shared - store . cpp <nl> ppp b / hphp / runtime / base / concurrent - shared - store . cpp <nl> EntryInfo : : Type EntryInfo : : getAPCType ( const APCHandle * handle ) { <nl> case APCKind : : StaticArray : <nl> case APCKind : : StaticVec : <nl> case APCKind : : StaticDict : <nl> - case APCKind : : StaticShape : <nl> case APCKind : : StaticKeyset : <nl> return EntryInfo : : Type : : Uncounted ; <nl> case APCKind : : UncountedString : <nl> EntryInfo : : Type EntryInfo : : getAPCType ( const APCHandle * handle ) { <nl> return EntryInfo : : Type : : UncountedVec ; <nl> case APCKind : : UncountedDict : <nl> return EntryInfo : : Type : : UncountedDict ; <nl> - case APCKind : : UncountedShape : <nl> - return EntryInfo : : Type : : UncountedShape ; <nl> case APCKind : : UncountedKeyset : <nl> return EntryInfo : : Type : : UncountedKeyset ; <nl> case APCKind : : SerializedArray : <nl> EntryInfo : : Type EntryInfo : : getAPCType ( const APCHandle * handle ) { <nl> return EntryInfo : : Type : : SerializedVec ; <nl> case APCKind : : SerializedDict : <nl> return EntryInfo : : Type : : SerializedDict ; <nl> - case APCKind : : SerializedShape : <nl> - return EntryInfo : : Type : : SerializedShape ; <nl> case APCKind : : SerializedKeyset : <nl> return EntryInfo : : Type : : SerializedKeyset ; <nl> case APCKind : : SharedVec : <nl> return EntryInfo : : Type : : APCVec ; <nl> case APCKind : : SharedDict : <nl> return EntryInfo : : Type : : APCDict ; <nl> - case APCKind : : SharedShape : <nl> - return EntryInfo : : Type : : APCShape ; <nl> case APCKind : : SharedKeyset : <nl> return EntryInfo : : Type : : APCKeyset ; <nl> case APCKind : : SharedArray : <nl> struct HotCache { <nl> return HotValue { APCTypedValue : : fromHandle ( h ) - > getVecData ( ) } ; <nl> case APCKind : : UncountedDict : <nl> return HotValue { APCTypedValue : : fromHandle ( h ) - > 
getDictData ( ) } ; <nl> - case APCKind : : UncountedShape : <nl> - return HotValue { APCTypedValue : : fromHandle ( h ) - > getShapeData ( ) } ; <nl> case APCKind : : UncountedKeyset : <nl> return HotValue { APCTypedValue : : fromHandle ( h ) - > getKeysetData ( ) } ; <nl> default : <nl> mmm a / hphp / runtime / base / concurrent - shared - store . h <nl> ppp b / hphp / runtime / base / concurrent - shared - store . h <nl> struct EntryInfo { <nl> SerializedObject , <nl> UncountedVec , <nl> UncountedDict , <nl> - UncountedShape , <nl> UncountedKeyset , <nl> SerializedVec , <nl> SerializedDict , <nl> - SerializedShape , <nl> SerializedKeyset , <nl> APCVec , <nl> APCDict , <nl> - APCShape , <nl> APCKeyset , <nl> } ; <nl> <nl> mmm a / hphp / runtime / base / datatype - profiler . cpp <nl> ppp b / hphp / runtime / base / datatype - profiler . cpp <nl> DataTypeProfiler : : DataTypeProfiler ( std : : string name ) <nl> , m_vec ( name + " = KindOfVec " ) <nl> , m_persistent_dict ( name + " = KindOfPersistentDict " ) <nl> , m_dict ( name + " = KindOfDict " ) <nl> - , m_persistent_shape ( name + " = KindOfPersistentShape " ) <nl> - , m_shape ( name + " = KindOfShape " ) <nl> , m_persistent_keyset ( name + " = KindOfPersistentKeyset " ) <nl> , m_keyset ( name + " = KindOfKeyset " ) <nl> , m_object ( name + " = KindOfObject " ) <nl> DataType DataTypeProfiler : : operator ( ) ( DataType type ) { <nl> case KindOfDict : m_dict . count ( ) ; break ; <nl> case KindOfPersistentKeyset : m_persistent_keyset . count ( ) ; break ; <nl> case KindOfKeyset : m_keyset . count ( ) ; break ; <nl> - case KindOfPersistentShape : m_persistent_shape . count ( ) ; break ; <nl> - case KindOfShape : m_shape . count ( ) ; break ; <nl> case KindOfPersistentArray : m_persistent_array . count ( ) ; break ; <nl> case KindOfArray : m_array . count ( ) ; break ; <nl> case KindOfObject : m_object . count ( ) ; break ; <nl> DataTypeProfiler : : ~ DataTypeProfiler ( ) { <nl> m_vec . hits ( ) + <nl> m_persistent_dict . hits ( ) + <nl> m_dict . hits ( ) + <nl> - m_persistent_shape . hits ( ) + <nl> - m_shape . hits ( ) + <nl> m_persistent_keyset . hits ( ) + <nl> m_keyset . hits ( ) + <nl> m_persistent_array . hits ( ) + <nl> DataTypeProfiler : : ~ DataTypeProfiler ( ) { <nl> " KindOfVec = % . 1f % % " <nl> " KindOfPersistentDict = % . 1f % % " <nl> " KindOfDict = % . 1f % % " <nl> - " KindOfPersistentShape = % . 1f % % " <nl> - " KindOfShape = % . 1f % % " <nl> " KindOfPersistentKeyset = % . 1f % % " <nl> " KindOfKeyset = % . 1f % % " <nl> " KindOfObject = % . 1f % % " <nl> DataTypeProfiler : : ~ DataTypeProfiler ( ) { <nl> 100 . 0 * m_vec . hits ( ) / total , <nl> 100 . 0 * m_persistent_dict . hits ( ) / total , <nl> 100 . 0 * m_dict . hits ( ) / total , <nl> - 100 . 0 * m_persistent_shape . hits ( ) / total , <nl> - 100 . 0 * m_shape . hits ( ) / total , <nl> 100 . 0 * m_persistent_keyset . hits ( ) / total , <nl> 100 . 0 * m_keyset . hits ( ) / total , <nl> 100 . 0 * m_object . hits ( ) / total , <nl> mmm a / hphp / runtime / base / datatype - profiler . h <nl> ppp b / hphp / runtime / base / datatype - profiler . h <nl> struct DataTypeProfiler { <nl> m_persistent_array , m_array , <nl> m_persistent_vec , m_vec , <nl> m_persistent_dict , m_dict , <nl> - m_persistent_shape , m_shape , <nl> m_persistent_keyset , m_keyset , <nl> m_object , m_resource , m_ref , m_func , m_class , m_clsmeth , <nl> m_record ; <nl> mmm a / hphp / runtime / base / datatype . cpp <nl> ppp b / hphp / runtime / base / datatype . 
cpp <nl> MaybeDataType get_datatype ( <nl> return KindOfObject ; <nl> } <nl> <nl> - bool isArrayOrShapeType ( DataType t ) { <nl> - return isArrayType ( t ) | | <nl> - ( ! RuntimeOption : : EvalHackArrDVArrs & & isShapeType ( t ) ) ; <nl> - } <nl> - bool isArrayOrShapeType ( MaybeDataType t ) { <nl> - return t & & isArrayOrShapeType ( * t ) ; <nl> - } <nl> - <nl> bool isVecOrArrayType ( DataType t ) { <nl> return RuntimeOption : : EvalHackArrDVArrs ? isVecType ( t ) : isArrayType ( t ) ; <nl> } <nl> bool isDictOrArrayType ( DataType t ) { <nl> return RuntimeOption : : EvalHackArrDVArrs ? isDictType ( t ) : isArrayType ( t ) ; <nl> } <nl> <nl> - bool isDictOrShapeType ( DataType t ) { <nl> - return isDictType ( t ) | | <nl> - ( RuntimeOption : : EvalHackArrDVArrs & & isShapeType ( t ) ) ; <nl> - } <nl> - bool isDictOrShapeType ( MaybeDataType t ) { <nl> - return t & & isDictOrShapeType ( * t ) ; <nl> - } <nl> - <nl> bool equivDataTypes ( DataType t1 , DataType t2 ) { <nl> - return sameDataTypes ( t1 , t2 ) | | <nl> - ( RuntimeOption : : EvalHackArrDVArrs ? <nl> - ( ( isShapeType ( t1 ) & & isDictType ( t2 ) ) | | <nl> - ( isDictType ( t1 ) & & isShapeType ( t2 ) ) ) : <nl> - ( ( isShapeType ( t1 ) & & isArrayType ( t2 ) ) | | <nl> - ( isArrayType ( t1 ) & & isShapeType ( t2 ) ) ) ) ; <nl> + return sameDataTypes ( t1 , t2 ) ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> mmm a / hphp / runtime / base / datatype . h <nl> ppp b / hphp / runtime / base / datatype . h <nl> namespace HPHP { <nl> * - Audit jit : : emitTypeTest ( ) . <nl> * / <nl> # define DATATYPES \ <nl> - DT ( PersistentArray , - 14 ) \ <nl> - DT ( Array , - 13 ) \ <nl> - DT ( PersistentShape , - 12 ) \ <nl> - DT ( Shape , - 11 ) \ <nl> + DT ( PersistentArray , - 12 ) \ <nl> + DT ( Array , - 11 ) \ <nl> DT ( PersistentKeyset , - 10 ) \ <nl> DT ( Keyset , - 9 ) \ <nl> DT ( PersistentDict , - 8 ) \ <nl> inline bool isDictType ( MaybeDataType t ) { <nl> return t & & isDictType ( * t ) ; <nl> } <nl> <nl> - constexpr bool isShapeType ( DataType t ) { <nl> - return <nl> - static_cast < DataType > ( dt_t ( t ) & ~ kRefCountedBit ) = = KindOfPersistentShape ; <nl> - } <nl> - inline bool isShapeType ( MaybeDataType t ) { <nl> - return t & & isShapeType ( * t ) ; <nl> - } <nl> - <nl> - / * <nl> - * isArrayOrShapeType checks whether DataType is an Array or a Shape that <nl> - * behaves like an Array . This is important because this check is often used <nl> - * to check that a piece of code is only operating on array - like objects and <nl> - * not dict - like objects . <nl> - * / <nl> - bool isArrayOrShapeType ( DataType ) ; <nl> - bool isArrayOrShapeType ( MaybeDataType ) ; <nl> - <nl> / * <nl> * Based on EvalHackArrDVArrs checks whether t is vec / dict or array <nl> * / <nl> bool isVecOrArrayType ( DataType t ) ; <nl> bool isDictOrArrayType ( DataType t ) ; <nl> - / * <nl> - * isDictOrShapeType checks whether DataType is a Dict or a Shape that <nl> - * behaves like a Dict . This is important because this check is often used <nl> - * to check that a piece of code is only operating on dict - like objects and <nl> - * not array - like objects . 
<nl> - * / <nl> - bool isDictOrShapeType ( DataType ) ; <nl> - bool isDictOrShapeType ( MaybeDataType ) ; <nl> <nl> constexpr bool isKeysetType ( DataType t ) { <nl> return <nl> constexpr bool sameDataTypes ( DataType t1 , DataType t2 ) { <nl> * Return whether two DataTypes for primitive types are " equivalent " as far as <nl> * user - visible PHP types are concerned ( i . e . ignoring different types of <nl> * strings , arrays , and Hack arrays ) . Note that KindOfUninit and KindOfNull are <nl> - * not considered equivalent . This function differs from sameDataTypes because <nl> - * it considers Shapes to be equivalent to Dicts / Arrays depending on <nl> - * RuntimeOption : : EvalHackArrDVArrs . A good rule of thumb : equivDataTypes <nl> + * not considered equivalent . A good rule of thumb : equivDataTypes <nl> * should be preferred at runtime and sameDataTypes should be preferred at <nl> * compile time . <nl> * / <nl> bool operator > = ( DataType , DataType ) = delete ; <nl> case KindOfInt64 : \ <nl> case KindOfDouble : \ <nl> case KindOfPersistentString : \ <nl> - case KindOfPersistentShape : \ <nl> case KindOfPersistentArray : \ <nl> case KindOfPersistentVec : \ <nl> case KindOfPersistentDict : \ <nl> mmm a / hphp / runtime / base / empty - array . cpp <nl> ppp b / hphp / runtime / base / empty - array . cpp <nl> ArrayData * EmptyArray : : ToDict ( ArrayData * , bool ) { <nl> return ArrayData : : CreateDict ( ) ; <nl> } <nl> <nl> - ArrayData * EmptyArray : : ToShape ( ArrayData * , bool ) { <nl> - return ArrayData : : CreateShape ( ) ; <nl> - } <nl> - <nl> ArrayData * EmptyArray : : ToVec ( ArrayData * , bool ) { <nl> return ArrayData : : CreateVec ( ) ; <nl> } <nl> mmm a / hphp / runtime / base / empty - array . h <nl> ppp b / hphp / runtime / base / empty - array . h <nl> struct EmptyArray final : type_scan : : MarkCollectable < EmptyArray > { <nl> return ArrayData : : CreateDArray ( ) ; <nl> } <nl> static ArrayData * ToDict ( ArrayData * , bool ) ; <nl> - static ArrayData * ToShape ( ArrayData * , bool ) ; <nl> static ArrayData * ToVec ( ArrayData * , bool ) ; <nl> static ArrayData * ToKeyset ( ArrayData * , bool ) ; <nl> static void Renumber ( ArrayData * ) { } <nl> mmm a / hphp / runtime / base / header - kind . h <nl> ppp b / hphp / runtime / base / header - kind . h <nl> enum class HeaderKind : uint8_t { <nl> Packed , Mixed , Empty , Apc , Globals , <nl> / / Record array <nl> RecordArray , <nl> - / / Shape <nl> - Shape , <nl> / / Hack arrays <nl> Dict , VecArray , Keyset , <nl> / / Other ordinary refcounted heap objects <nl> mmm a / hphp / runtime / base / heap - collect . cpp <nl> ppp b / hphp / runtime / base / heap - collect . cpp <nl> DEBUG_ONLY bool checkEnqueuedKind ( const HeapObject * h ) { <nl> case HeaderKind : : Packed : <nl> case HeaderKind : : Mixed : <nl> case HeaderKind : : Dict : <nl> - case HeaderKind : : Shape : <nl> case HeaderKind : : VecArray : <nl> case HeaderKind : : Keyset : <nl> case HeaderKind : : Empty : <nl> mmm a / hphp / runtime / base / heap - report . cpp <nl> ppp b / hphp / runtime / base / heap - report . cpp <nl> DEBUG_ONLY std : : string describe ( const HeapGraph & g , int n ) { <nl> case HeaderKind : : Packed : <nl> case HeaderKind : : Mixed : <nl> case HeaderKind : : Dict : <nl> - case HeaderKind : : Shape : <nl> case HeaderKind : : Empty : <nl> case HeaderKind : : VecArray : <nl> case HeaderKind : : Keyset : <nl> mmm a / hphp / runtime / base / heap - scan . h <nl> ppp b / hphp / runtime / base / heap - scan . 
h <nl> inline void scanHeapObject ( const HeapObject * h , type_scan : : Scanner & scanner ) { <nl> return PackedArray : : scan ( static_cast < const ArrayData * > ( h ) , scanner ) ; <nl> case HeaderKind : : Mixed : <nl> case HeaderKind : : Dict : <nl> - case HeaderKind : : Shape : <nl> return static_cast < const MixedArray * > ( h ) - > scan ( scanner ) ; <nl> case HeaderKind : : Keyset : <nl> return static_cast < const SetArray * > ( h ) - > scan ( scanner ) ; <nl> mmm a / hphp / runtime / base / memory - manager - defs . h <nl> ppp b / hphp / runtime / base / memory - manager - defs . h <nl> inline size_t allocSize ( const HeapObject * h ) { <nl> 0 , / * APCLocalArray * / <nl> sizeClass < GlobalsArray > ( ) , <nl> 0 , / * RecordArray * / <nl> - 0 , / * Shape * / <nl> 0 , / * Dict * / <nl> 0 , / * VecArray * / <nl> 0 , / * KeySet * / <nl> inline size_t allocSize ( const HeapObject * h ) { <nl> assertx ( size = = MemoryManager : : sizeClass ( size ) ) ; <nl> return size ; <nl> case HeaderKind : : Mixed : <nl> - case HeaderKind : : Shape : <nl> case HeaderKind : : Dict : <nl> / / size = fn of h - > m_scale <nl> size = static_cast < const MixedArray * > ( h ) - > heapSize ( ) ; <nl> mmm a / hphp / runtime / base / memory - manager . cpp <nl> ppp b / hphp / runtime / base / memory - manager . cpp <nl> void MemoryManager : : flush ( ) { <nl> <nl> const std : : array < char * , NumHeaderKinds > header_names = { { <nl> " PackedArray " , " MixedArray " , " EmptyArray " , " ApcArray " , " GlobalsArray " , <nl> - " RecordArray " , " ShapeArray " , " DictArray " , " VecArray " , " KeysetArray " , <nl> + " RecordArray " , " DictArray " , " VecArray " , " KeysetArray " , <nl> " String " , " Resource " , " Ref " , " ClsMeth " , " Record " , <nl> " Object " , " NativeObject " , " WaitHandle " , " AsyncFuncWH " , " AwaitAllWH " , <nl> " Closure " , " Vector " , " Map " , " Set " , " Pair " , " ImmVector " , " ImmMap " , " ImmSet " , <nl> void MemoryManager : : checkHeap ( const char * phase ) { <nl> break ; <nl> case HeaderKind : : Packed : <nl> case HeaderKind : : Mixed : <nl> - case HeaderKind : : Shape : <nl> case HeaderKind : : Dict : <nl> case HeaderKind : : Empty : <nl> case HeaderKind : : VecArray : <nl> mmm a / hphp / runtime / base / mixed - array - defs . h <nl> ppp b / hphp / runtime / base / mixed - array - defs . h <nl> void ConvertTvToUncounted ( <nl> break ; <nl> } <nl> <nl> - case KindOfShape : <nl> - case KindOfPersistentShape : { <nl> - auto & ad = data . parr ; <nl> - assertx ( ad - > isShape ( ) ) ; <nl> - if ( handlePersistent ( ad ) ) break ; <nl> - if ( ad - > empty ( ) ) { <nl> - ad = ArrayData : : CreateShape ( ) ; <nl> - } else { <nl> - ad = MixedArray : : MakeUncounted ( ad , false , seen ) ; <nl> - } <nl> - break ; <nl> - } <nl> - <nl> case KindOfArray : <nl> type = KindOfPersistentArray ; <nl> / / Fall - through . <nl> mmm a / hphp / runtime / base / mixed - array . cpp <nl> ppp b / hphp / runtime / base / mixed - array . 
cpp <nl> static_assert ( MixedArray : : computeAllocBytes ( MixedArray : : SmallScale ) = = <nl> <nl> std : : aligned_storage < kEmptyMixedArraySize , 16 > : : type s_theEmptyDictArray ; <nl> std : : aligned_storage < kEmptyMixedArraySize , 16 > : : type s_theEmptyDArray ; <nl> - std : : aligned_storage < kEmptyMixedArraySize , 16 > : : type s_theEmptyShapeDArray ; <nl> - std : : aligned_storage < kEmptyMixedArraySize , 16 > : : type s_theEmptyShapeDict ; <nl> <nl> struct MixedArray : : Initializer { <nl> Initializer ( ) { <nl> struct MixedArray : : DArrayInitializer { <nl> } ; <nl> MixedArray : : DArrayInitializer MixedArray : : s_darr_initializer ; <nl> <nl> - struct MixedArray : : ShapeInitializer { <nl> - ShapeInitializer ( ) { <nl> - { <nl> - auto const ad = reinterpret_cast < MixedArray * > ( & s_theEmptyShapeDArray ) ; <nl> - ad - > initHash ( 1 ) ; <nl> - ad - > m_sizeAndPos = 0 ; <nl> - ad - > m_scale_used = 1 ; <nl> - ad - > m_nextKI = 0 ; <nl> - ad - > initHeader_16 ( HeaderKind : : Shape , StaticValue , ArrayData : : kDArray ) ; <nl> - assertx ( ad - > checkInvariants ( ) ) ; <nl> - } <nl> - { <nl> - auto const ad = reinterpret_cast < MixedArray * > ( & s_theEmptyShapeDict ) ; <nl> - ad - > initHash ( 1 ) ; <nl> - ad - > m_sizeAndPos = 0 ; <nl> - ad - > m_scale_used = 1 ; <nl> - ad - > m_nextKI = 0 ; <nl> - ad - > initHeader ( HeaderKind : : Shape , StaticValue ) ; <nl> - assertx ( ad - > checkInvariants ( ) ) ; <nl> - } <nl> - } <nl> - } ; <nl> - MixedArray : : ShapeInitializer MixedArray : : s_shape_initializer ; <nl> - <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> namespace { <nl> ALWAYS_INLINE <nl> ArrayData * MixedArray : : MakeReserveImpl ( uint32_t size , <nl> HeaderKind hk , <nl> ArrayData : : DVArray dvArray ) { <nl> - assertx ( hk = = HeaderKind : : Mixed | | hk = = HeaderKind : : Dict | | <nl> - hk = = HeaderKind : : Shape ) ; <nl> + assertx ( hk = = HeaderKind : : Mixed | | hk = = HeaderKind : : Dict ) ; <nl> assertx ( dvArray = = ArrayData : : kNotDVArray | | dvArray = = ArrayData : : kDArray ) ; <nl> assertx ( hk ! = HeaderKind : : Dict | | dvArray = = ArrayData : : kNotDVArray ) ; <nl> assertx ( ! RuntimeOption : : EvalHackArrDVArrs | | <nl> ArrayData * MixedArray : : MakeReserveDict ( uint32_t size ) { <nl> return tryTagArrProvDict ( ad ) ; <nl> } <nl> <nl> - ArrayData * MixedArray : : MakeReserveShape ( uint32_t size ) { <nl> - auto const ad = MakeReserveImpl ( size , HeaderKind : : Shape , <nl> - RuntimeOption : : EvalHackArrDVArrs ? <nl> - ArrayData : : kNotDVArray : ArrayData : : kDArray ) ; <nl> - assertx ( ad - > isShape ( ) ) ; <nl> - return ad ; <nl> - } <nl> - <nl> ArrayData * MixedArray : : MakeReserveSame ( const ArrayData * other , <nl> uint32_t capacity ) { <nl> capacity = ( capacity ? 
capacity : other - > size ( ) ) ; <nl> ArrayData * MixedArray : : MakeReserveSame ( const ArrayData * other , <nl> return MixedArray : : MakeReserveDict ( capacity ) ; <nl> } <nl> <nl> - if ( other - > isShape ( ) ) { <nl> - return MixedArray : : MakeReserveShape ( capacity ) ; <nl> - } <nl> - <nl> if ( other - > isKeyset ( ) ) { <nl> return SetArray : : MakeReserveSet ( capacity ) ; <nl> } <nl> MixedArray * MixedArray : : CopyMixed ( const MixedArray & other , <nl> AllocMode mode , <nl> HeaderKind dest_hk , <nl> ArrayData : : DVArray dvArray ) { <nl> - assertx ( dest_hk = = HeaderKind : : Mixed | | <nl> - dest_hk = = HeaderKind : : Dict | | <nl> - dest_hk = = HeaderKind : : Shape ) ; <nl> + assertx ( dest_hk = = HeaderKind : : Mixed | | dest_hk = = HeaderKind : : Dict ) ; <nl> assertx ( dvArray = = ArrayData : : kNotDVArray | | dvArray = = ArrayData : : kDArray ) ; <nl> assertx ( dest_hk ! = HeaderKind : : Dict | | dvArray = = ArrayData : : kNotDVArray ) ; <nl> if ( mode = = AllocMode : : Static ) { <nl> ArrayData * MixedArray : : MakeDArrayFromAPC ( const APCArray * apc ) { <nl> return init . create ( ) ; <nl> } <nl> <nl> - ArrayData * MixedArray : : MakeShapeFromAPC ( const APCArray * apc ) { <nl> - auto arr = RuntimeOption : : EvalHackArrDVArrs <nl> - ? MixedArray : : MakeDictFromAPC ( apc ) <nl> - : MixedArray : : MakeDArrayFromAPC ( apc ) ; <nl> - arr = arr - > toShapeInPlaceIfCompatible ( ) ; <nl> - return arr ; <nl> - } <nl> - <nl> / / = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> / / Destruction <nl> <nl> ALWAYS_INLINE <nl> ArrayData * MixedArray : : FromDictImpl ( ArrayData * adIn , <nl> bool copy , <nl> bool toDArray ) { <nl> - assertx ( adIn - > isDictOrShape ( ) ) ; <nl> + assertx ( adIn - > isDict ( ) ) ; <nl> auto a = asMixed ( adIn ) ; <nl> <nl> auto const size = a - > size ( ) ; <nl> ArrayData * MixedArray : : ToPHPArrayIntishCastDict ( ArrayData * adIn , bool copy ) { <nl> return out ; <nl> } <nl> <nl> - ArrayData * MixedArray : : ToPHPArrayShape ( ArrayData * in , bool copy ) { <nl> - assertx ( in - > isShape ( ) ) ; <nl> - auto arr = RuntimeOption : : EvalHackArrDVArrs <nl> - ? MixedArray : : ToPHPArrayDict ( in , copy ) <nl> - : MixedArray : : ToPHPArray ( in , copy ) ; <nl> - if ( arr = = staticEmptyArray ( ) ) return arr ; <nl> - assertx ( ! arr - > cowCheck ( ) ) ; <nl> - arr - > m_kind = HeaderKind : : Mixed ; <nl> - return arr ; <nl> - } <nl> - <nl> ArrayData * MixedArray : : ToDArray ( ArrayData * in , bool copy ) { <nl> auto a = asMixed ( in ) ; <nl> assertx ( a - > isMixed ( ) ) ; <nl> ArrayData * MixedArray : : ToDArrayDict ( ArrayData * in , bool copy ) { <nl> return out ; <nl> } <nl> <nl> - ArrayData * MixedArray : : ToDArrayShape ( ArrayData * in , bool copy ) { <nl> - assertx ( in - > isShape ( ) ) ; <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - auto out = FromDictImpl < IntishCast : : None > ( in , copy , true ) ; <nl> - assertx ( out - > isDArray ( ) ) ; <nl> - assertx ( ! out - > isLegacyArray ( ) ) ; <nl> - return out ; <nl> - } <nl> - assertx ( in - > isDArray ( ) ) ; <nl> - auto a = asMixed ( in ) ; <nl> - auto out = copy ? 
a - > copyMixed ( ) : a ; <nl> - out - > m_kind = HeaderKind : : Mixed ; <nl> - assertx ( out - > checkInvariants ( ) ) ; <nl> - return out ; <nl> - } <nl> - <nl> MixedArray * MixedArray : : ToDictInPlace ( ArrayData * ad ) { <nl> auto a = asMixed ( ad ) ; <nl> assertx ( a - > isMixed ( ) ) ; <nl> MixedArray * MixedArray : : ToDictInPlace ( ArrayData * ad ) { <nl> <nl> ArrayData * MixedArray : : ToDict ( ArrayData * ad , bool copy ) { <nl> auto a = asMixed ( ad ) ; <nl> - assertx ( a - > isMixedOrShape ( ) ) ; <nl> + assertx ( a - > isMixed ( ) ) ; <nl> <nl> if ( a - > empty ( ) & & a - > m_nextKI = = 0 ) return ArrayData : : CreateDict ( ) ; <nl> <nl> ArrayData * MixedArray : : ToDict ( ArrayData * ad , bool copy ) { <nl> } <nl> } <nl> <nl> - ArrayData * MixedArray : : ToShape ( ArrayData * ad , bool copy ) { <nl> - assertx ( ad - > isMixed ( ) ) ; <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - auto a = ToDict ( ad , copy ) ; <nl> - a = a - > toShapeInPlaceIfCompatible ( ) ; <nl> - return a ; <nl> - } <nl> - auto a = asMixed ( ad ) ; <nl> - if ( a - > getSize ( ) = = 0 ) return ArrayData : : CreateShape ( ) ; <nl> - assertx ( a - > isDArray ( ) ) ; <nl> - auto out = copy ? a - > copyMixed ( ) : a ; <nl> - out - > setDVArray ( ArrayData : : kDArray ) ; <nl> - out - > setLegacyArray ( false ) ; <nl> - assertx ( out - > checkInvariants ( ) ) ; <nl> - return out - > toShapeInPlaceIfCompatible ( ) ; <nl> - } <nl> - <nl> ArrayData * MixedArray : : ToDictDict ( ArrayData * ad , bool ) { <nl> assertx ( asMixed ( ad ) - > checkInvariants ( ) ) ; <nl> assertx ( ad - > isDict ( ) ) ; <nl> return ad ; <nl> } <nl> <nl> - ArrayData * MixedArray : : ToShapeShape ( ArrayData * ad , bool ) { <nl> - assertx ( asMixed ( ad ) - > checkInvariants ( ) ) ; <nl> - assertx ( ad - > isShape ( ) ) ; <nl> - return ad ; <nl> - } <nl> - <nl> void MixedArray : : Renumber ( ArrayData * ad ) { <nl> asMixed ( ad ) - > compact ( true ) ; <nl> } <nl> void MixedArray : : OnSetEvalScalar ( ArrayData * ad ) { <nl> <nl> tv_rval MixedArray : : NvTryGetIntDict ( const ArrayData * ad , int64_t k ) { <nl> assertx ( asMixed ( ad ) - > checkInvariants ( ) ) ; <nl> - assertx ( ad - > isDictOrShape ( ) ) ; <nl> + assertx ( ad - > isDict ( ) ) ; <nl> auto const ptr = MixedArray : : NvGetInt ( ad , k ) ; <nl> if ( UNLIKELY ( ! ptr ) ) throwOOBArrayKeyException ( k , ad ) ; <nl> return ptr ; <nl> tv_rval MixedArray : : NvTryGetIntDict ( const ArrayData * ad , int64_t k ) { <nl> tv_rval MixedArray : : NvTryGetStrDict ( const ArrayData * ad , <nl> const StringData * k ) { <nl> assertx ( asMixed ( ad ) - > checkInvariants ( ) ) ; <nl> - assertx ( ad - > isDictOrShape ( ) ) ; <nl> + assertx ( ad - > isDict ( ) ) ; <nl> auto const ptr = MixedArray : : NvGetStr ( ad , k ) ; <nl> if ( UNLIKELY ( ! 
ptr ) ) throwOOBArrayKeyException ( k , ad ) ; <nl> return ptr ; <nl> ArrayData * MixedArray : : SetWithRefStrInPlaceDict ( ArrayData * ad , StringData * k , <nl> ArrayData * <nl> MixedArray : : AppendWithRefDict ( ArrayData * adIn , TypedValue v ) { <nl> assertx ( asMixed ( adIn ) - > checkInvariants ( ) ) ; <nl> - assertx ( adIn - > isDictOrShape ( ) ) ; <nl> + assertx ( adIn - > isDict ( ) ) ; <nl> if ( tvIsReferenced ( v ) ) throwRefInvalidArrayValueException ( adIn ) ; <nl> return Append ( adIn , tvToInitCell ( v ) ) ; <nl> } <nl> MixedArray : : AppendWithRefDict ( ArrayData * adIn , TypedValue v ) { <nl> ArrayData * <nl> MixedArray : : AppendWithRefInPlaceDict ( ArrayData * adIn , TypedValue v ) { <nl> assertx ( asMixed ( adIn ) - > checkInvariants ( ) ) ; <nl> - assertx ( adIn - > isDictOrShape ( ) ) ; <nl> + assertx ( adIn - > isDict ( ) ) ; <nl> if ( tvIsReferenced ( v ) ) throwRefInvalidArrayValueException ( adIn ) ; <nl> return AppendInPlace ( adIn , tvToInitCell ( v ) ) ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> - tv_rval MixedArray : : NvTryGetIntShape ( const ArrayData * ad , int64_t k ) { <nl> - return RuntimeOption : : EvalHackArrDVArrs <nl> - ? NvTryGetIntDict ( ad , k ) <nl> - : NvGetInt ( ad , k ) ; <nl> - } <nl> - <nl> - tv_rval MixedArray : : NvTryGetStrShape ( const ArrayData * ad , <nl> - const StringData * k ) { <nl> - return RuntimeOption : : EvalHackArrDVArrs <nl> - ? NvTryGetStrDict ( ad , k ) <nl> - : NvGetStr ( ad , k ) ; <nl> - } <nl> - <nl> - ArrayData * MixedArray : : SetWithRefIntShape ( ArrayData * ad , int64_t k , <nl> - TypedValue v ) { <nl> - return RuntimeOption : : EvalHackArrDVArrs <nl> - ? SetWithRefIntDict ( ad , k , v ) <nl> - : SetWithRefInt ( ad , k , v ) ; <nl> - } <nl> - <nl> - ArrayData * MixedArray : : SetWithRefIntInPlaceShape ( ArrayData * ad , int64_t k , <nl> - TypedValue v ) { <nl> - return RuntimeOption : : EvalHackArrDVArrs <nl> - ? SetWithRefIntInPlaceDict ( ad , k , v ) <nl> - : SetWithRefIntInPlace ( ad , k , v ) ; <nl> - } <nl> - <nl> - ArrayData * MixedArray : : SetWithRefStrShape ( ArrayData * ad , StringData * k , <nl> - TypedValue v ) { <nl> - return RuntimeOption : : EvalHackArrDVArrs <nl> - ? SetWithRefStrDict ( ad , k , v ) <nl> - : SetWithRefStr ( ad , k , v ) ; <nl> - } <nl> - <nl> - ArrayData * MixedArray : : SetWithRefStrInPlaceShape ( ArrayData * ad , StringData * k , <nl> - TypedValue v ) { <nl> - return RuntimeOption : : EvalHackArrDVArrs <nl> - ? SetWithRefStrInPlaceDict ( ad , k , v ) <nl> - : SetWithRefStrInPlace ( ad , k , v ) ; <nl> - } <nl> - <nl> - ArrayData * <nl> - MixedArray : : AppendWithRefShape ( ArrayData * adIn , TypedValue v ) { <nl> - return RuntimeOption : : EvalHackArrDVArrs <nl> - ? AppendWithRefDict ( adIn , v ) <nl> - : AppendWithRef ( adIn , v ) ; <nl> - } <nl> - <nl> - ArrayData * <nl> - MixedArray : : AppendWithRefInPlaceShape ( ArrayData * adIn , TypedValue v ) { <nl> - return RuntimeOption : : EvalHackArrDVArrs <nl> - ? 
AppendWithRefInPlaceDict ( adIn , v ) <nl> - : AppendWithRefInPlace ( adIn , v ) ; <nl> - } <nl> - <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> ALWAYS_INLINE <nl> bool MixedArray : : DictEqualHelper ( const ArrayData * ad1 , const ArrayData * ad2 , <nl> bool strict ) { <nl> assertx ( asMixed ( ad1 ) - > checkInvariants ( ) ) ; <nl> assertx ( asMixed ( ad2 ) - > checkInvariants ( ) ) ; <nl> - assertx ( ad1 - > isDictOrShape ( ) ) ; <nl> - assertx ( ad2 - > isDictOrShape ( ) ) ; <nl> + assertx ( ad1 - > isDict ( ) ) ; <nl> + assertx ( ad2 - > isDict ( ) ) ; <nl> <nl> if ( ad1 = = ad2 ) return true ; <nl> if ( ad1 - > size ( ) ! = ad2 - > size ( ) ) return false ; <nl> bool MixedArray : : DictNotSame ( const ArrayData * ad1 , const ArrayData * ad2 ) { <nl> return ! DictEqualHelper ( ad1 , ad2 , true ) ; <nl> } <nl> <nl> - bool MixedArray : : ShapeEqual ( const ArrayData * ad1 , const ArrayData * ad2 ) { <nl> - return RuntimeOption : : EvalHackArrDVArrs <nl> - ? DictEqual ( ad1 , ad2 ) <nl> - : ArrayData : : Equal ( ad1 , ad2 ) ; <nl> - } <nl> - <nl> - bool MixedArray : : ShapeNotEqual ( const ArrayData * ad1 , const ArrayData * ad2 ) { <nl> - return RuntimeOption : : EvalHackArrDVArrs <nl> - ? DictNotEqual ( ad1 , ad2 ) <nl> - : ArrayData : : NotEqual ( ad1 , ad2 ) ; <nl> - } <nl> - <nl> - bool MixedArray : : ShapeSame ( const ArrayData * ad1 , const ArrayData * ad2 ) { <nl> - return RuntimeOption : : EvalHackArrDVArrs <nl> - ? DictSame ( ad1 , ad2 ) <nl> - : ArrayData : : Same ( ad1 , ad2 ) ; <nl> - } <nl> - <nl> - bool MixedArray : : ShapeNotSame ( const ArrayData * ad1 , const ArrayData * ad2 ) { <nl> - return RuntimeOption : : EvalHackArrDVArrs <nl> - ? DictNotSame ( ad1 , ad2 ) <nl> - : ArrayData : : NotSame ( ad1 , ad2 ) ; <nl> - } <nl> - <nl> - bool MixedArray : : ShapeGt ( const ArrayData * ad1 , const ArrayData * ad2 ) { <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - throw_dict_compare_exception ( ) ; <nl> - } else { <nl> - return ArrayData : : Gt ( ad1 , ad2 ) ; <nl> - } <nl> - } <nl> - <nl> - bool MixedArray : : ShapeGte ( const ArrayData * ad1 , const ArrayData * ad2 ) { <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - throw_dict_compare_exception ( ) ; <nl> - } else { <nl> - return ArrayData : : Gte ( ad1 , ad2 ) ; <nl> - } <nl> - } <nl> - <nl> - bool MixedArray : : ShapeLt ( const ArrayData * ad1 , const ArrayData * ad2 ) { <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - throw_dict_compare_exception ( ) ; <nl> - } else { <nl> - return ArrayData : : Lt ( ad1 , ad2 ) ; <nl> - } <nl> - } <nl> - <nl> - bool MixedArray : : ShapeLte ( const ArrayData * ad1 , const ArrayData * ad2 ) { <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - throw_dict_compare_exception ( ) ; <nl> - } else { <nl> - return ArrayData : : Lte ( ad1 , ad2 ) ; <nl> - } <nl> - } <nl> - <nl> - bool MixedArray : : ShapeCompare ( const ArrayData * ad1 , const ArrayData * ad2 ) { <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - throw_dict_compare_exception ( ) ; <nl> - } else { <nl> - return ArrayData : : Compare ( ad1 , ad2 ) ; <nl> - } <nl> - } <nl> - <nl> - <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> } <nl> mmm a / hphp / runtime / base / mixed - array . h <nl> ppp b / hphp / runtime / base / mixed - array . 
h <nl> struct MixedArray final : ArrayData , <nl> static ArrayData * MakeReserveMixed ( uint32_t size ) ; <nl> static ArrayData * MakeReserveDArray ( uint32_t size ) ; <nl> static ArrayData * MakeReserveDict ( uint32_t size ) ; <nl> - static ArrayData * MakeReserveShape ( uint32_t size ) ; <nl> static constexpr auto MakeReserve = & MakeReserveMixed ; <nl> <nl> / * <nl> struct MixedArray final : ArrayData , <nl> <nl> static ArrayData * MakeDictFromAPC ( const APCArray * apc ) ; <nl> static ArrayData * MakeDArrayFromAPC ( const APCArray * apc ) ; <nl> - static ArrayData * MakeShapeFromAPC ( const APCArray * apc ) ; <nl> <nl> static bool DictEqual ( const ArrayData * , const ArrayData * ) ; <nl> static bool DictNotEqual ( const ArrayData * , const ArrayData * ) ; <nl> static bool DictSame ( const ArrayData * , const ArrayData * ) ; <nl> static bool DictNotSame ( const ArrayData * , const ArrayData * ) ; <nl> <nl> - static bool ShapeEqual ( const ArrayData * , const ArrayData * ) ; <nl> - static bool ShapeNotEqual ( const ArrayData * , const ArrayData * ) ; <nl> - static bool ShapeSame ( const ArrayData * , const ArrayData * ) ; <nl> - static bool ShapeNotSame ( const ArrayData * , const ArrayData * ) ; <nl> - static bool ShapeGt ( const ArrayData * , const ArrayData * ) ; <nl> - static bool ShapeGte ( const ArrayData * , const ArrayData * ) ; <nl> - static bool ShapeLt ( const ArrayData * , const ArrayData * ) ; <nl> - static bool ShapeLte ( const ArrayData * , const ArrayData * ) ; <nl> - static bool ShapeCompare ( const ArrayData * , const ArrayData * ) ; <nl> - <nl> using ArrayData : : decRefCount ; <nl> using ArrayData : : hasMultipleRefs ; <nl> using ArrayData : : hasExactlyOneRef ; <nl> struct MixedArray final : ArrayData , <nl> static ArrayData * ToPHPArray ( ArrayData * , bool ) ; <nl> static ArrayData * ToPHPArrayIntishCast ( ArrayData * , bool ) ; <nl> static ArrayData * ToDict ( ArrayData * , bool ) ; <nl> - static ArrayData * ToShape ( ArrayData * , bool ) ; <nl> static constexpr auto ToVec = & ArrayCommon : : ToVec ; <nl> static constexpr auto ToKeyset = & ArrayCommon : : ToKeyset ; <nl> static constexpr auto ToVArray = & ArrayCommon : : ToVArray ; <nl> struct MixedArray final : ArrayData , <nl> static constexpr auto NvGetIntPosDict = & NvGetIntPos ; <nl> static constexpr auto NvGetStrPosDict = & NvGetStrPos ; <nl> static tv_rval RvalIntDict ( const ArrayData * ad , int64_t k ) { <nl> - assertx ( ad - > isDictOrShape ( ) ) ; <nl> + assertx ( ad - > isDict ( ) ) ; <nl> return NvGetIntDict ( ad , k ) ; <nl> } <nl> static tv_rval RvalIntStrictDict ( const ArrayData * ad , int64_t k ) { <nl> - assertx ( ad - > isDictOrShape ( ) ) ; <nl> + assertx ( ad - > isDict ( ) ) ; <nl> return NvTryGetIntDict ( ad , k ) ; <nl> } <nl> static tv_rval RvalStrDict ( const ArrayData * ad , const StringData * k ) { <nl> - assertx ( ad - > isDictOrShape ( ) ) ; <nl> + assertx ( ad - > isDict ( ) ) ; <nl> return NvGetStrDict ( ad , k ) ; <nl> } <nl> static tv_rval RvalStrStrictDict ( const ArrayData * ad , <nl> const StringData * k ) { <nl> - assertx ( ad - > isDictOrShape ( ) ) ; <nl> + assertx ( ad - > isDict ( ) ) ; <nl> return NvTryGetStrDict ( ad , k ) ; <nl> } <nl> static constexpr auto ReleaseDict = & Release ; <nl> struct MixedArray final : ArrayData , <nl> static constexpr auto ToVecDict = & ArrayCommon : : ToVec ; <nl> static constexpr auto ToKeysetDict = & ArrayCommon : : ToKeyset ; <nl> static constexpr auto ToVArrayDict = & ArrayCommon : : ToVArray ; <nl> - static constexpr auto ToShapeDict 
= & ArrayCommon : : ToShape ; <nl> static ArrayData * ToDArrayDict ( ArrayData * , bool ) ; <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> struct MixedArray final : ArrayData , <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> - static tv_rval NvTryGetIntShape ( const ArrayData * , int64_t ) ; <nl> - static constexpr auto NvGetIntShape = & NvGetInt ; <nl> - static tv_rval NvTryGetStrShape ( const ArrayData * , <nl> - const StringData * ) ; <nl> - static constexpr auto NvGetStrShape = & NvGetStr ; <nl> - static constexpr auto NvGetIntPosShape = & NvGetIntPos ; <nl> - static constexpr auto NvGetStrPosShape = & NvGetStrPos ; <nl> - static tv_rval RvalIntShape ( const ArrayData * ad , int64_t k ) { <nl> - assertx ( ad - > isShape ( ) ) ; <nl> - return NvGetIntShape ( ad , k ) ; <nl> - } <nl> - static tv_rval RvalIntStrictShape ( const ArrayData * ad , int64_t k ) { <nl> - assertx ( ad - > isShape ( ) ) ; <nl> - return NvTryGetIntShape ( ad , k ) ; <nl> - } <nl> - static tv_rval RvalStrShape ( const ArrayData * ad , const StringData * k ) { <nl> - assertx ( ad - > isShape ( ) ) ; <nl> - return NvGetStrShape ( ad , k ) ; <nl> - } <nl> - static tv_rval RvalStrStrictShape ( const ArrayData * ad , <nl> - const StringData * k ) { <nl> - assertx ( ad - > isShape ( ) ) ; <nl> - return NvTryGetStrShape ( ad , k ) ; <nl> - } <nl> - static constexpr auto ReleaseShape = & Release ; <nl> - static constexpr auto NvGetKeyShape = & NvGetKey ; <nl> - static constexpr auto SetIntShape = & SetInt ; <nl> - static constexpr auto SetIntInPlaceShape = & SetIntInPlace ; <nl> - static constexpr auto SetStrShape = & SetStr ; <nl> - static constexpr auto SetStrInPlaceShape = & SetStrInPlace ; <nl> - static constexpr auto AddIntShape = & AddInt ; <nl> - static constexpr auto AddStrShape = & AddStr ; <nl> - static constexpr auto VsizeShape = & Vsize ; <nl> - static constexpr auto GetValueRefShape = & GetValueRef ; <nl> - static constexpr auto IsVectorDataShape = & IsVectorData ; <nl> - static constexpr auto ExistsIntShape = & ExistsInt ; <nl> - static constexpr auto ExistsStrShape = & ExistsStr ; <nl> - static constexpr auto LvalIntShape = & LvalInt ; <nl> - static constexpr auto LvalStrShape = & LvalStr ; <nl> - static constexpr auto LvalNewShape = & LvalNew ; <nl> - static constexpr auto RemoveIntShape = & RemoveInt ; <nl> - static constexpr auto RemoveIntInPlaceShape = & RemoveIntInPlace ; <nl> - static constexpr auto RemoveStrShape = & RemoveStr ; <nl> - static constexpr auto RemoveStrInPlaceShape = & RemoveStrInPlace ; <nl> - static constexpr auto IterBeginShape = & IterBegin ; <nl> - static constexpr auto IterLastShape = & IterLast ; <nl> - static constexpr auto IterEndShape = & IterEnd ; <nl> - static constexpr auto IterAdvanceShape = & IterAdvance ; <nl> - static constexpr auto IterRewindShape = & IterRewind ; <nl> - static constexpr auto EscalateForSortShape = & EscalateForSort ; <nl> - static constexpr auto KsortShape = & Ksort ; <nl> - static constexpr auto SortShape = & Sort ; <nl> - static constexpr auto AsortShape = & Asort ; <nl> - static constexpr auto UksortShape = & Uksort ; <nl> - static constexpr auto UsortShape = & Usort ; <nl> - static constexpr auto UasortShape = & Uasort ; <nl> - static constexpr auto CopyShape = & Copy ; <nl> - static constexpr auto CopyStaticShape = & CopyStatic ; 
<nl> - static constexpr auto AppendShape = & Append ; <nl> - static constexpr auto AppendInPlaceShape = & AppendInPlace ; <nl> - static ArrayData * SetWithRefIntShape ( ArrayData * , int64_t k , TypedValue v ) ; <nl> - static ArrayData * SetWithRefIntInPlaceShape ( ArrayData * , int64_t k , <nl> - TypedValue v ) ; <nl> - static ArrayData * SetWithRefStrShape ( ArrayData * , StringData * k , TypedValue ) ; <nl> - static ArrayData * SetWithRefStrInPlaceShape ( ArrayData * , StringData * k , <nl> - TypedValue ) ; <nl> - static ArrayData * AppendWithRefShape ( ArrayData * , TypedValue ) ; <nl> - static ArrayData * AppendWithRefInPlaceShape ( ArrayData * , TypedValue ) ; <nl> - static constexpr auto PlusEqShape = & PlusEq ; <nl> - static constexpr auto MergeShape = & Merge ; <nl> - static constexpr auto PopShape = & Pop ; <nl> - static constexpr auto DequeueShape = & Dequeue ; <nl> - static constexpr auto PrependShape = & Prepend ; <nl> - static constexpr auto RenumberShape = & Renumber ; <nl> - static constexpr auto OnSetEvalScalarShape = & OnSetEvalScalar ; <nl> - static constexpr auto EscalateShape = & Escalate ; <nl> - static ArrayData * ToPHPArrayShape ( ArrayData * , bool ) ; <nl> - static constexpr auto ToPHPArrayIntishCastShape = & ToPHPArrayShape ; <nl> - static ArrayData * ToShapeShape ( ArrayData * , bool ) ; <nl> - static constexpr auto ToVecShape = & ArrayCommon : : ToVec ; <nl> - static constexpr auto ToKeysetShape = & ArrayCommon : : ToKeyset ; <nl> - static constexpr auto ToVArrayShape = & ArrayCommon : : ToVArray ; <nl> - static constexpr auto ToDictShape = & ArrayCommon : : ToDict ; <nl> - static ArrayData * ToDArrayShape ( ArrayData * , bool ) ; <nl> - <nl> - static constexpr auto LvalSilentIntShape = & LvalSilentInt ; <nl> - static constexpr auto LvalSilentStrShape = & LvalSilentStr ; <nl> - <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> private : <nl> MixedArray * copyMixed ( ) const ; <nl> static ArrayData * MakeReserveImpl ( uint32_t capacity , HeaderKind hk , <nl> struct MixedArray final : ArrayData , <nl> struct DArrayInitializer ; <nl> static DArrayInitializer s_darr_initializer ; <nl> <nl> - struct ShapeInitializer ; <nl> - static ShapeInitializer s_shape_initializer ; <nl> - <nl> int64_t m_nextKI ; / / Next integer key to use for append . <nl> } ; <nl> <nl> mmm a / hphp / runtime / base / packed - array . cpp <nl> ppp b / hphp / runtime / base / packed - array . cpp <nl> ArrayData * PackedArray : : ToDArray ( ArrayData * adIn , bool / * copy * / ) { <nl> return init . create ( ) ; <nl> } <nl> <nl> - ArrayData * PackedArray : : ToShape ( ArrayData * ad , bool copy ) { <nl> - auto arr = RuntimeOption : : EvalHackArrDVArrs <nl> - ? PackedArray : : ToDict ( ad , copy ) <nl> - : PackedArray : : ToDArray ( ad , copy ) ; <nl> - arr = arr - > toShapeInPlaceIfCompatible ( ) ; <nl> - return arr ; <nl> - } <nl> - <nl> ArrayData * PackedArray : : ToPHPArrayVec ( ArrayData * adIn , bool copy ) { <nl> assertx ( checkInvariants ( adIn ) ) ; <nl> assertx ( adIn - > isVecArray ( ) ) ; <nl> ArrayData * PackedArray : : ToDictVec ( ArrayData * ad , bool copy ) { <nl> : out ; <nl> } <nl> <nl> - ArrayData * PackedArray : : ToShapeVec ( ArrayData * ad , bool copy ) { <nl> - auto arr = RuntimeOption : : EvalHackArrDVArrs <nl> - ? 
PackedArray : : ToDictVec ( ad , copy ) <nl> - : PackedArray : : ToDArrayVec ( ad , copy ) ; <nl> - arr = arr - > toShapeInPlaceIfCompatible ( ) ; <nl> - return arr ; <nl> - } <nl> - <nl> ArrayData * PackedArray : : ToVec ( ArrayData * adIn , bool copy ) { <nl> assertx ( checkInvariants ( adIn ) ) ; <nl> assertx ( adIn - > isPacked ( ) ) ; <nl> mmm a / hphp / runtime / base / packed - array . h <nl> ppp b / hphp / runtime / base / packed - array . h <nl> struct PackedArray final : type_scan : : MarkCollectable < PackedArray > { <nl> static ArrayData * ToVArray ( ArrayData * , bool ) ; <nl> static ArrayData * ToDArray ( ArrayData * , bool ) ; <nl> static ArrayData * ToDict ( ArrayData * , bool ) ; <nl> - static ArrayData * ToShape ( ArrayData * , bool ) ; <nl> static ArrayData * ToVec ( ArrayData * , bool ) ; <nl> static void Renumber ( ArrayData * ) { } <nl> static void OnSetEvalScalar ( ArrayData * ) ; <nl> struct PackedArray final : type_scan : : MarkCollectable < PackedArray > { <nl> static constexpr auto ToPHPArrayIntishCastVec = & ToPHPArrayVec ; <nl> static ArrayData * ToVArrayVec ( ArrayData * , bool ) ; <nl> static ArrayData * ToDictVec ( ArrayData * , bool ) ; <nl> - static ArrayData * ToShapeVec ( ArrayData * , bool ) ; <nl> static ArrayData * ToVecVec ( ArrayData * , bool ) ; <nl> <nl> static constexpr auto MergeVec = & Merge ; <nl> struct PackedArray final : type_scan : : MarkCollectable < PackedArray > { <nl> static constexpr auto EscalateVec = & Escalate ; <nl> static constexpr auto ToKeysetVec = & ArrayCommon : : ToKeyset ; <nl> static constexpr auto ToDArrayVec = & ToDArray ; <nl> - static constexpr auto ToDArrayShape = & ToDArray ; <nl> <nl> static tv_rval RvalIntVec ( const ArrayData * ad , int64_t k ) { <nl> assertx ( ad - > isVecArray ( ) ) ; <nl> mmm a / hphp / runtime / base / perf - mem - event . cpp <nl> ppp b / hphp / runtime / base / perf - mem - event . cpp <nl> bool record_request_heap_mem_event ( const void * addr , <nl> <nl> case HeaderKind : : Mixed : <nl> case HeaderKind : : Dict : <nl> - case HeaderKind : : Shape : <nl> fill_record ( static_cast < const MixedArray * > ( hdr ) , addr , record ) ; <nl> break ; <nl> <nl> mmm a / hphp / runtime / base / record - array . cpp <nl> ppp b / hphp / runtime / base / record - array . cpp <nl> ArrayData * RecordArray : : ToPHPArrayIntishCast ( ArrayData * , bool ) { <nl> throw_not_implemented ( " This method on RecordArray " ) ; <nl> } <nl> <nl> - ArrayData * RecordArray : : ToShape ( ArrayData * , bool ) { <nl> - throw_not_implemented ( " This method on RecordArray " ) ; <nl> - } <nl> - <nl> ArrayData * RecordArray : : ToDict ( ArrayData * , bool ) { <nl> throw_not_implemented ( " This method on RecordArray " ) ; <nl> } <nl> mmm a / hphp / runtime / base / record - array . h <nl> ppp b / hphp / runtime / base / record - array . h <nl> struct RecordArray : ArrayData , <nl> static ArrayData * Escalate ( const ArrayData * ) ; <nl> static ArrayData * ToPHPArray ( ArrayData * , bool ) ; <nl> static ArrayData * ToPHPArrayIntishCast ( ArrayData * , bool ) ; <nl> - static ArrayData * ToShape ( ArrayData * , bool ) ; <nl> static ArrayData * ToDict ( ArrayData * , bool ) ; <nl> static ArrayData * ToVec ( ArrayData * , bool ) ; <nl> static ArrayData * ToKeyset ( ArrayData * , bool ) ; <nl> mmm a / hphp / runtime / base / set - array . cpp <nl> ppp b / hphp / runtime / base / set - array . 
cpp <nl> ArrayData * SetArray : : ToDArray ( ArrayData * ad , bool copy ) { <nl> return out ; <nl> } <nl> <nl> - ArrayData * SetArray : : ToShape ( ArrayData * ad , bool copy ) { <nl> - auto arr = RuntimeOption : : EvalHackArrDVArrs <nl> - ? SetArray : : ToDict ( ad , copy ) <nl> - : SetArray : : ToDArray ( ad , copy ) ; <nl> - arr = arr - > toShapeInPlaceIfCompatible ( ) ; <nl> - return arr ; <nl> - } <nl> - <nl> ArrayData * SetArray : : ToKeyset ( ArrayData * ad , bool / * copy * / ) { <nl> assertx ( asSet ( ad ) - > checkInvariants ( ) ) ; <nl> return ad ; <nl> mmm a / hphp / runtime / base / set - array . h <nl> ppp b / hphp / runtime / base / set - array . h <nl> struct SetArray final : ArrayData , <nl> static ArrayData * ToKeyset ( ArrayData * , bool ) ; <nl> static constexpr auto ToVArray = & ArrayCommon : : ToVArray ; <nl> static ArrayData * ToDArray ( ArrayData * , bool ) ; <nl> - static ArrayData * ToShape ( ArrayData * , bool ) ; <nl> static bool Equal ( const ArrayData * , const ArrayData * ) ; <nl> static bool NotEqual ( const ArrayData * , const ArrayData * ) ; <nl> static bool Same ( const ArrayData * , const ArrayData * ) ; <nl> mmm a / hphp / runtime / base / string - buffer . cpp <nl> ppp b / hphp / runtime / base / string - buffer . cpp <nl> void StringBuffer : : append ( const Variant & v ) { <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> case KindOfObject : <nl> mmm a / hphp / runtime / base / string - data . cpp <nl> ppp b / hphp / runtime / base / string - data . cpp <nl> bool StringData : : isNumeric ( ) const { <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> case KindOfObject : <nl> bool StringData : : isInteger ( ) const { <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> case KindOfObject : <nl> mmm a / hphp / runtime / base / tv - arith . cpp <nl> ppp b / hphp / runtime / base / tv - arith . cpp <nl> TypedNum numericConvHelper ( Cell cell ) { <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> throw_bad_array_operand ( cell . m_data . parr ) ; <nl> void cellIncDecOp ( Op op , tv_lval cell ) { <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> case KindOfObject : <nl> void cellBitNot ( Cell & cell ) { <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> case KindOfObject : <nl> mmm a / hphp / runtime / base / tv - comparisons . cpp <nl> ppp b / hphp / runtime / base / tv - comparisons . cpp <nl> typename Op : : RetType cellRelOp ( Op op , Cell cell , int64_t val ) { <nl> case KindOfKeyset : <nl> return op . 
keysetVsNonKeyset ( ) ; <nl> <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - return op . dictVsNonDict ( ) ; <nl> - } <nl> - / / fallthrough <nl> - <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> if ( UNLIKELY ( op . noticeOnArrNonArr ( ) ) ) { <nl> typename Op : : RetType cellRelOp ( Op op , Cell cell , double val ) { <nl> case KindOfKeyset : <nl> return op . keysetVsNonKeyset ( ) ; <nl> <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - return op . dictVsNonDict ( ) ; <nl> - } <nl> - / / Fallthrough <nl> - <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> if ( UNLIKELY ( op . noticeOnArrNonArr ( ) ) ) { <nl> typename Op : : RetType cellRelOp ( Op op , Cell cell , const StringData * val ) { <nl> case KindOfKeyset : <nl> return op . keysetVsNonKeyset ( ) ; <nl> <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - return op . dictVsNonDict ( ) ; <nl> - } <nl> - / / Fallthrough <nl> - <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> if ( UNLIKELY ( op . noticeOnArrNonArr ( ) ) ) { <nl> typename Op : : RetType cellRelOp ( Op op , Cell cell , const ArrayData * ad ) { <nl> hackArr ( ) ; <nl> return op . keysetVsNonKeyset ( ) ; <nl> <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - hackArr ( ) ; <nl> - return op . dictVsNonDict ( ) ; <nl> - } <nl> - / / Fallthrough <nl> - <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> return op ( cell . m_data . parr , ad ) ; <nl> typename Op : : RetType cellRelOp ( Op op , Cell cell , const ObjectData * od ) { <nl> case KindOfKeyset : <nl> return op . keysetVsNonKeyset ( ) ; <nl> <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - return op . dictVsNonDict ( ) ; <nl> - } <nl> - / / Fallthrough <nl> - <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> if ( UNLIKELY ( op . noticeOnArrNonArr ( ) ) ) { <nl> typename Op : : RetType cellRelOp ( Op op , Cell cell , const ResourceData * rd ) { <nl> case KindOfKeyset : <nl> return op . keysetVsNonKeyset ( ) ; <nl> <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - return op . dictVsNonDict ( ) ; <nl> - } <nl> - / / Fallthrough <nl> - <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> if ( UNLIKELY ( op . noticeOnArrNonArr ( ) ) ) { <nl> typename Op : : RetType cellRelOpVec ( Op op , Cell cell , const ArrayData * a ) { <nl> return op . vec ( cell . m_data . parr , a ) ; <nl> } <nl> <nl> - template < class Op > <nl> - typename Op : : RetType cellRelOpShape ( Op op , Cell cell , const ArrayData * a ) { <nl> - return RuntimeOption : : EvalHackArrDVArrs ? <nl> - cellRelOpDict ( op , cell , a ) : cellRelOp ( op , cell , a ) ; <nl> - } <nl> - <nl> template < class Op > <nl> typename Op : : RetType cellRelOpDict ( Op op , Cell cell , const ArrayData * a ) { <nl> assertx ( cellIsPlausible ( cell ) ) ; <nl> - assertx ( a - > isDictOrShape ( ) ) ; <nl> + assertx ( a - > isDict ( ) ) ; <nl> <nl> - if ( UNLIKELY ( ! isDictOrShapeType ( cell . m_type ) ) ) { <nl> + if ( UNLIKELY ( ! isDictType ( cell . m_type ) ) ) { <nl> if ( isVecType ( cell . m_type ) ) return op . vecVsNonVec ( ) ; <nl> if ( isKeysetType ( cell . m_type ) ) return op . 
keysetVsNonKeyset ( ) ; <nl> if ( UNLIKELY ( op . noticeOnArrHackArr ( ) & & isArrayType ( cell . m_type ) ) ) { <nl> typename Op : : RetType cellRelOp ( Op op , Cell cell , ClsMethDataRef clsMeth ) { <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : return op . keysetVsNonKeyset ( ) ; <nl> <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - return op . dictVsNonDict ( ) ; <nl> - } <nl> - / / fall through <nl> case KindOfPersistentArray : <nl> case KindOfArray : { <nl> raiseClsMethToVecWarningHelper ( ) ; <nl> typename Op : : RetType cellRelOp ( Op op , Cell cell , const Func * val ) { <nl> case KindOfKeyset : <nl> return op . keysetVsNonKeyset ( ) ; <nl> <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - return op . dictVsNonDict ( ) ; <nl> - } <nl> - / / Fallthrough <nl> - <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> if ( UNLIKELY ( op . noticeOnArrNonArr ( ) ) ) { <nl> typename Op : : RetType cellRelOp ( Op op , Cell cell , const Class * val ) { <nl> case KindOfKeyset : <nl> return op . keysetVsNonKeyset ( ) ; <nl> <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - return op . dictVsNonDict ( ) ; <nl> - } <nl> - / / Fallthrough <nl> - <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> if ( UNLIKELY ( op . noticeOnArrNonArr ( ) ) ) { <nl> typename Op : : RetType cellRelOp ( Op op , Cell c1 , Cell c2 ) { <nl> case KindOfDict : return cellRelOpDict ( op , c1 , c2 . m_data . parr ) ; <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : return cellRelOpKeyset ( op , c1 , c2 . m_data . parr ) ; <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : return cellRelOpShape ( op , c1 , c2 . m_data . parr ) ; <nl> case KindOfPersistentArray : <nl> case KindOfArray : return cellRelOp ( op , c1 , c2 . m_data . parr ) ; <nl> case KindOfObject : return cellRelOp ( op , c1 , c2 . m_data . pobj ) ; <nl> struct Eq { <nl> return PackedArray : : VecEqual ( ad1 , ad2 ) ; <nl> } <nl> bool dict ( const ArrayData * ad1 , const ArrayData * ad2 ) const { <nl> - assertx ( ad1 - > isDictOrShape ( ) ) ; <nl> - assertx ( ad2 - > isDictOrShape ( ) ) ; <nl> + assertx ( ad1 - > isDict ( ) ) ; <nl> + assertx ( ad2 - > isDict ( ) ) ; <nl> return MixedArray : : DictEqual ( ad1 , ad2 ) ; <nl> } <nl> bool keyset ( const ArrayData * ad1 , const ArrayData * ad2 ) const { <nl> struct CompareBase { <nl> } <nl> <nl> RetType dict ( const ArrayData * ad1 , const ArrayData * ad2 ) const { <nl> - assertx ( ad1 - > isDictOrShape ( ) ) ; <nl> - assertx ( ad2 - > isDictOrShape ( ) ) ; <nl> + assertx ( ad1 - > isDict ( ) ) ; <nl> + assertx ( ad2 - > isDict ( ) ) ; <nl> throw_dict_compare_exception ( ) ; <nl> } <nl> RetType keyset ( const ArrayData * ad1 , const ArrayData * ad2 ) const { <nl> bool cellSame ( Cell c1 , Cell c2 ) { <nl> <nl> case KindOfPersistentDict : <nl> case KindOfDict : <nl> - if ( ! isDictOrShapeType ( c2 . m_type ) ) { <nl> + if ( ! isDictType ( c2 . m_type ) ) { <nl> phpArrayCheck ( ) ; <nl> return false ; <nl> } <nl> bool cellSame ( Cell c1 , Cell c2 ) { <nl> } <nl> return SetArray : : Same ( c1 . m_data . parr , c2 . m_data . parr ) ; <nl> <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : / / TODO ( T31025155 ) : Add warning . <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - if ( ! isDictOrShapeType ( c2 . 
m_type ) ) { <nl> - phpArrayCheck ( ) ; <nl> - return false ; <nl> - } <nl> - } else { <nl> - if ( ! isArrayOrShapeType ( c2 . m_type ) ) { <nl> - if ( UNLIKELY ( checkHACCompare ( ) & & isHackArrayType ( c2 . m_type ) ) ) { <nl> - raiseHackArrCompatArrHackArrCmp ( ) ; <nl> - } <nl> - return false ; <nl> - } <nl> - } <nl> - return MixedArray : : ShapeSame ( c1 . m_data . parr , c2 . m_data . parr ) ; <nl> - <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> if ( isClsMethType ( c2 . m_type ) ) { <nl> bool cellSame ( Cell c1 , Cell c2 ) { <nl> return ArrayData : : Same ( <nl> c1 . m_data . parr , clsMethToVecHelper ( c2 . m_data . pclsmeth ) . get ( ) ) ; <nl> } <nl> - if ( ! isArrayOrShapeType ( c2 . m_type ) ) { <nl> + if ( ! isArrayType ( c2 . m_type ) ) { <nl> if ( UNLIKELY ( checkHACCompare ( ) & & isHackArrayType ( c2 . m_type ) ) ) { <nl> raiseHackArrCompatArrHackArrCmp ( ) ; <nl> } <nl> bool cellSame ( Cell c1 , Cell c2 ) { <nl> clsMethToVecHelper ( c1 . m_data . pclsmeth ) . get ( ) , c2 . m_data . parr ) ; <nl> } <nl> } else { <nl> - if ( isArrayOrShapeType ( c2 . m_type ) ) { <nl> + if ( isArrayType ( c2 . m_type ) ) { <nl> raiseClsMethToVecWarningHelper ( ) ; <nl> return ArrayData : : Same ( <nl> clsMethToVecHelper ( c1 . m_data . pclsmeth ) . get ( ) , c2 . m_data . parr ) ; <nl> bool cellEqual ( Cell cell , const ArrayData * val ) { <nl> if ( val - > isPHPArray ( ) ) return cellRelOp ( Eq ( ) , cell , val ) ; <nl> if ( val - > isVecArray ( ) ) return cellRelOpVec ( Eq ( ) , cell , val ) ; <nl> if ( val - > isDict ( ) ) return cellRelOpDict ( Eq ( ) , cell , val ) ; <nl> - if ( val - > isShape ( ) ) return cellRelOpShape ( Eq ( ) , cell , val ) ; <nl> if ( val - > isKeyset ( ) ) return cellRelOpKeyset ( Eq ( ) , cell , val ) ; <nl> not_reached ( ) ; <nl> } <nl> bool cellLess ( Cell cell , const ArrayData * val ) { <nl> if ( val - > isPHPArray ( ) ) return cellRelOp ( Lt ( ) , cell , val ) ; <nl> if ( val - > isVecArray ( ) ) return cellRelOpVec ( Lt ( ) , cell , val ) ; <nl> if ( val - > isDict ( ) ) return cellRelOpDict ( Lt ( ) , cell , val ) ; <nl> - if ( val - > isShape ( ) ) return cellRelOpShape ( Lt ( ) , cell , val ) ; <nl> if ( val - > isKeyset ( ) ) return cellRelOpKeyset ( Lt ( ) , cell , val ) ; <nl> not_reached ( ) ; <nl> } <nl> bool cellGreater ( Cell cell , const ArrayData * val ) { <nl> if ( val - > isPHPArray ( ) ) return cellRelOp ( Gt ( ) , cell , val ) ; <nl> if ( val - > isVecArray ( ) ) return cellRelOpVec ( Gt ( ) , cell , val ) ; <nl> if ( val - > isDict ( ) ) return cellRelOpDict ( Gt ( ) , cell , val ) ; <nl> - if ( val - > isShape ( ) ) return cellRelOpShape ( Gt ( ) , cell , val ) ; <nl> if ( val - > isKeyset ( ) ) return cellRelOpKeyset ( Gt ( ) , cell , val ) ; <nl> not_reached ( ) ; <nl> } <nl> int64_t cellCompare ( Cell cell , const ArrayData * val ) { <nl> if ( val - > isPHPArray ( ) ) return cellRelOp ( Cmp ( ) , cell , val ) ; <nl> if ( val - > isVecArray ( ) ) return cellRelOpVec ( Cmp ( ) , cell , val ) ; <nl> if ( val - > isDict ( ) ) return cellRelOpDict ( Cmp ( ) , cell , val ) ; <nl> - if ( val - > isShape ( ) ) return cellRelOpShape ( Cmp ( ) , cell , val ) ; <nl> if ( val - > isKeyset ( ) ) return cellRelOpKeyset ( Cmp ( ) , cell , val ) ; <nl> not_reached ( ) ; <nl> } <nl> mmm a / hphp / runtime / base / tv - conversions - inl . h <nl> ppp b / hphp / runtime / base / tv - conversions - inl . 
h <nl> inline bool cellToBool ( Cell cell ) { <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : return ! cell . m_data . parr - > empty ( ) ; <nl> case KindOfObject : return cell . m_data . pobj - > toBoolean ( ) ; <nl> inline int64_t cellToInt ( Cell cell ) { <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : return cell . m_data . parr - > empty ( ) ? 0 : 1 ; <nl> case KindOfObject : return cell . m_data . pobj - > toInt64 ( ) ; <nl> inline Cell cellToKey ( Cell cell , const ArrayData * ad ) { <nl> return make_tv < KindOfInt64 > ( cell . m_data . pres - > data ( ) - > o_toInt64 ( ) ) ; <nl> <nl> case KindOfClsMeth : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> case KindOfPersistentVec : <nl> mmm a / hphp / runtime / base / tv - conversions . cpp <nl> ppp b / hphp / runtime / base / tv - conversions . cpp <nl> enable_if_lval_t < T , void > tvCastToBooleanInPlace ( T tv ) { <nl> case KindOfPersistentVec : <nl> case KindOfPersistentDict : <nl> case KindOfPersistentKeyset : <nl> - case KindOfPersistentShape : <nl> case KindOfPersistentArray : <nl> b = ! val ( tv ) . parr - > empty ( ) ; <nl> continue ; <nl> enable_if_lval_t < T , void > tvCastToBooleanInPlace ( T tv ) { <nl> case KindOfVec : <nl> case KindOfDict : <nl> case KindOfKeyset : <nl> - case KindOfShape : <nl> case KindOfArray : <nl> b = ! val ( tv ) . parr - > empty ( ) ; <nl> tvDecRefArr ( tv ) ; <nl> enable_if_lval_t < T , void > tvCastToDoubleInPlace ( T tv ) { <nl> case KindOfPersistentVec : <nl> case KindOfPersistentDict : <nl> case KindOfPersistentKeyset : <nl> - case KindOfPersistentShape : <nl> case KindOfPersistentArray : <nl> d = val ( tv ) . parr - > empty ( ) ? 0 : 1 ; <nl> continue ; <nl> enable_if_lval_t < T , void > tvCastToDoubleInPlace ( T tv ) { <nl> case KindOfVec : <nl> case KindOfDict : <nl> case KindOfKeyset : <nl> - case KindOfShape : <nl> case KindOfArray : <nl> d = val ( tv ) . parr - > empty ( ) ? 0 : 1 ; <nl> tvDecRefArr ( tv ) ; <nl> enable_if_lval_t < T , void > tvCastToInt64InPlace ( T tv ) { <nl> case KindOfPersistentVec : <nl> case KindOfPersistentDict : <nl> case KindOfPersistentKeyset : <nl> - case KindOfPersistentShape : <nl> case KindOfPersistentArray : <nl> i = val ( tv ) . parr - > empty ( ) ? 0 : 1 ; <nl> continue ; <nl> enable_if_lval_t < T , void > tvCastToInt64InPlace ( T tv ) { <nl> case KindOfVec : <nl> case KindOfDict : <nl> case KindOfKeyset : <nl> - case KindOfShape : <nl> case KindOfArray : <nl> i = val ( tv ) . parr - > empty ( ) ? 0 : 1 ; <nl> tvDecRefArr ( tv ) ; <nl> double tvCastToDouble ( TypedValue tv ) { <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> return tv . m_data . parr - > empty ( ) ? 0 . 0 : 1 . 0 ; <nl> void cellCastToStringInPlace ( tv_lval tv ) { <nl> if ( type ( tv ) = = KindOfKeyset ) tvDecRefArr ( * tv ) ; <nl> return persistentString ( keyset_string . 
get ( ) ) ; <nl> <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - raise_notice ( " Dict to string conversion " ) ; <nl> - if ( type ( tv ) = = KindOfShape ) tvDecRefArr ( * tv ) ; <nl> - return persistentString ( dict_string . get ( ) ) ; <nl> - } <nl> - / / Fallthrough <nl> - <nl> case KindOfArray : <nl> case KindOfPersistentArray : <nl> raise_notice ( " Array to string conversion " ) ; <nl> StringData * cellCastToStringData ( Cell tv ) { <nl> raise_notice ( " Keyset to string conversion " ) ; <nl> return keyset_string . get ( ) ; <nl> <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - raise_notice ( " Dict to string conversion " ) ; <nl> - return dict_string . get ( ) ; <nl> - } <nl> - / / Fallthrough <nl> - <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> raise_notice ( " Array to string conversion " ) ; <nl> ArrayData * tvCastToArrayLikeData ( TypedValue tv ) { <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : { <nl> auto const ad = tv . m_data . parr ; <nl> assign ( LHS lhs , T & & rhs ) { <nl> variant_ref { lhs } = std : : forward < T > ( rhs ) ; <nl> } <nl> <nl> - template < typename T > <nl> - enable_if_lval_t < T , void > tvCastToShapeInPlace ( T tv ) { <nl> - if ( isShapeType ( type ( tv ) ) ) { <nl> - return ; <nl> - } <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - tvCastToDictInPlace ( tv ) ; <nl> - } else { <nl> - tvCastToDArrayInPlace ( tv ) ; <nl> - } <nl> - auto const ad = val ( tv ) . parr ; <nl> - assign ( tv , ad - > toShape ( ad - > cowCheck ( ) ) ) ; <nl> - } <nl> - <nl> template < typename T , IntishCast IC / * = IntishCast : : None * / > <nl> enable_if_lval_t < T , void > tvCastToArrayInPlace ( T tv ) { <nl> assertx ( tvIsPlausible ( * tv ) ) ; <nl> enable_if_lval_t < T , void > tvCastToArrayInPlace ( T tv ) { <nl> continue ; <nl> } <nl> <nl> - case KindOfPersistentShape : { <nl> - auto const adIn = val ( tv ) . parr ; <nl> - assertx ( adIn - > isShape ( ) ) ; <nl> - a = MixedArray : : ToPHPArrayShape ( adIn , true ) ; <nl> - assertx ( a ! = adIn ) ; <nl> - continue ; <nl> - } <nl> - <nl> - case KindOfShape : { <nl> - auto const adIn = val ( tv ) . parr ; <nl> - assertx ( adIn - > isShape ( ) ) ; <nl> - a = MixedArray : : ToPHPArrayShape ( adIn , adIn - > cowCheck ( ) ) ; <nl> - if ( a ! = adIn ) tvDecRefArr ( tv ) ; <nl> - continue ; <nl> - } <nl> - <nl> case KindOfPersistentArray : { <nl> auto * adIn = val ( tv ) . parr ; <nl> assertx ( adIn - > isPHPArray ( ) ) ; <nl> enable_if_lval_t < T , void > tvCastToVecInPlace ( T tv ) { <nl> continue ; <nl> } <nl> <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : { <nl> - auto const adIn = val ( tv ) . parr ; <nl> - assertx ( adIn - > isShape ( ) ) ; <nl> - a = MixedArray : : ToVecShape ( adIn , adIn - > cowCheck ( ) ) ; <nl> - assertx ( a ! = adIn ) ; <nl> - decRefArr ( adIn ) ; <nl> - continue ; <nl> - } <nl> - <nl> case KindOfPersistentArray : <nl> case KindOfArray : { <nl> auto * adIn = val ( tv ) . parr ; <nl> enable_if_lval_t < T , void > tvCastToDictInPlace ( T tv ) { <nl> continue ; <nl> } <nl> <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : { <nl> - auto const adIn = val ( tv ) . 
parr ; <nl> - assertx ( adIn - > isShape ( ) ) ; <nl> - a = MixedArray : : ToDictShape ( adIn , adIn - > cowCheck ( ) ) ; <nl> - if ( a ! = adIn ) decRefArr ( adIn ) ; <nl> - continue ; <nl> - } <nl> - <nl> case KindOfPersistentArray : <nl> case KindOfArray : { <nl> auto * adIn = val ( tv ) . parr ; <nl> enable_if_lval_t < T , void > tvCastToKeysetInPlace ( T tv ) { <nl> continue ; <nl> } <nl> <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : { <nl> - auto const adIn = val ( tv ) . parr ; <nl> - assertx ( adIn - > isShape ( ) ) ; <nl> - a = MixedArray : : ToKeysetShape ( adIn , adIn - > cowCheck ( ) ) ; <nl> - if ( a ! = adIn ) decRefArr ( adIn ) ; <nl> - continue ; <nl> - } <nl> - <nl> case KindOfPersistentArray : <nl> case KindOfArray : { <nl> auto * adIn = val ( tv ) . parr ; <nl> enable_if_lval_t < T , void > tvCastToVArrayInPlace ( T tv ) { <nl> continue ; <nl> } <nl> <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : { <nl> - auto const adIn = val ( tv ) . parr ; <nl> - assertx ( adIn - > isShape ( ) ) ; <nl> - a = MixedArray : : ToVArrayShape ( adIn , adIn - > cowCheck ( ) ) ; <nl> - assertx ( a ! = adIn ) ; <nl> - decRefArr ( adIn ) ; <nl> - continue ; <nl> - } <nl> - <nl> case KindOfPersistentArray : <nl> case KindOfArray : { <nl> auto * adIn = val ( tv ) . parr ; <nl> enable_if_lval_t < T , void > tvCastToDArrayInPlace ( T tv ) { <nl> continue ; <nl> } <nl> <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : { <nl> - auto const adIn = val ( tv ) . parr ; <nl> - assertx ( adIn - > isShape ( ) ) ; <nl> - a = MixedArray : : ToDArrayShape ( adIn , adIn - > cowCheck ( ) ) ; <nl> - if ( a ! = adIn ) decRefArr ( adIn ) ; <nl> - continue ; <nl> - } <nl> - <nl> case KindOfPersistentArray : <nl> case KindOfArray : { <nl> auto * adIn = val ( tv ) . parr ; <nl> ObjectData * tvCastToObjectData ( TypedValue tv ) { <nl> case KindOfVec : <nl> case KindOfPersistentDict : <nl> case KindOfDict : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : { <nl> auto const arr = Array : : attach ( tv . m_data . parr - > toPHPArray ( true ) ) ; <nl> enable_if_lval_t < T , void > tvCastToObjectInPlace ( T tv ) { <nl> continue ; <nl> } <nl> <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - tvCastToArrayInPlace ( tv ) ; <nl> - } <nl> - assign ( tv , ObjectData : : FromArray ( val ( tv ) . parr ) ) ; <nl> - return ; <nl> - <nl> case KindOfPersistentVec : <nl> case KindOfVec : <nl> case KindOfPersistentDict : <nl> enable_if_lval_t < T , void > tvCastToResourceInPlace ( T tv ) { <nl> case KindOfVec : <nl> case KindOfDict : <nl> case KindOfKeyset : <nl> - case KindOfShape : <nl> case KindOfArray : <nl> case KindOfObject : <nl> case KindOfRecord : <nl> X ( String ) <nl> X ( Vec ) <nl> X ( Dict ) <nl> X ( Keyset ) <nl> - X ( Shape ) <nl> X ( Object ) <nl> X ( NullableObject ) <nl> X ( Resource ) <nl> mmm a / hphp / runtime / base / tv - conversions . h <nl> ppp b / hphp / runtime / base / tv - conversions . h <nl> Y ( Array ) <nl> X ( Vec ) <nl> X ( Dict ) <nl> X ( Keyset ) <nl> - X ( Shape ) <nl> X ( Object ) <nl> X ( NullableObject ) <nl> X ( Resource ) <nl> mmm a / hphp / runtime / base / tv - helpers . cpp <nl> ppp b / hphp / runtime / base / tv - helpers . cpp <nl> bool cellIsPlausible ( const Cell cell ) { <nl> assertx ( cell . m_data . parr - > isKeyset ( ) ) ; <nl> assertx ( cell . m_data . 
parr - > isNotDVArray ( ) ) ; <nl> return ; <nl> - case KindOfPersistentShape : <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - assertPtr ( cell . m_data . parr ) ; <nl> - assertx ( ! cell . m_data . parr - > isRefCounted ( ) ) ; <nl> - assertx ( cell . m_data . parr - > isShape ( ) ) ; <nl> - assertx ( cell . m_data . parr - > isNotDVArray ( ) ) ; <nl> - return ; <nl> - } <nl> - assertPtr ( cell . m_data . parr ) ; <nl> - assertx ( cell . m_data . parr - > kindIsValid ( ) ) ; <nl> - assertx ( ! cell . m_data . parr - > isRefCounted ( ) ) ; <nl> - assertx ( cell . m_data . parr - > isShape ( ) ) ; <nl> - assertx ( cell . m_data . parr - > dvArraySanityCheck ( ) ) ; <nl> - return ; <nl> - case KindOfShape : <nl> - assertPtr ( cell . m_data . parr ) ; <nl> - assertx ( cell . m_data . parr - > checkCountZ ( ) ) ; <nl> - assertx ( cell . m_data . parr - > isShape ( ) ) ; <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - assertx ( cell . m_data . parr - > isNotDVArray ( ) ) ; <nl> - return ; <nl> - } <nl> - assertx ( cell . m_data . parr - > kindIsValid ( ) ) ; <nl> - assertx ( cell . m_data . parr - > dvArraySanityCheck ( ) ) ; <nl> - return ; <nl> case KindOfPersistentArray : <nl> assertPtr ( cell . m_data . parr ) ; <nl> assertx ( cell . m_data . parr - > kindIsValid ( ) ) ; <nl> mmm a / hphp / runtime / base / tv - refcount . h <nl> ppp b / hphp / runtime / base / tv - refcount . h <nl> enable_if_lval_t < T , void > tvDecRefArr ( T tv ) { <nl> assertx ( type ( tv ) = = KindOfArray | | <nl> type ( tv ) = = KindOfVec | | <nl> type ( tv ) = = KindOfDict | | <nl> - type ( tv ) = = KindOfShape | | <nl> type ( tv ) = = KindOfKeyset ) ; <nl> decRefArr ( val ( tv ) . parr ) ; <nl> } <nl> mmm a / hphp / runtime / base / tv - type . h <nl> ppp b / hphp / runtime / base / tv - type . h <nl> CASE ( Int ) <nl> CASE ( Double ) <nl> CASE ( String ) <nl> CASE ( Array ) <nl> - CASE ( Shape ) <nl> - CASE ( ArrayOrShape ) <nl> - CASE ( DictOrShape ) <nl> CASE ( ArrayLike ) <nl> CASE ( HackArray ) <nl> CASE ( Vec ) <nl> mmm a / hphp / runtime / base / type - array . h <nl> ppp b / hphp / runtime / base / type - array . h <nl> struct Array { <nl> return Array ( ArrayData : : CreateDict ( ) , NoIncRef { } ) ; <nl> } <nl> <nl> - static Array CreateShape ( ) { <nl> - return Array ( ArrayData : : CreateShape ( ) , NoIncRef { } ) ; <nl> - } <nl> - <nl> static Array CreateKeyset ( ) { <nl> return Array ( ArrayData : : CreateKeyset ( ) , NoIncRef { } ) ; <nl> } <nl> struct Array { <nl> Array copy ( ) const { COPY_BODY ( copy ( ) , Array { } ) } <nl> Array toVec ( ) const { COPY_BODY ( toVec ( true ) , CreateVec ( ) ) } <nl> Array toDict ( ) const { COPY_BODY ( toDict ( true ) , CreateDict ( ) ) } <nl> - Array toShape ( ) const { COPY_BODY ( toShape ( true ) , CreateShape ( ) ) } <nl> Array toKeyset ( ) const { COPY_BODY ( toKeyset ( true ) , CreateKeyset ( ) ) } <nl> Array toPHPArray ( ) const { COPY_BODY ( toPHPArray ( true ) , Array { } ) } <nl> Array toPHPArrayIntishCast ( ) const { <nl> struct Array { <nl> * / <nl> bool isVecArray ( ) const { return m_arr & & m_arr - > isVecArray ( ) ; } <nl> bool isDict ( ) const { return m_arr & & m_arr - > isDict ( ) ; } <nl> - bool isShape ( ) const { return m_arr & & m_arr - > isShape ( ) ; } <nl> bool isKeyset ( ) const { return m_arr & & m_arr - > isKeyset ( ) ; } <nl> bool isHackArray ( ) const { return m_arr & & m_arr - > isHackArray ( ) ; } <nl> bool isPHPArray ( ) const { return ! 
m_arr | | m_arr - > isPHPArray ( ) ; } <nl> struct Array { <nl> bool isDArray ( ) const { return m_arr & & m_arr - > isDArray ( ) ; } <nl> bool isVecOrVArray ( ) const { return m_arr & & m_arr - > isVecOrVArray ( ) ; } <nl> bool isDictOrDArray ( ) const { return m_arr & & m_arr - > isDictOrDArray ( ) ; } <nl> - bool isDictOrDArrayOrShape ( ) const { <nl> - return m_arr & & m_arr - > isDictOrDArrayOrShape ( ) ; <nl> - } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> mmm a / hphp / runtime / base / type - string . cpp <nl> ppp b / hphp / runtime / base / type - string . cpp <nl> StaticString getDataTypeString ( DataType t ) { <nl> case KindOfDict : return s_dict ; <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : return s_keyset ; <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> - return RuntimeOption : : EvalHackArrDVArrs ? s_dict : s_array ; <nl> case KindOfPersistentArray : <nl> case KindOfArray : return s_array ; <nl> case KindOfObject : return s_object ; <nl> mmm a / hphp / runtime / base / type - structure - helpers . cpp <nl> ppp b / hphp / runtime / base / type - structure - helpers . cpp <nl> bool cellInstanceOfImpl ( const Cell * tv , F lookupClass ) { <nl> return cls & & interface_supports_keyset ( cls - > name ( ) ) ; <nl> } <nl> <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : { <nl> - auto const cls = lookupClass ( ) ; <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - return cls & & interface_supports_dict ( cls - > name ( ) ) ; <nl> - } <nl> - return cls & & interface_supports_array ( cls - > name ( ) ) ; <nl> - } <nl> - <nl> case KindOfPersistentArray : <nl> case KindOfArray : { <nl> auto const cls = lookupClass ( ) ; <nl> bool checkTypeStructureMatchesCellImpl ( <nl> raise_hackarr_compat_notice ( Strings : : HACKARR_COMPAT_DARR_IS_DICT ) ; <nl> } <nl> } <nl> - result = isDictOrShapeType ( type ) ; <nl> + result = isDictType ( type ) ; <nl> if ( result & & UNLIKELY ( RuntimeOption : : EvalLogArrayProvenance ) ) { <nl> raise_array_serialization_notice ( " is_dict " , data . parr ) ; <nl> } <nl> bool checkTypeStructureMatchesCellImpl ( <nl> } <nl> break ; <nl> } <nl> - result = isVecType ( type ) | | isDictOrShapeType ( type ) ; <nl> + result = isVecType ( type ) | | isDictType ( type ) ; <nl> if ( result & & UNLIKELY ( RuntimeOption : : EvalLogArrayProvenance ) ) { <nl> raise_array_serialization_notice ( isVecType ( type ) ? " is_vec " : " is_dict " , <nl> data . parr ) ; <nl> bool checkTypeStructureMatchesCellImpl ( <nl> break ; <nl> } <nl> result = isArrayType ( type ) | | isVecType ( type ) | | <nl> - isDictType ( type ) | | isShapeType ( type ) | | isKeysetType ( type ) ; <nl> + isDictType ( type ) | | isKeysetType ( type ) ; <nl> break ; <nl> case TypeStructure : : Kind : : T_enum : { <nl> assertx ( ts . exists ( s_classname ) ) ; <nl> mmm a / hphp / runtime / base / type - variant . cpp <nl> ppp b / hphp / runtime / base / type - variant . 
cpp <nl> void objReleaseWrapper ( ObjectData * obj ) noexcept { <nl> } <nl> <nl> static_assert ( typeToDestrIdx ( KindOfArray ) = = 0 , " Array destruct index " ) ; <nl> - static_assert ( typeToDestrIdx ( KindOfShape ) = = 1 , " Shape destruct index " ) ; <nl> - static_assert ( typeToDestrIdx ( KindOfKeyset ) = = 2 , " Keyset destruct index " ) ; <nl> - static_assert ( typeToDestrIdx ( KindOfDict ) = = 3 , " Dict destruct index " ) ; <nl> - static_assert ( typeToDestrIdx ( KindOfVec ) = = 4 , " Vec destruct index " ) ; <nl> - static_assert ( typeToDestrIdx ( KindOfRecord ) = = 5 , " Record destruct index " ) ; <nl> - static_assert ( typeToDestrIdx ( KindOfString ) = = 6 , " String destruct index " ) ; <nl> - static_assert ( typeToDestrIdx ( KindOfObject ) = = 8 , " Object destruct index " ) ; <nl> - static_assert ( typeToDestrIdx ( KindOfResource ) = = 9 , " Resource destruct index " ) ; <nl> - static_assert ( typeToDestrIdx ( KindOfRef ) = = 10 , " Ref destruct index " ) ; <nl> + static_assert ( typeToDestrIdx ( KindOfKeyset ) = = 1 , " Keyset destruct index " ) ; <nl> + static_assert ( typeToDestrIdx ( KindOfDict ) = = 2 , " Dict destruct index " ) ; <nl> + static_assert ( typeToDestrIdx ( KindOfVec ) = = 3 , " Vec destruct index " ) ; <nl> + static_assert ( typeToDestrIdx ( KindOfRecord ) = = 4 , " Record destruct index " ) ; <nl> + static_assert ( typeToDestrIdx ( KindOfString ) = = 5 , " String destruct index " ) ; <nl> + static_assert ( typeToDestrIdx ( KindOfObject ) = = 7 , " Object destruct index " ) ; <nl> + static_assert ( typeToDestrIdx ( KindOfResource ) = = 8 , " Resource destruct index " ) ; <nl> + static_assert ( typeToDestrIdx ( KindOfRef ) = = 9 , " Ref destruct index " ) ; <nl> # ifndef USE_LOWPTR <nl> - static_assert ( typeToDestrIdx ( KindOfClsMeth ) = = 11 , " ClsMeth destruct index " ) ; <nl> + static_assert ( typeToDestrIdx ( KindOfClsMeth ) = = 10 , " ClsMeth destruct index " ) ; <nl> # endif <nl> <nl> - static_assert ( kDestrTableSize = = ( use_lowptr ? 11 : 12 ) , <nl> + static_assert ( kDestrTableSize = = ( use_lowptr ? 10 : 11 ) , <nl> " size of g_destructors [ ] must be kDestrTableSize " ) ; <nl> <nl> RawDestructor g_destructors [ ] = { <nl> ( RawDestructor ) getMethodPtr ( & ArrayData : : release ) , / / KindOfArray <nl> - ( RawDestructor ) & MixedArray : : Release , / / KindOfShape <nl> ( RawDestructor ) & SetArray : : Release , / / KindOfKeyset <nl> ( RawDestructor ) & MixedArray : : Release , / / KindOfDict <nl> ( RawDestructor ) & PackedArray : : Release , / / KindOfVec <nl> DataType Variant : : toNumeric ( int64_t & ival , double & dval , <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> case KindOfObject : <nl> bool Variant : : isScalar ( ) const noexcept { <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> case KindOfObject : <nl> static bool isAllowedAsConstantValueImpl ( TypedValue tv ) { <nl> case KindOfPersistentDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> case KindOfPersistentArray : <nl> case KindOfResource : <nl> return true ; <nl> <nl> case KindOfVec : <nl> case KindOfDict : <nl> - case KindOfShape : <nl> case KindOfArray : { <nl> if ( tv . m_data . 
parr - > isGlobalsArray ( ) ) return false ; <nl> <nl> bool Variant : : toBooleanHelper ( ) const { <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : return ! m_data . parr - > empty ( ) ; <nl> case KindOfObject : return m_data . pobj - > toBoolean ( ) ; <nl> int64_t Variant : : toInt64Helper ( int base / * = 10 * / ) const { <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : return m_data . parr - > empty ( ) ? 0 : 1 ; <nl> case KindOfObject : return m_data . pobj - > toInt64 ( ) ; <nl> double Variant : : toDoubleHelper ( ) const { <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : return ( double ) toInt64 ( ) ; <nl> case KindOfObject : return m_data . pobj - > toDouble ( ) ; <nl> Array Variant : : toPHPArrayHelper ( ) const { <nl> case KindOfVec : <nl> case KindOfPersistentDict : <nl> case KindOfDict : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : return ArrNR { m_data . parr } . asArray ( ) . toPHPArray ( ) ; <nl> case KindOfPersistentArray : <nl> Resource Variant : : toResourceHelper ( ) const { <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> case KindOfObject : <nl> void Variant : : setEvalScalar ( ) { <nl> do_array ( ) ; <nl> return ; <nl> <nl> - case KindOfShape : <nl> - m_type = KindOfPersistentShape ; <nl> - case KindOfPersistentShape : <nl> - do_array ( ) ; <nl> - return ; <nl> - <nl> case KindOfArray : <nl> m_type = KindOfPersistentArray ; <nl> case KindOfPersistentArray : <nl> mmm a / hphp / runtime / base / type - variant . h <nl> ppp b / hphp / runtime / base / type - variant . 
h <nl> struct base { <nl> bool isString ( ) const { return isStringType ( getType ( ) ) ; } <nl> bool isArray ( ) const { return isArrayLikeType ( getType ( ) ) ; } <nl> bool isPHPArray ( ) const { return isArrayType ( getType ( ) ) ; } <nl> - bool isPHPArrayOrShape ( ) const { return isArrayOrShapeType ( getType ( ) ) ; } <nl> bool isVecArray ( ) const { return isVecType ( getType ( ) ) ; } <nl> bool isDict ( ) const { return isDictType ( getType ( ) ) ; } <nl> - bool isDictOrShape ( ) const { return isDictOrShapeType ( getType ( ) ) ; } <nl> bool isKeyset ( ) const { return isKeysetType ( getType ( ) ) ; } <nl> bool isHackArray ( ) const { return isHackArrayType ( getType ( ) ) ; } <nl> bool isObject ( ) const { return isObjectType ( getType ( ) ) ; } <nl> struct Variant : private TypedValue { <nl> bool isPHPArray ( ) const { <nl> return isArrayType ( getType ( ) ) ; <nl> } <nl> - bool isPHPArrayOrShape ( ) const { <nl> - return isArrayOrShapeType ( getType ( ) ) ; <nl> - } <nl> bool isVecArray ( ) const { <nl> return isVecType ( getType ( ) ) ; <nl> } <nl> bool isDict ( ) const { <nl> return isDictType ( getType ( ) ) ; <nl> } <nl> - bool isDictOrShape ( ) const { <nl> - return isDictOrShapeType ( getType ( ) ) ; <nl> - } <nl> - bool isShape ( ) const { <nl> - return isShapeType ( getType ( ) ) ; <nl> - } <nl> bool isKeyset ( ) const { <nl> return isKeysetType ( getType ( ) ) ; <nl> } <nl> struct Variant : private TypedValue { <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> case KindOfFunc : <nl> struct VarNR : private TypedValueAux { <nl> case KindOfVec : <nl> case KindOfDict : <nl> case KindOfKeyset : <nl> - case KindOfShape : <nl> case KindOfArray : <nl> assertx ( m_data . parr - > checkCount ( ) ) ; <nl> return ; <nl> mmm a / hphp / runtime / base / typed - value . h <nl> ppp b / hphp / runtime / base / typed - value . h <nl> union Value { <nl> int64_t num ; / / KindOfInt64 , KindOfBool ( must be zero - extended ) <nl> double dbl ; / / KindOfDouble <nl> StringData * pstr ; / / KindOfString , KindOfPersistentString <nl> - ArrayData * parr ; / / KindOfArray , KindOfVec , KindOfDict , KindOfShape , KindOfKeyset <nl> + ArrayData * parr ; / / KindOfArray , KindOfVec , KindOfDict , KindOfKeyset <nl> ObjectData * pobj ; / / KindOfObject <nl> ResourceHdr * pres ; / / KindOfResource <nl> RefData * pref ; / / KindOfRef <nl> X ( KindOfNull , void ) ; <nl> X ( KindOfBoolean , bool ) ; <nl> X ( KindOfInt64 , int64_t ) ; <nl> X ( KindOfDouble , double ) ; <nl> - X ( KindOfShape , ArrayData * ) ; <nl> - X ( KindOfPersistentShape , const ArrayData * ) ; <nl> X ( KindOfArray , ArrayData * ) ; <nl> X ( KindOfPersistentArray , const ArrayData * ) ; <nl> X ( KindOfVec , ArrayData * ) ; <nl> mmm a / hphp / runtime / base / variable - serializer . cpp <nl> ppp b / hphp / runtime / base / variable - serializer . 
cpp <nl> VariableSerializer : : getKind ( const ArrayData * arr ) const { <nl> ( arr - > isLegacyArray ( ) & & getType ( ) = = Type : : Serialize ) ) ) { <nl> return VariableSerializer : : ArrayKind : : PHP ; <nl> } <nl> - if ( arr - > isShape ( ) & & getType ( ) = = Type : : Internal ) { <nl> - return VariableSerializer : : ArrayKind : : Shape ; <nl> - } <nl> - if ( arr - > isDictOrShape ( ) ) return VariableSerializer : : ArrayKind : : Dict ; <nl> - if ( arr - > isVecArray ( ) ) return VariableSerializer : : ArrayKind : : Vec ; <nl> - if ( arr - > isKeyset ( ) ) return VariableSerializer : : ArrayKind : : Keyset ; <nl> + if ( arr - > isDict ( ) ) return VariableSerializer : : ArrayKind : : Dict ; <nl> + if ( arr - > isVecArray ( ) ) return VariableSerializer : : ArrayKind : : Vec ; <nl> + if ( arr - > isKeyset ( ) ) return VariableSerializer : : ArrayKind : : Keyset ; <nl> assertx ( arr - > isPHPArray ( ) ) ; <nl> if ( m_keepDVArrays ) { <nl> if ( arr - > isVArray ( ) ) return VariableSerializer : : ArrayKind : : VArray ; <nl> void VariableSerializer : : writeArrayHeader ( int size , bool isVectorData , <nl> case ArrayKind : : Dict : <nl> m_buf - > append ( " Dict \ n " ) ; <nl> break ; <nl> - case ArrayKind : : Shape : <nl> - always_assert_flog ( false , " Shapes should not be serialized externally " ) ; <nl> case ArrayKind : : Vec : <nl> m_buf - > append ( " Vec \ n " ) ; <nl> break ; <nl> void VariableSerializer : : writeArrayHeader ( int size , bool isVectorData , <nl> m_buf - > append ( " NULL " ) ; <nl> } else { <nl> switch ( kind ) { <nl> - case ArrayKind : : Shape : <nl> - always_assert_flog ( false , " Shapes should not be serialized externally " ) ; <nl> case ArrayKind : : Dict : <nl> if ( m_type = = Type : : PHPOutput & & m_dvOverrides ) { <nl> m_buf - > append ( <nl> void VariableSerializer : : writeArrayHeader ( int size , bool isVectorData , <nl> case ArrayKind : : Dict : <nl> m_buf - > append ( " dict " ) ; <nl> break ; <nl> - case ArrayKind : : Shape : <nl> - always_assert_flog ( false , " Shapes should not be serialized externally " ) ; <nl> case ArrayKind : : Vec : <nl> m_buf - > append ( " vec " ) ; <nl> break ; <nl> void VariableSerializer : : writeArrayHeader ( int size , bool isVectorData , <nl> case ArrayKind : : Dict : <nl> m_buf - > append ( " D : " ) ; <nl> break ; <nl> - case ArrayKind : : Shape : <nl> - if ( m_type = = Type : : Internal ) { <nl> - m_buf - > append ( " H : " ) ; <nl> - } else { <nl> - always_assert_flog ( false , <nl> - " Shapes should not be serialized externally " ) ; <nl> - } <nl> - break ; <nl> case ArrayKind : : Vec : <nl> m_buf - > append ( " v : " ) ; <nl> break ; <nl> void VariableSerializer : : writeArrayFooter ( <nl> } <nl> } else if ( m_rsrcName . empty ( ) ) { / / for rsrc , only write NULL in arrayHeader <nl> switch ( kind ) { <nl> - case ArrayKind : : Shape : <nl> - always_assert_flog ( false , " Shapes should not be serialized externally " ) ; <nl> case ArrayKind : : Dict : <nl> case ArrayKind : : Vec : <nl> case ArrayKind : : Keyset : <nl> void VariableSerializer : : serializeVariant ( tv_rval tv , <nl> serializeArray ( val ( tv ) . parr , skipNestCheck ) ; <nl> return ; <nl> <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> - assertx ( ! isArrayKey ) ; <nl> - assertx ( val ( tv ) . parr - > isShape ( ) ) ; <nl> - serializeArray ( val ( tv ) . parr , skipNestCheck ) ; <nl> - return ; <nl> - <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> assertx ( ! 
isArrayKey ) ; <nl> void VariableSerializer : : serializeArray ( const ArrayData * arr , <nl> raise_hack_arr_compat_serialize_notice ( arr ) ; <nl> m_hasHackWarned = true ; <nl> } <nl> - if ( UNLIKELY ( m_dictWarn & & ! m_hasDictWarned & & arr - > isDictOrShape ( ) ) ) { <nl> + if ( UNLIKELY ( m_dictWarn & & ! m_hasDictWarned & & arr - > isDict ( ) ) ) { <nl> raise_hack_arr_compat_serialize_notice ( arr ) ; <nl> m_hasDictWarned = true ; <nl> } <nl> mmm a / hphp / runtime / base / variable - serializer . h <nl> ppp b / hphp / runtime / base / variable - serializer . h <nl> struct VariableSerializer { <nl> / / ignore uninitialized late init props and do not attempt to serialize them <nl> void setIgnoreLateInit ( ) { m_ignoreLateInit = true ; } <nl> <nl> - enum class ArrayKind { PHP , Dict , Shape , Vec , Keyset , VArray , DArray } ; <nl> + enum class ArrayKind { PHP , Dict , Vec , Keyset , VArray , DArray } ; <nl> <nl> / / One entry for each vec or dict in the value being serialized ( in a <nl> / / pre - order walk ) . If the bool is true , and mode is PHPOutput , the vec or <nl> mmm a / hphp / runtime / base / variable - unserializer . cpp <nl> ppp b / hphp / runtime / base / variable - unserializer . cpp <nl> VariableUnserializer : : RefInfo : : makeDictValue ( tv_lval v ) { <nl> return RefInfo { v , Type : : DictValue } ; <nl> } <nl> <nl> - VariableUnserializer : : RefInfo <nl> - VariableUnserializer : : RefInfo : : makeShapeValue ( tv_lval v ) { <nl> - return RefInfo { v , Type : : ShapeValue } ; <nl> - } <nl> - <nl> tv_lval VariableUnserializer : : RefInfo : : var ( ) const { <nl> return m_data . drop_tag ( ) ; <nl> } <nl> void VariableUnserializer : : add ( tv_lval v , UnserializeMode mode ) { <nl> m_refs . emplace_back ( RefInfo : : makeDictValue ( v ) ) ; <nl> } else if ( mode = = UnserializeMode : : ColValue ) { <nl> m_refs . emplace_back ( RefInfo : : makeColValue ( v ) ) ; <nl> - } else if ( mode = = UnserializeMode : : ShapeValue ) { <nl> - m_refs . emplace_back ( RefInfo : : makeShapeValue ( v ) ) ; <nl> } else { <nl> assertx ( mode = = UnserializeMode : : ColKey ) ; <nl> / / We don ' t currently support using the ' R ' encoding to refer to collection <nl> void VariableUnserializer : : unserializeVariant ( <nl> throwUnknownType ( type ) ; <nl> } <nl> break ; <nl> - case ' H ' : / / Shape <nl> - { <nl> - check_recursion_throw ( ) ; <nl> - auto a = unserializeShape ( ) ; <nl> - tvMove ( make_array_like_tv ( a . detach ( ) ) , self ) ; <nl> - } <nl> - return ; / / Shape has ' } ' terminating <nl> case ' a ' : / / PHP array <nl> case ' D ' : / / Dict <nl> { <nl> Array VariableUnserializer : : unserializeDArray ( ) { <nl> return arr ; <nl> } <nl> <nl> - Array VariableUnserializer : : unserializeShape ( ) { <nl> - / / Shapes need to behave like DArrays externally in the serializer . Calling <nl> - / / unserializeDict here produces incompatible behaviour with getDefaultValueText ( ) <nl> - auto arr = unserializeDArray ( ) ; <nl> - arr = arr - > toShapeInPlaceIfCompatible ( ) ; <nl> - return arr ; <nl> - } <nl> - <nl> Array VariableUnserializer : : unserializeKeyset ( ) { <nl> int64_t size = readInt ( ) ; <nl> expectChar ( ' : ' ) ; <nl> mmm a / hphp / runtime / base / variable - unserializer . h <nl> ppp b / hphp / runtime / base / variable - unserializer . 
h <nl> enum class UnserializeMode { <nl> ColKey = 3 , <nl> VecValue = 4 , <nl> DictValue = 5 , <nl> - ShapeValue = 6 , <nl> } ; <nl> <nl> struct InvalidAllowedClassesException : Exception { <nl> struct VariableUnserializer { <nl> static RefInfo makeColValue ( tv_lval v ) ; <nl> static RefInfo makeVecValue ( tv_lval v ) ; <nl> static RefInfo makeDictValue ( tv_lval v ) ; <nl> - static RefInfo makeShapeValue ( tv_lval v ) ; <nl> <nl> tv_lval var ( ) const ; <nl> <nl> struct VariableUnserializer { <nl> ColValue , <nl> VecValue , <nl> DictValue , <nl> - ShapeValue <nl> } ; <nl> RefInfo ( tv_lval , Type ) ; <nl> / / tv_lval with a Type tag . <nl> struct VariableUnserializer { <nl> Array unserializeKeyset ( ) ; <nl> Array unserializeVArray ( ) ; <nl> Array unserializeDArray ( ) ; <nl> - Array unserializeShape ( ) ; <nl> folly : : StringPiece unserializeStringPiece ( char delimiter0 = ' " ' , <nl> char delimiter1 = ' " ' ) ; <nl> String unserializeString ( char delimiter0 = ' " ' , char delimiter1 = ' " ' ) ; <nl> mmm a / hphp / runtime / ext / array / ext_array . cpp <nl> ppp b / hphp / runtime / ext / array / ext_array . cpp <nl> bool HHVM_FUNCTION ( array_key_exists , <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> case KindOfObject : <nl> TypedValue HHVM_FUNCTION ( array_product , <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> case KindOfObject : <nl> TypedValue HHVM_FUNCTION ( array_product , <nl> case KindOfVec : <nl> case KindOfDict : <nl> case KindOfKeyset : <nl> - case KindOfShape : <nl> case KindOfArray : <nl> case KindOfClsMeth : <nl> case KindOfObject : <nl> TypedValue HHVM_FUNCTION ( array_sum , <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> case KindOfObject : <nl> TypedValue HHVM_FUNCTION ( array_sum , <nl> case KindOfVec : <nl> case KindOfDict : <nl> case KindOfKeyset : <nl> - case KindOfShape : <nl> case KindOfArray : <nl> case KindOfClsMeth : <nl> case KindOfObject : <nl> int64_t HHVM_FUNCTION ( count , <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> if ( ( CountMode ) mode = = CountMode : : RECURSIVE ) { <nl> TypedValue HHVM_FUNCTION ( HH_array_key_cast , const Variant & input ) { <nl> SystemLib : : throwInvalidArgumentExceptionObject ( <nl> " Keysets cannot be cast to an array - key " <nl> ) ; <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - SystemLib : : throwInvalidArgumentExceptionObject ( <nl> - " Dicts cannot be cast to an array - key " <nl> - ) ; <nl> - } else { <nl> - SystemLib : : throwInvalidArgumentExceptionObject ( <nl> - " Arrays cannot be cast to an array - key " <nl> - ) ; <nl> - } <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> SystemLib : : throwInvalidArgumentExceptionObject ( <nl> mmm a / hphp / runtime / ext / asio / ext_await - all - wait - handle . cpp <nl> ppp b / hphp / runtime / ext / asio / ext_await - all - wait - handle . 
cpp <nl> Object HHVM_STATIC_METHOD ( AwaitAllWaitHandle , fromArray , <nl> PackedArray : : IterateV ( ad , fn ) ; <nl> } ) ; <nl> <nl> - case ArrayData : : kShapeKind : <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - not_reached ( ) ; <nl> - } <nl> - / / Fallthrough <nl> - <nl> case ArrayData : : kMixedKind : <nl> return c_AwaitAllWaitHandle : : Create < true > ( [ = ] ( auto fn ) { <nl> MixedArray : : IterateV ( MixedArray : : asMixed ( ad ) , fn ) ; <nl> Object HHVM_STATIC_METHOD ( AwaitAllWaitHandle , fromContainer , <nl> case KindOfPersistentDict : <nl> case KindOfDict : <nl> return c_AwaitAllWaitHandle_ns_fromDict ( self_ , dependencies . asCArrRef ( ) ) ; <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> return c_AwaitAllWaitHandle_ns_fromArray ( self_ , dependencies . asCArrRef ( ) ) ; <nl> mmm a / hphp / runtime / ext / fb / VariantController . h <nl> ppp b / hphp / runtime / ext / fb / VariantController . h <nl> struct VariantControllerImpl { <nl> case KindOfPersistentString : <nl> case KindOfString : return HPHP : : serialize : : Type : : STRING ; <nl> case KindOfObject : return HPHP : : serialize : : Type : : OBJECT ; <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : { / / TODO ( T31134050 ) <nl> - if ( RuntimeOption : : EvalHackArrDVArrs & & <nl> - HackArraysMode = = VariantControllerHackArraysMode : : OFF ) { <nl> - throw HPHP : : serialize : : HackArraySerializeError { } ; <nl> - } <nl> - return HPHP : : serialize : : Type : : MAP ; <nl> - } <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> if ( HackArraysMode = = VariantControllerHackArraysMode : : MIGRATORY ) { <nl> mmm a / hphp / runtime / ext / fb / ext_fb . cpp <nl> ppp b / hphp / runtime / ext / fb / ext_fb . cpp <nl> static int fb_compact_serialize_variant ( <nl> return 0 ; <nl> } <nl> <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : { / / TODO ( T31134050 ) <nl> - Array arr = var . toArray ( ) ; <nl> - assertx ( arr - > isDictOrDArray ( ) ) ; <nl> - fb_compact_serialize_array_as_map ( sb , std : : move ( arr ) , depth ) ; <nl> - return 0 ; <nl> - } <nl> - <nl> case KindOfPersistentArray : <nl> case KindOfArray : { <nl> Array arr = var . toArray ( ) ; <nl> mmm a / hphp / runtime / ext / gmp / ext_gmp . cpp <nl> ppp b / hphp / runtime / ext / gmp / ext_gmp . cpp <nl> static bool variantToGMPData ( const char * const fnCaller , <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> case KindOfRef : <nl> mmm a / hphp / runtime / ext / hh / ext_hh . cpp <nl> ppp b / hphp / runtime / ext / hh / ext_hh . cpp <nl> void serialize_memoize_tv ( StringBuffer & sb , int depth , TypedValue tv ) { <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> serialize_memoize_array ( sb , depth , tv . m_data . parr ) ; <nl> mmm a / hphp / runtime / ext / json / JSON_parser . cpp <nl> ppp b / hphp / runtime / ext / json / JSON_parser . 
cpp <nl> static void json_create_zval ( Variant & z , UncheckedBuffer & buf , DataType type , <nl> case KindOfUninit : <nl> case KindOfNull : <nl> case KindOfPersistentString : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> case KindOfPersistentVec : <nl> mmm a / hphp / runtime / ext / memcache / ext_memcache . cpp <nl> ppp b / hphp / runtime / ext / memcache / ext_memcache . cpp <nl> static uint32_t memcache_get_flag_for_type ( const Variant & var ) { <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> case KindOfObject : <nl> mmm a / hphp / runtime / ext / objprof / ext_heapgraph . cpp <nl> ppp b / hphp / runtime / ext / objprof / ext_heapgraph . cpp <nl> CapturedPtr getEdgeInfo ( const HeapGraph & g , int ptr ) { <nl> / / Known generalized cases that don ' t really need pointer kind <nl> case HeaderKind : : Mixed : <nl> case HeaderKind : : Dict : <nl> - case HeaderKind : : Shape : <nl> case HeaderKind : : Keyset : { <nl> if ( edge . offset > = sizeof ( MixedArray ) ) { <nl> using Elm = MixedArray : : Elm ; <nl> mmm a / hphp / runtime / ext / objprof / ext_objprof . cpp <nl> ppp b / hphp / runtime / ext / objprof / ext_objprof . cpp <nl> std : : pair < int , double > sizeOfArray ( <nl> case KindOfBoolean : <nl> case KindOfPersistentDict : <nl> case KindOfDouble : <nl> - case KindOfPersistentShape : <nl> case KindOfPersistentArray : <nl> case KindOfPersistentKeyset : <nl> case KindOfObject : <nl> std : : pair < int , double > sizeOfArray ( <nl> case KindOfVec : <nl> case KindOfDict : <nl> case KindOfRef : <nl> - case KindOfShape : <nl> case KindOfArray : <nl> case KindOfKeyset : <nl> case KindOfFunc : <nl> void stringsOfArray ( <nl> case KindOfBoolean : <nl> case KindOfPersistentDict : <nl> case KindOfDouble : <nl> - case KindOfPersistentShape : <nl> case KindOfPersistentArray : <nl> case KindOfPersistentKeyset : <nl> case KindOfObject : <nl> void stringsOfArray ( <nl> case KindOfVec : <nl> case KindOfDict : <nl> case KindOfRef : <nl> - case KindOfShape : <nl> case KindOfArray : <nl> case KindOfKeyset : <nl> case KindOfFunc : <nl> std : : pair < int , double > tvGetSize ( <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : { <nl> ArrayData * arr = tv . m_data . parr ; <nl> void tvGetStrings ( <nl> case HPHP : : KindOfDict : <nl> case HPHP : : KindOfPersistentKeyset : <nl> case HPHP : : KindOfKeyset : <nl> - case HPHP : : KindOfPersistentShape : <nl> - case HPHP : : KindOfShape : <nl> case HPHP : : KindOfPersistentArray : <nl> case HPHP : : KindOfArray : { <nl> auto * arr = tv . m_data . parr ; <nl> mmm a / hphp / runtime / ext / pdo / ext_pdo . cpp <nl> ppp b / hphp / runtime / ext / pdo / ext_pdo . cpp <nl> int pdo_parse_params ( sp_PDOStatement stmt , const String & in , String & out ) { <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> case KindOfObject : <nl> mmm a / hphp / runtime / ext / soap / encoding . h <nl> ppp b / hphp / runtime / ext / soap / encoding . 
h <nl> inline int dataTypeToSoap ( DataType dt ) { <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> case KindOfClsMeth : return SOAP_ENC_ARRAY_DT ; <nl> mmm a / hphp / runtime / ext / std / ext_std_file . cpp <nl> ppp b / hphp / runtime / ext / std / ext_std_file . cpp <nl> Variant HHVM_FUNCTION ( file_put_contents , <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> case KindOfClsMeth : { <nl> mmm a / hphp / runtime / ext / std / ext_std_math . cpp <nl> ppp b / hphp / runtime / ext / std / ext_std_math . cpp <nl> static MaybeDataType convert_for_pow ( const Variant & val , <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> case KindOfClsMeth : <nl> mmm a / hphp / runtime / ext / std / ext_std_variable . cpp <nl> ppp b / hphp / runtime / ext / std / ext_std_variable . cpp <nl> bool HHVM_FUNCTION ( is_scalar , const Variant & v ) { <nl> <nl> bool HHVM_FUNCTION ( is_array , const Variant & v ) { <nl> if ( UNLIKELY ( RuntimeOption : : EvalHackArrCompatIsArrayNotices ) ) { <nl> - if ( v . isPHPArrayOrShape ( ) ) { <nl> + if ( v . isPHPArray ( ) ) { <nl> return true ; <nl> } else if ( v . isVecArray ( ) ) { <nl> raise_hackarr_compat_notice ( Strings : : HACKARR_COMPAT_VEC_IS_ARR ) ; <nl> - } else if ( v . isDictOrShape ( ) ) { <nl> + } else if ( v . isDict ( ) ) { <nl> raise_hackarr_compat_notice ( Strings : : HACKARR_COMPAT_DICT_IS_ARR ) ; <nl> } else if ( v . isKeyset ( ) ) { <nl> raise_hackarr_compat_notice ( Strings : : HACKARR_COMPAT_KEYSET_IS_ARR ) ; <nl> bool HHVM_FUNCTION ( HH_is_vec , const Variant & v ) { <nl> <nl> bool HHVM_FUNCTION ( HH_is_dict , const Variant & v ) { <nl> if ( UNLIKELY ( RuntimeOption : : EvalHackArrCompatIsVecDictNotices ) ) { <nl> - if ( v . isPHPArrayOrShape ( ) ) { <nl> + if ( v . isPHPArray ( ) ) { <nl> auto const & arr = v . toCArrRef ( ) ; <nl> if ( arr . isDArray ( ) ) { <nl> raise_hackarr_compat_notice ( Strings : : HACKARR_COMPAT_DARR_IS_DICT ) ; <nl> bool HHVM_FUNCTION ( HH_is_darray , const Variant & val ) { <nl> auto const cell = val . asTypedValue ( ) ; <nl> if ( RuntimeOption : : EvalHackArrDVArrs ) return is_dict ( cell ) ; <nl> if ( UNLIKELY ( RuntimeOption : : EvalHackArrCompatIsVecDictNotices ) ) { <nl> - if ( val . isDictOrShape ( ) ) { <nl> + if ( val . isDict ( ) ) { <nl> raise_hackarr_compat_notice ( Strings : : HACKARR_COMPAT_DICT_IS_DARR ) ; <nl> return false ; <nl> } <nl> } <nl> - return tvIsArrayOrShape ( cell ) & & cell - > m_data . parr - > isDArray ( ) ; <nl> + return tvIsArray ( cell ) & & cell - > m_data . parr - > isDArray ( ) ; <nl> } <nl> <nl> bool HHVM_FUNCTION ( HH_is_any_array , const Variant & val ) { <nl> ALWAYS_INLINE String serialize_impl ( const Variant & value , <nl> break ; <nl> } <nl> <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : { / / TODO ( T31134050 ) <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - ArrayData * arr = value . getArrayData ( ) ; <nl> - assertx ( arr - > isShape ( ) ) ; <nl> - if ( arr - > empty ( ) ) { <nl> - return UNLIKELY ( arr - > isLegacyArray ( ) ) <nl> - ? 
s_EmptyArray <nl> - : empty_hack ( arr , s_EmptyDictArray ) ; <nl> - } <nl> - break ; <nl> - } <nl> - / / Fallthrough <nl> - } <nl> - <nl> case KindOfPersistentArray : <nl> case KindOfArray : { <nl> ArrayData * arr = value . getArrayData ( ) ; <nl> mmm a / hphp / runtime / ext / string / ext_string . cpp <nl> ppp b / hphp / runtime / ext / string / ext_string . cpp <nl> String HHVM_FUNCTION ( chr , const Variant & ascii ) { <nl> case KindOfPersistentVec : <nl> case KindOfPersistentDict : <nl> case KindOfPersistentKeyset : <nl> - case KindOfPersistentShape : <nl> case KindOfPersistentArray : <nl> case KindOfVec : <nl> case KindOfDict : <nl> case KindOfKeyset : <nl> - case KindOfShape : <nl> case KindOfArray : <nl> case KindOfObject : <nl> case KindOfResource : <nl> mmm a / hphp / runtime / ext / xmlreader / ext_xmlreader . cpp <nl> ppp b / hphp / runtime / ext / xmlreader / ext_xmlreader . cpp <nl> Variant HHVM_METHOD ( XMLReader , __get , <nl> case KindOfNull : <nl> case KindOfDouble : <nl> case KindOfPersistentString : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfArray : <nl> case KindOfPersistentArray : <nl> case KindOfVec : <nl> mmm a / hphp / runtime / test / type . cpp <nl> ppp b / hphp / runtime / test / type . cpp <nl> TEST ( Type , KnownDataType ) { <nl> TPersistentDict , <nl> TStaticDict , <nl> TCountedDict , <nl> - TShape , <nl> - TPersistentShape , <nl> - TCountedShape , <nl> TKeyset , <nl> TPersistentKeyset , <nl> TStaticKeyset , <nl> mmm a / hphp / runtime / vm / bytecode . cpp <nl> ppp b / hphp / runtime / vm / bytecode . cpp <nl> static std : : string toStringElm ( const TypedValue * tv ) { <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> case KindOfObject : <nl> static std : : string toStringElm ( const TypedValue * tv ) { <nl> print_count ( ) ; <nl> os < < " : Keyset " ; <nl> continue ; <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> - assertx ( tv - > m_data . parr - > isShape ( ) ) ; <nl> - assertx ( tv - > m_data . parr - > checkCount ( ) ) ; <nl> - os < < tv - > m_data . parr ; <nl> - print_count ( ) ; <nl> - os < < " : Shape " ; <nl> - continue ; <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> assertx ( tv - > m_data . parr - > isPHPArray ( ) ) ; <nl> void iopSwitch ( PC origpc , PC & pc , SwitchKind kind , int64_t base , <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> case KindOfObject : <nl> void iopSwitch ( PC origpc , PC & pc , SwitchKind kind , int64_t base , <nl> match = SwitchMatch : : DEFAULT ; <nl> return ; <nl> <nl> - case KindOfShape : <nl> - tvDecRefArr ( val ) ; <nl> - case KindOfPersistentShape : <nl> - match = SwitchMatch : : DEFAULT ; <nl> - return ; <nl> - <nl> case KindOfArray : <nl> tvDecRefArr ( val ) ; <nl> case KindOfPersistentArray : <nl> OPTBLD_INLINE static bool isTypeHelper ( Cell * val , IsTypeOp op ) { <nl> ! 
RuntimeOption : : EvalLogArrayProvenance ) | | <nl> vmfp ( ) - > m_func - > isBuiltin ( ) ) { <nl> return is_array ( val ) ; <nl> - } else if ( isArrayOrShapeType ( val - > m_type ) ) { <nl> + } else if ( isArrayType ( val - > m_type ) ) { <nl> return true ; <nl> } else if ( isVecType ( val - > m_type ) ) { <nl> if ( RuntimeOption : : EvalHackArrCompatIsArrayNotices ) { <nl> OPTBLD_INLINE static bool isTypeHelper ( Cell * val , IsTypeOp op ) { <nl> if ( RuntimeOption : : EvalLogArrayProvenance ) { <nl> raise_array_serialization_notice ( " is_array " , val - > m_data . parr ) ; <nl> } <nl> - } else if ( isDictOrShapeType ( val - > m_type ) ) { <nl> + } else if ( isDictType ( val - > m_type ) ) { <nl> if ( RuntimeOption : : EvalHackArrCompatIsArrayNotices ) { <nl> raise_hackarr_compat_notice ( Strings : : HACKARR_COMPAT_DICT_IS_ARR ) ; <nl> } <nl> OPTBLD_INLINE static bool isTypeHelper ( Cell * val , IsTypeOp op ) { <nl> } <nl> case IsTypeOp : : Dict : { <nl> if ( UNLIKELY ( RuntimeOption : : EvalHackArrCompatIsVecDictNotices ) ) { <nl> - if ( isArrayOrShapeType ( val - > m_type ) ) { <nl> + if ( isArrayType ( val - > m_type ) ) { <nl> if ( val - > m_data . parr - > isDArray ( ) ) { <nl> raise_hackarr_compat_notice ( Strings : : HACKARR_COMPAT_DARR_IS_DICT ) ; <nl> } <nl> mmm a / hphp / runtime / vm / class . cpp <nl> ppp b / hphp / runtime / vm / class . cpp <nl> bool Class : : compatibleTraitPropInit ( const TypedValue & tv1 , <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> case KindOfObject : <nl> mmm a / hphp / runtime / vm / globals - array . h <nl> ppp b / hphp / runtime / vm / globals - array . h <nl> struct GlobalsArray final : ArrayData , <nl> static constexpr auto ToKeyset = & ArrayCommon : : ToKeyset ; <nl> static constexpr auto ToVArray = & ArrayCommon : : ToVArray ; <nl> static constexpr auto ToDArray = & ArrayCommon : : ToDArray ; <nl> - static constexpr auto ToShape = & ArrayCommon : : ToShape ; <nl> <nl> private : <nl> static GlobalsArray * asGlobals ( ArrayData * ad ) ; <nl> mmm a / hphp / runtime / vm / hhbc . cpp <nl> ppp b / hphp / runtime / vm / hhbc . cpp <nl> FlavorDesc instrInputFlavor ( PC op , uint32_t idx ) { <nl> void staticArrayStreamer ( const ArrayData * ad , std : : string & out ) { <nl> if ( ad - > isVecArray ( ) ) out + = " vec ( " ; <nl> else if ( ad - > isDict ( ) ) out + = " dict ( " ; <nl> - else if ( ad - > isShape ( ) ) out + = " shape ( " ; <nl> else if ( ad - > isKeyset ( ) ) out + = " keyset ( " ; <nl> else { <nl> assertx ( ad - > isPHPArray ( ) ) ; <nl> void staticStreamer ( const TypedValue * tv , std : : string & out ) { <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> staticArrayStreamer ( tv - > m_data . parr , out ) ; <nl> mmm a / hphp / runtime / vm / jit / dce . cpp <nl> ppp b / hphp / runtime / vm / jit / dce . 
cpp <nl> bool canDCE ( IRInstruction * inst ) { <nl> case CountArrayFast : <nl> case CountVec : <nl> case CountDict : <nl> - case CountShape : <nl> case CountKeyset : <nl> case CountCollection : <nl> case Nop : <nl> bool canDCE ( IRInstruction * inst ) { <nl> case ConvStrToArr : <nl> case ConvVecToArr : <nl> case ConvDictToArr : <nl> - case ConvShapeToArr : <nl> case ConvKeysetToArr : <nl> case ConvArrToNonDVArr : <nl> case ConvObjToDbl : <nl> bool canDCE ( IRInstruction * inst ) { <nl> case ConvCellToStr : <nl> case ConvArrToVec : <nl> case ConvDictToVec : <nl> - case ConvShapeToVec : <nl> case ConvKeysetToVec : <nl> case ConvObjToVec : <nl> case ConvArrToDict : <nl> - case ConvShapeToDict : <nl> case ConvVecToDict : <nl> case ConvKeysetToDict : <nl> case ConvObjToDict : <nl> case ConvArrToKeyset : <nl> case ConvVecToKeyset : <nl> case ConvDictToKeyset : <nl> - case ConvShapeToKeyset : <nl> case ConvObjToKeyset : <nl> case ConvArrToVArr : <nl> case ConvVecToVArr : <nl> case ConvDictToVArr : <nl> - case ConvShapeToVArr : <nl> case ConvKeysetToVArr : <nl> case ConvObjToVArr : <nl> case ConvArrToDArr : <nl> case ConvVecToDArr : <nl> case ConvDictToDArr : <nl> - case ConvShapeToDArr : <nl> case ConvKeysetToDArr : <nl> case ConvObjToDArr : <nl> case LdOutAddr : <nl> bool canDCE ( IRInstruction * inst ) { <nl> case EqArr : <nl> case NeqArr : <nl> case CmpArr : <nl> - case GtShape : <nl> - case GteShape : <nl> - case LtShape : <nl> - case LteShape : <nl> - case EqShape : <nl> - case NeqShape : <nl> - case CmpShape : <nl> case GtVec : <nl> case GteVec : <nl> case LtVec : <nl> bool canDCE ( IRInstruction * inst ) { <nl> case ProfileProp : <nl> return false ; <nl> <nl> - case SameShape : <nl> - case NSameShape : <nl> case SameArr : <nl> case NSameArr : <nl> case SameVec : <nl> mmm a / hphp / runtime / vm / jit / gvn . cpp <nl> ppp b / hphp / runtime / vm / jit / gvn . cpp <nl> bool supportsGVN ( const IRInstruction * inst ) { <nl> case CountArrayFast : <nl> case CountVec : <nl> case CountDict : <nl> - case CountShape : <nl> case CountKeyset : <nl> case Select : <nl> case StrictlyIntegerConv : <nl> mmm a / hphp / runtime / vm / jit / ir - opcode . cpp <nl> ppp b / hphp / runtime / vm / jit / ir - opcode . 
cpp <nl> folly : : Optional < Opcode > negateCmpOp ( Opcode opc ) { <nl> case SameArr : return NSameArr ; <nl> case NSameArr : return SameArr ; <nl> <nl> - case EqShape : return NeqShape ; <nl> - case NeqShape : return EqShape ; <nl> - case SameShape : return NSameShape ; <nl> - case NSameShape : return SameShape ; <nl> - <nl> case EqVec : return NeqVec ; <nl> case NeqVec : return EqVec ; <nl> case SameVec : return NSameVec ; <nl> folly : : Optional < Opcode > negateCmpOp ( Opcode opc ) { <nl> <nl> bool opcodeMayRaise ( Opcode opc ) { <nl> switch ( opc ) { <nl> - case NSameShape : <nl> - case SameShape : <nl> case NSameArr : <nl> case SameArr : <nl> case NSameDict : <nl> bool opcodeMayRaise ( Opcode opc ) { <nl> case CheckSurpriseFlagsEnter : <nl> case Clone : <nl> case CmpArr : <nl> - case CmpShape : <nl> case CmpObj : <nl> case CmpVec : <nl> case ConcatIntStr : <nl> bool opcodeMayRaise ( Opcode opc ) { <nl> case ConvArrToDict : <nl> case ConvArrToKeyset : <nl> case ConvArrToVec : <nl> - case ConvShapeToDict : <nl> case ConvCellToArr : <nl> case ConvCellToBool : <nl> case ConvCellToDbl : <nl> bool opcodeMayRaise ( Opcode opc ) { <nl> case ConvDictToArr : <nl> case ConvDictToDArr : <nl> case ConvDictToKeyset : <nl> - case ConvShapeToArr : <nl> - case ConvShapeToDArr : <nl> - case ConvShapeToKeyset : <nl> case ConvKeysetToArr : <nl> case ConvKeysetToDArr : <nl> case ConvObjToArr : <nl> bool opcodeMayRaise ( Opcode opc ) { <nl> case EmptyElem : <nl> case EmptyProp : <nl> case EqArr : <nl> - case EqShape : <nl> case EqDict : <nl> case EqObj : <nl> case EqVec : <nl> case GetMemoKey : <nl> case GtArr : <nl> case GteArr : <nl> - case GtShape : <nl> - case GteShape : <nl> case GteObj : <nl> case GteVec : <nl> case GtObj : <nl> bool opcodeMayRaise ( Opcode opc ) { <nl> case LookupFuncCached : <nl> case LtArr : <nl> case LteArr : <nl> - case LtShape : <nl> - case LteShape : <nl> case LteObj : <nl> case LteVec : <nl> case LtObj : <nl> bool opcodeMayRaise ( Opcode opc ) { <nl> case MapSet : <nl> case NativeImpl : <nl> case NeqArr : <nl> - case NeqShape : <nl> case NeqDict : <nl> case NeqObj : <nl> case NeqVec : <nl> bool opcodeMayRaise ( Opcode opc ) { <nl> case ConvDictToVArr : <nl> case ConvDictToVec : <nl> case ConvFuncToArr : <nl> - case ConvShapeToVArr : <nl> - case ConvShapeToVec : <nl> case ConvIntToArr : <nl> case ConvIntToBool : <nl> case ConvIntToDbl : <nl> bool opcodeMayRaise ( Opcode opc ) { <nl> case CountArrayFast : <nl> case CountCollection : <nl> case CountDict : <nl> - case CountShape : <nl> case CountKeyset : <nl> case CountVec : <nl> case CountWHNotDone : <nl> mmm a / hphp / runtime / vm / jit / irgen - arith . cpp <nl> ppp b / hphp / runtime / vm / jit / irgen - arith . 
cpp <nl> Opcode toArrCmpOpcode ( Op op ) { <nl> } <nl> } <nl> <nl> - Opcode toShapeCmpOpcode ( Op op ) { <nl> - switch ( op ) { <nl> - case Op : : Gt : return GtShape ; <nl> - case Op : : Gte : return GteShape ; <nl> - case Op : : Lt : return LtShape ; <nl> - case Op : : Lte : return LteShape ; <nl> - case Op : : Eq : return EqShape ; <nl> - case Op : : Same : return SameShape ; <nl> - case Op : : Neq : return NeqShape ; <nl> - case Op : : NSame : return NSameShape ; <nl> - case Op : : Cmp : return CmpShape ; <nl> - default : always_assert ( false ) ; <nl> - } <nl> - } <nl> - <nl> Opcode toVecCmpOpcode ( Op op ) { <nl> switch ( op ) { <nl> case Op : : Gt : return GtVec ; <nl> void implDblCmp ( IRGS & env , Op op , SSATmp * left , SSATmp * right ) { <nl> } <nl> } <nl> <nl> - void implShapeCmp ( IRGS & env , Op op , SSATmp * left , SSATmp * right ) { <nl> - if ( ! RuntimeOption : : EvalHackArrDVArrs ) { <nl> - assertx ( left - > type ( ) < = TArr | | left - > type ( ) < = TShape ) ; <nl> - assertx ( right - > type ( ) < = TArr | | right - > type ( ) < = TShape ) ; <nl> - } else { <nl> - assertx ( left - > type ( ) < = TDict | | left - > type ( ) < = TShape ) ; <nl> - assertx ( right - > type ( ) < = TDict | | right - > type ( ) < = TShape ) ; <nl> - } <nl> - <nl> - push ( env , gen ( env , toShapeCmpOpcode ( op ) , left , right ) ) ; <nl> - } <nl> - <nl> const StaticString <nl> s_funcToStringWarning ( Strings : : FUNC_TO_STRING ) , <nl> s_clsToStringWarning ( Strings : : CLASS_TO_STRING ) ; <nl> void implArrCmp ( IRGS & env , Op op , SSATmp * left , SSATmp * right ) { <nl> push ( env , emitMixedDictCmp ( env , op ) ) ; <nl> } else if ( rightTy < = TKeyset ) { <nl> push ( env , emitMixedKeysetCmp ( env , op ) ) ; <nl> - } else if ( rightTy < = TShape ) { <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - push ( env , emitMixedDictCmp ( env , op ) ) ; <nl> - } else { <nl> - implShapeCmp ( env , op , left , right ) ; <nl> - } <nl> } else if ( rightTy < = TClsMeth ) { <nl> raiseClsMethToVecWarningHelper ( env ) ; <nl> if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> void implDictCmp ( IRGS & env , Op op , SSATmp * left , SSATmp * right ) { <nl> ) ; <nl> push ( env , cns ( env , false ) ) ; <nl> } <nl> - } else if ( rightTy < = TShape ) { <nl> - implShapeCmp ( env , op , left , right ) ; <nl> } else { <nl> push ( env , emitMixedDictCmp ( env , op ) ) ; <nl> } <nl> void implCmp ( IRGS & env , Op op ) { <nl> else if ( leftTy < = TInt ) implIntCmp ( env , op , left , right ) ; <nl> else if ( leftTy < = TDbl ) implDblCmp ( env , op , left , right ) ; <nl> else if ( leftTy < = TArr ) implArrCmp ( env , op , left , right ) ; <nl> - else if ( leftTy < = TShape ) implShapeCmp ( env , op , left , right ) ; <nl> else if ( leftTy < = TVec ) implVecCmp ( env , op , left , right ) ; <nl> else if ( leftTy < = TDict ) implDictCmp ( env , op , left , right ) ; <nl> else if ( leftTy < = TKeyset ) implKeysetCmp ( env , op , left , right ) ; <nl> mmm a / hphp / runtime / vm / jit / irgen - basic . cpp <nl> ppp b / hphp / runtime / vm / jit / irgen - basic . 
cpp <nl> void emitCastVArray ( IRGS & env ) { <nl> } <nl> if ( src - > isA ( TVec ) ) return gen ( env , ConvVecToVArr , src ) ; <nl> if ( src - > isA ( TDict ) ) return gen ( env , ConvDictToVArr , src ) ; <nl> - if ( src - > isA ( TShape ) ) return gen ( env , ConvShapeToVArr , src ) ; <nl> if ( src - > isA ( TKeyset ) ) return gen ( env , ConvKeysetToVArr , src ) ; <nl> if ( src - > isA ( TClsMeth ) ) return gen ( env , ConvClsMethToVArr , src ) ; <nl> if ( src - > isA ( TObj ) ) return gen ( env , ConvObjToVArr , src ) ; <nl> void emitCastDArray ( IRGS & env ) { <nl> } <nl> if ( src - > isA ( TVec ) ) return gen ( env , ConvVecToDArr , src ) ; <nl> if ( src - > isA ( TDict ) ) return gen ( env , ConvDictToDArr , src ) ; <nl> - if ( src - > isA ( TShape ) ) return gen ( env , ConvShapeToDArr , src ) ; <nl> if ( src - > isA ( TKeyset ) ) return gen ( env , ConvKeysetToDArr , src ) ; <nl> if ( src - > isA ( TClsMeth ) ) return gen ( env , ConvClsMethToDArr , src ) ; <nl> if ( src - > isA ( TObj ) ) return gen ( env , ConvObjToDArr , src ) ; <nl> void emitCastVec ( IRGS & env ) { <nl> if ( src - > isA ( TVec ) ) return src ; <nl> if ( src - > isA ( TArr ) ) return gen ( env , ConvArrToVec , src ) ; <nl> if ( src - > isA ( TDict ) ) return gen ( env , ConvDictToVec , src ) ; <nl> - if ( src - > isA ( TShape ) ) return gen ( env , ConvShapeToVec , src ) ; <nl> if ( src - > isA ( TKeyset ) ) return gen ( env , ConvKeysetToVec , src ) ; <nl> if ( src - > isA ( TClsMeth ) ) return gen ( env , ConvClsMethToVec , src ) ; <nl> if ( src - > isA ( TObj ) ) return gen ( env , ConvObjToVec , src ) ; <nl> void emitCastDict ( IRGS & env ) { <nl> env , <nl> [ & ] { <nl> if ( src - > isA ( TDict ) ) return src ; <nl> - if ( src - > isA ( TShape ) ) return gen ( env , ConvShapeToDict , src ) ; <nl> if ( src - > isA ( TArr ) ) return gen ( env , ConvArrToDict , src ) ; <nl> if ( src - > isA ( TVec ) ) return gen ( env , ConvVecToDict , src ) ; <nl> if ( src - > isA ( TKeyset ) ) return gen ( env , ConvKeysetToDict , src ) ; <nl> void emitCastKeyset ( IRGS & env ) { <nl> if ( src - > isA ( TArr ) ) return gen ( env , ConvArrToKeyset , src ) ; <nl> if ( src - > isA ( TVec ) ) return gen ( env , ConvVecToKeyset , src ) ; <nl> if ( src - > isA ( TDict ) ) return gen ( env , ConvDictToKeyset , src ) ; <nl> - if ( src - > isA ( TShape ) ) return gen ( env , ConvShapeToKeyset , src ) ; <nl> if ( src - > isA ( TClsMeth ) ) return gen ( env , ConvClsMethToKeyset , src ) ; <nl> if ( src - > isA ( TObj ) ) return gen ( env , ConvObjToKeyset , src ) ; <nl> if ( src - > isA ( TRecord ) ) PUNT ( CastKeysetRecord ) ; / / TODO : T53309767 <nl> mmm a / hphp / runtime / vm / jit / irgen - builtin . cpp <nl> ppp b / hphp / runtime / vm / jit / irgen - builtin . cpp <nl> SSATmp * opt_foldable ( IRGS & env , <nl> env , <nl> make_tv < KindOfPersistentKeyset > ( scalar_array ( ) ) <nl> ) ; <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> - return cns ( <nl> - env , <nl> - make_tv < KindOfPersistentShape > ( scalar_array ( ) ) <nl> - ) ; <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> return cns ( <nl> SSATmp * builtinCall ( IRGS & env , <nl> if ( ty - > maybe ( TPersistentVec ) ) * ty | = TVec ; <nl> if ( ty - > maybe ( TPersistentDict ) ) * ty | = TDict ; <nl> if ( ty - > maybe ( TPersistentKeyset ) ) * ty | = TKeyset ; <nl> - if ( ty - > maybe ( TPersistentShape ) ) * ty | = TShape ; <nl> if ( ty - > maybe ( TPersistentStr ) ) * ty | = TStr ; <nl> } <nl> if ( params . 
forNativeImpl ) { <nl> mmm a / hphp / runtime / vm / jit / irgen - cns . cpp <nl> ppp b / hphp / runtime / vm / jit / irgen - cns . cpp <nl> SSATmp * staticTVCns ( IRGS & env , const TypedValue * tv ) { <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : return cns ( env , tv - > m_data . parr ) ; <nl> <nl> mmm a / hphp / runtime / vm / jit / irgen - interpone . cpp <nl> ppp b / hphp / runtime / vm / jit / irgen - interpone . cpp <nl> Type arithOpResult ( Type t1 , Type t2 ) { <nl> auto both = t1 | t2 ; <nl> if ( both . maybe ( TDbl ) ) return TDbl ; <nl> if ( both . maybe ( TArr ) ) return TArr ; <nl> - if ( both . maybe ( TShape ) ) return TShape ; <nl> if ( both . maybe ( TVec ) ) return TVec ; <nl> if ( both . maybe ( TDict ) ) return TDict ; <nl> if ( both . maybe ( TKeyset ) ) return TKeyset ; <nl> mmm a / hphp / runtime / vm / jit / irgen - types . cpp <nl> ppp b / hphp / runtime / vm / jit / irgen - types . cpp <nl> SSATmp * isDictImpl ( IRGS & env , SSATmp * src ) { <nl> <nl> static auto const tycheck = InstrumentedTypecheck < IsDictLogging > { <nl> { TDict , true , mask & ProvLogging } , <nl> - { RO : : EvalHackArrDVArrs , <nl> - TShape , true , mask & ProvLogging } , <nl> { TArr , false , mask & DVArrayLogging } , <nl> } ; <nl> <nl> SSATmp * isArrayImpl ( IRGS & env , SSATmp * src ) { <nl> / * cases for shapes and clsmeth * / <nl> { ! RO : : EvalHackArrDVArrs & & RO : : EvalIsCompatibleClsMethType , <nl> TClsMeth , true , mask & ClsMethNotice } , <nl> - { ! RO : : EvalHackArrDVArrs , <nl> - TShape , true , None } , <nl> / * HAC logging * / <nl> { TVec , false , mask & ( ProvLogging | VecLogging ) } , <nl> { TDict , false , mask & ( ProvLogging | DictLogging ) } , <nl> SSATmp * isArrayImpl ( IRGS & env , SSATmp * src ) { <nl> { TArr , true , None } , <nl> { ! RO : : EvalHackArrDVArrs & & RO : : EvalIsCompatibleClsMethType , <nl> TClsMeth , true , mask & ClsMethNotice } , <nl> - { ! RO : : EvalHackArrDVArrs , <nl> - TShape , true , None } , <nl> } ; <nl> <nl> auto const instrumentation = [ & ] ( IsArrayLogging type , SSATmp * src ) { <nl> mmm a / hphp / runtime / vm / jit / irlower - array . cpp <nl> ppp b / hphp / runtime / vm / jit / irlower - array . cpp <nl> void cgCountDict ( IRLS & env , const IRInstruction * inst ) { <nl> implCountArrayLike ( env , inst ) ; <nl> } <nl> <nl> - void cgCountShape ( IRLS & env , const IRInstruction * inst ) { <nl> - implCountArrayLike ( env , inst ) ; <nl> - } <nl> - <nl> void cgCountKeyset ( IRLS & env , const IRInstruction * inst ) { <nl> implCountArrayLike ( env , inst ) ; <nl> } <nl> mmm a / hphp / runtime / vm / jit / irlower - cmp . cpp <nl> ppp b / hphp / runtime / vm / jit / irlower - cmp . cpp <nl> IMPL_OPCODE_CALL ( SameArr ) ; <nl> IMPL_OPCODE_CALL ( NSameArr ) ; <nl> IMPL_OPCODE_CALL ( CmpArr ) ; <nl> <nl> - IMPL_OPCODE_CALL ( GtShape ) ; <nl> - IMPL_OPCODE_CALL ( GteShape ) ; <nl> - IMPL_OPCODE_CALL ( LtShape ) ; <nl> - IMPL_OPCODE_CALL ( LteShape ) ; <nl> - IMPL_OPCODE_CALL ( EqShape ) ; <nl> - IMPL_OPCODE_CALL ( NeqShape ) ; <nl> - IMPL_OPCODE_CALL ( SameShape ) ; <nl> - IMPL_OPCODE_CALL ( NSameShape ) ; <nl> - IMPL_OPCODE_CALL ( CmpShape ) ; <nl> - <nl> IMPL_OPCODE_CALL ( GtVec ) ; <nl> IMPL_OPCODE_CALL ( GteVec ) ; <nl> IMPL_OPCODE_CALL ( LtVec ) ; <nl> mmm a / hphp / runtime / vm / jit / irlower - cns . cpp <nl> ppp b / hphp / runtime / vm / jit / irlower - cns . 
cpp <nl> void cgLdCns ( IRLS & env , const IRInstruction * inst ) { <nl> case KindOfPersistentVec : <nl> case KindOfPersistentDict : <nl> case KindOfPersistentKeyset : <nl> - case KindOfPersistentShape : <nl> case KindOfPersistentArray : <nl> case KindOfString : <nl> case KindOfVec : <nl> case KindOfDict : <nl> case KindOfKeyset : <nl> - case KindOfShape : <nl> case KindOfArray : <nl> case KindOfObject : <nl> case KindOfResource : <nl> mmm a / hphp / runtime / vm / jit / irlower - conv . cpp <nl> ppp b / hphp / runtime / vm / jit / irlower - conv . cpp <nl> static ArrayData * convDictToVArrImpl ( ArrayData * adIn ) { <nl> return a ; <nl> } <nl> <nl> - static ArrayData * convShapeToVArrImpl ( ArrayData * adIn ) { <nl> - ArrayData * a ; <nl> - assertx ( adIn - > isShape ( ) ) ; <nl> - a = MixedArray : : ToVArrayShape ( adIn , adIn - > cowCheck ( ) ) ; <nl> - assertx ( a ! = adIn ) ; <nl> - assertx ( a - > isPacked ( ) ) ; <nl> - assertx ( a - > isVArray ( ) ) ; <nl> - decRefArr ( adIn ) ; <nl> - return a ; <nl> - } <nl> - <nl> static ArrayData * convKeysetToVArrImpl ( ArrayData * adIn ) { <nl> assertx ( ! RuntimeOption : : EvalHackArrDVArrs ) ; <nl> assertx ( adIn - > isKeyset ( ) ) ; <nl> void cgConvDictToVArr ( IRLS & env , const IRInstruction * inst ) { <nl> convToVArrHelper ( env , inst , CallSpec : : direct ( convDictToVArrImpl ) , false ) ; <nl> } <nl> <nl> - void cgConvShapeToVArr ( IRLS & env , const IRInstruction * inst ) { <nl> - convToVArrHelper ( env , inst , CallSpec : : direct ( convShapeToVArrImpl ) , false ) ; <nl> - } <nl> - <nl> void cgConvKeysetToVArr ( IRLS & env , const IRInstruction * inst ) { <nl> convToVArrHelper ( env , inst , CallSpec : : direct ( convKeysetToVArrImpl ) , false ) ; <nl> } <nl> static ArrayData * convDictToDArrImpl ( ArrayData * adIn ) { <nl> return a ; <nl> } <nl> <nl> - static ArrayData * convShapeToDArrImpl ( ArrayData * adIn ) { <nl> - assertx ( adIn - > isShape ( ) ) ; <nl> - auto a = MixedArray : : ToDArrayShape ( adIn , adIn - > cowCheck ( ) ) ; <nl> - assertx ( a - > isMixed ( ) ) ; <nl> - assertx ( a - > isDArray ( ) ) ; <nl> - if ( a ! = adIn ) decRefArr ( adIn ) ; <nl> - return a ; <nl> - } <nl> - <nl> static ArrayData * convKeysetToDArrImpl ( ArrayData * adIn ) { <nl> assertx ( ! 
RuntimeOption : : EvalHackArrDVArrs ) ; <nl> assertx ( adIn - > isKeyset ( ) ) ; <nl> void cgConvDictToDArr ( IRLS & env , const IRInstruction * inst ) { <nl> convToDArrHelper ( env , inst , CallSpec : : direct ( convDictToDArrImpl ) , true ) ; <nl> } <nl> <nl> - void cgConvShapeToDArr ( IRLS & env , const IRInstruction * inst ) { <nl> - convToDArrHelper ( env , inst , CallSpec : : direct ( convShapeToDArrImpl ) , true ) ; <nl> - } <nl> - <nl> void cgConvKeysetToDArr ( IRLS & env , const IRInstruction * inst ) { <nl> / / These have to sync because of Hack array compat notices <nl> convToDArrHelper ( env , inst , CallSpec : : direct ( convKeysetToDArrImpl ) , true ) ; <nl> IMPL_OPCODE_CALL ( ConvStrToArr ) ; <nl> IMPL_OPCODE_CALL ( ConvFuncToArr ) ; <nl> IMPL_OPCODE_CALL ( ConvVecToArr ) ; <nl> IMPL_OPCODE_CALL ( ConvDictToArr ) ; <nl> - IMPL_OPCODE_CALL ( ConvShapeToArr ) ; <nl> IMPL_OPCODE_CALL ( ConvKeysetToArr ) ; <nl> IMPL_OPCODE_CALL ( ConvClsMethToArr ) ; <nl> IMPL_OPCODE_CALL ( ConvObjToArr ) ; <nl> IMPL_OPCODE_CALL ( ConvClsMethToDArr ) ; <nl> <nl> IMPL_OPCODE_CALL ( ConvArrToVec ) ; <nl> IMPL_OPCODE_CALL ( ConvDictToVec ) ; <nl> - IMPL_OPCODE_CALL ( ConvShapeToVec ) ; <nl> IMPL_OPCODE_CALL ( ConvKeysetToVec ) ; <nl> IMPL_OPCODE_CALL ( ConvClsMethToVec ) ; <nl> IMPL_OPCODE_CALL ( ConvObjToVec ) ; <nl> <nl> IMPL_OPCODE_CALL ( ConvArrToDict ) ; <nl> - IMPL_OPCODE_CALL ( ConvShapeToDict ) ; <nl> IMPL_OPCODE_CALL ( ConvVecToDict ) ; <nl> IMPL_OPCODE_CALL ( ConvKeysetToDict ) ; <nl> IMPL_OPCODE_CALL ( ConvClsMethToDict ) ; <nl> IMPL_OPCODE_CALL ( ConvObjToDict ) ; <nl> IMPL_OPCODE_CALL ( ConvArrToKeyset ) ; <nl> IMPL_OPCODE_CALL ( ConvVecToKeyset ) ; <nl> IMPL_OPCODE_CALL ( ConvDictToKeyset ) ; <nl> - IMPL_OPCODE_CALL ( ConvShapeToKeyset ) ; <nl> IMPL_OPCODE_CALL ( ConvClsMethToKeyset ) ; <nl> IMPL_OPCODE_CALL ( ConvObjToKeyset ) ; <nl> <nl> mmm a / hphp / runtime / vm / jit / irlower - internal - inl . h <nl> ppp b / hphp / runtime / vm / jit / irlower - internal - inl . h <nl> void emitTypeTest ( Vout & v , IRLS & env , Type type , <nl> if ( type < = TPersistentStr ) return cmp ( KindOfPersistentString , CC_E ) ; <nl> if ( type < = TStr ) return cmp ( KindOfPersistentString , CC_AE ) ; <nl> if ( type < = TArr ) return cmp ( KindOfArray , CC_LE ) ; <nl> - if ( type < = TShape ) return persistent_type ( KindOfPersistentShape ) ; <nl> if ( type < = TVec ) return persistent_type ( KindOfPersistentVec ) ; <nl> if ( type < = TDict ) return persistent_type ( KindOfPersistentDict ) ; <nl> if ( type < = TKeyset ) return persistent_type ( KindOfPersistentKeyset ) ; <nl> mmm a / hphp / runtime / vm / jit / irlower - refcount . cpp <nl> ppp b / hphp / runtime / vm / jit / irlower - refcount . cpp <nl> CallSpec getDtorCallSpec ( Vout & v , Vreg obj , DataType type , ArgGroup & args ) { <nl> switch ( type ) { <nl> case KindOfString : <nl> return CallSpec : : method ( & StringData : : release ) ; <nl> - case KindOfShape : <nl> - return CallSpec : : direct ( MixedArray : : Release ) ; <nl> case KindOfArray : <nl> return CallSpec : : method ( & ArrayData : : release ) ; <nl> case KindOfVec : <nl> mmm a / hphp / runtime / vm / jit / memory - effects . cpp <nl> ppp b / hphp / runtime / vm / jit / memory - effects . cpp <nl> MemEffects memory_effects_impl ( const IRInstruction & inst ) { <nl> case NSameKeyset : <nl> return may_load_store ( AElemAny , AEmpty ) ; <nl> <nl> - case SameShape : <nl> - case NSameShape : <nl> - return may_load_store ( <nl> - RuntimeOption : : EvalHackArrDVArrs ? 
AElemAny : AEmpty , <nl> - AEmpty ) ; <nl> - <nl> case AKExistsObj : <nl> return may_load_store ( AHeapAny , AHeapAny ) ; <nl> <nl> MemEffects memory_effects_impl ( const IRInstruction & inst ) { <nl> case CountArrayFast : <nl> case CountVec : <nl> case CountDict : <nl> - case CountShape : <nl> case CountKeyset : <nl> case InstanceOf : <nl> case InstanceOfBitmask : <nl> MemEffects memory_effects_impl ( const IRInstruction & inst ) { <nl> case EqArr : <nl> case NeqArr : <nl> case CmpArr : <nl> - case GtShape : <nl> - case GteShape : <nl> - case LtShape : <nl> - case LteShape : <nl> - case EqShape : <nl> - case NeqShape : <nl> - case CmpShape : <nl> case GtVec : <nl> case GteVec : <nl> case LtVec : <nl> MemEffects memory_effects_impl ( const IRInstruction & inst ) { <nl> case ConvCellToDbl : <nl> case ConvArrToVec : <nl> case ConvArrToDict : <nl> - case ConvShapeToDict : <nl> case ConvObjToVec : <nl> case ConvObjToDict : <nl> case ConvObjToKeyset : <nl> MemEffects memory_effects_impl ( const IRInstruction & inst ) { <nl> case ConvArrToKeyset : / / Decrefs input values <nl> case ConvVecToKeyset : <nl> case ConvDictToKeyset : <nl> - case ConvShapeToKeyset : <nl> case ConvDictToDArr : / / These 4 may raise Hack array compat notices <nl> - case ConvShapeToDArr : <nl> case ConvKeysetToDArr : <nl> case ConvDictToArr : <nl> - case ConvShapeToArr : <nl> case ConvKeysetToArr : <nl> case ConvClsMethToArr : <nl> case ConvClsMethToDArr : <nl> MemEffects memory_effects_impl ( const IRInstruction & inst ) { <nl> case ConvVecToArr : <nl> case ConvArrToNonDVArr : <nl> case ConvDictToVec : <nl> - case ConvShapeToVec : <nl> case ConvKeysetToVec : <nl> case ConvVecToDict : <nl> case ConvKeysetToDict : <nl> case ConvArrToVArr : <nl> case ConvVecToVArr : <nl> case ConvDictToVArr : <nl> - case ConvShapeToVArr : <nl> case ConvKeysetToVArr : <nl> case ConvArrToDArr : <nl> case ConvVecToDArr : <nl> mmm a / hphp / runtime / vm / jit / native - calls . cpp <nl> ppp b / hphp / runtime / vm / jit / native - calls . 
cpp <nl> static CallMap s_callMap { <nl> / / These two need to sync because of Hack array compat notices <nl> { ConvDictToArr , convDictToArrHelper , DSSA , SSync , <nl> { { SSA , 0 } } } , <nl> - { ConvShapeToArr , convShapeToArrHelper , DSSA , SSync , <nl> - { { SSA , 0 } } } , <nl> { ConvKeysetToArr , convKeysetToArrHelper , DSSA , SSync , <nl> { { SSA , 0 } } } , <nl> { ConvCellToArr , convCellToArrHelper , DSSA , SSync , <nl> static CallMap s_callMap { <nl> { { SSA , 0 } } } , <nl> { ConvDictToVec , convDictToVecHelper , DSSA , SSync , <nl> { { SSA , 0 } } } , <nl> - { ConvShapeToVec , convShapeToVecHelper , DSSA , SSync , <nl> - { { SSA , 0 } } } , <nl> { ConvKeysetToVec , convKeysetToVecHelper , DSSA , SSync , <nl> { { SSA , 0 } } } , <nl> { ConvClsMethToVec , convClsMethToVecHealper , DSSA , SSync , <nl> static CallMap s_callMap { <nl> <nl> { ConvArrToDict , convArrToDictHelper , DSSA , SSync , <nl> { { SSA , 0 } } } , <nl> - { ConvShapeToDict , convShapeToDictHelper , DSSA , SSync , <nl> - { { SSA , 0 } } } , <nl> { ConvVecToDict , convVecToDictHelper , DSSA , SSync , <nl> { { SSA , 0 } } } , <nl> { ConvKeysetToDict , convKeysetToDictHelper , DSSA , SSync , <nl> static CallMap s_callMap { <nl> { { SSA , 0 } } } , <nl> { ConvDictToKeyset , convDictToKeysetHelper , DSSA , SSync , <nl> { { SSA , 0 } } } , <nl> - { ConvShapeToKeyset , convShapeToKeysetHelper , DSSA , SSync , <nl> - { { SSA , 0 } } } , <nl> { ConvClsMethToKeyset , convClsMethToKeysetHealper , DSSA , SSync , <nl> { { SSA , 0 } } } , <nl> { ConvObjToKeyset , convObjToKeysetHelper , DSSA , SSync , <nl> static CallMap s_callMap { <nl> { { SSA , 0 } , { SSA , 1 } } } , <nl> { CmpArr , ArrayData : : Compare , DSSA , SSync , <nl> { { SSA , 0 } , { SSA , 1 } } } , <nl> - { GtShape , MixedArray : : ShapeGt , DSSA , SSync , <nl> - { { SSA , 0 } , { SSA , 1 } } } , <nl> - { GteShape , MixedArray : : ShapeGte , DSSA , SSync , <nl> - { { SSA , 0 } , { SSA , 1 } } } , <nl> - { LtShape , MixedArray : : ShapeLt , DSSA , SSync , <nl> - { { SSA , 0 } , { SSA , 1 } } } , <nl> - { LteShape , MixedArray : : ShapeLte , DSSA , SSync , <nl> - { { SSA , 0 } , { SSA , 1 } } } , <nl> - { EqShape , MixedArray : : ShapeEqual , DSSA , SSync , <nl> - { { SSA , 0 } , { SSA , 1 } } } , <nl> - { NeqShape , MixedArray : : ShapeNotEqual , DSSA , SSync , <nl> - { { SSA , 0 } , { SSA , 1 } } } , <nl> - { SameShape , MixedArray : : ShapeSame , DSSA , SSync , <nl> - { { SSA , 0 } , { SSA , 1 } } } , <nl> - { NSameShape , MixedArray : : ShapeNotSame , DSSA , SSync , <nl> - { { SSA , 0 } , { SSA , 1 } } } , <nl> - { CmpShape , MixedArray : : ShapeCompare , DSSA , SSync , <nl> - { { SSA , 0 } , { SSA , 1 } } } , <nl> { GtVec , PackedArray : : VecGt , DSSA , SSync , <nl> { { SSA , 0 } , { SSA , 1 } } } , <nl> { GteVec , PackedArray : : VecGte , DSSA , SSync , <nl> mmm a / hphp / runtime / vm / jit / simplify . cpp <nl> ppp b / hphp / runtime / vm / jit / simplify . cpp <nl> SSATmp * isTypeImpl ( State & env , const IRInstruction * inst ) { <nl> / / the distinction matters to you here , be careful . 
<nl> assertx ( IMPLIES ( type < = TStr , type = = TStr ) ) ; <nl> assertx ( IMPLIES ( type < = TArr , type = = TArr ) ) ; <nl> - assertx ( IMPLIES ( type < = TShape , type = = TShape ) ) ; <nl> assertx ( IMPLIES ( type < = TVec , type = = TVec ) ) ; <nl> assertx ( IMPLIES ( type < = TDict , type = = TDict ) ) ; <nl> assertx ( IMPLIES ( type < = TKeyset , type = = TKeyset ) ) ; <nl> X ( Arr , arrVal , NonDVArr ) <nl> SSATmp * simplifyConvCellToArr ( State & env , const IRInstruction * inst ) { <nl> auto const src = inst - > src ( 0 ) ; <nl> if ( src - > isA ( TArr ) ) return gen ( env , ConvArrToNonDVArr , src ) ; <nl> - if ( src - > isA ( TShape ) ) return gen ( env , ConvShapeToArr , inst - > taken ( ) , src ) ; <nl> if ( src - > isA ( TVec ) ) return gen ( env , ConvVecToArr , src ) ; <nl> if ( src - > isA ( TDict ) ) return gen ( env , ConvDictToArr , inst - > taken ( ) , src ) ; <nl> if ( src - > isA ( TKeyset ) ) return gen ( env , ConvKeysetToArr , inst - > taken ( ) , src ) ; <nl> SSATmp * simplifyConvCellToBool ( State & env , const IRInstruction * inst ) { <nl> auto const length = gen ( env , CountDict , src ) ; <nl> return gen ( env , NeqInt , length , cns ( env , 0 ) ) ; <nl> } <nl> - if ( srcType < = TShape ) { <nl> - auto const length = gen ( env , CountShape , src ) ; <nl> - return gen ( env , NeqInt , length , cns ( env , 0 ) ) ; <nl> - } <nl> if ( srcType < = TKeyset ) { <nl> auto const length = gen ( env , CountKeyset , src ) ; <nl> return gen ( env , NeqInt , length , cns ( env , 0 ) ) ; <nl> SSATmp * simplifyConvCellToStr ( State & env , const IRInstruction * inst ) { <nl> ) ; <nl> } <nl> if ( srcType < = TNull ) return cns ( env , staticEmptyString ( ) ) ; <nl> - if ( srcType < = TArr | | <nl> - ( ! RuntimeOption : : EvalHackArrDVArrs & & srcType < = TShape ) ) { <nl> + if ( srcType < = TArr ) { <nl> gen ( env , RaiseNotice , catchTrace , cns ( env , s_msgArrToStr . get ( ) ) ) ; <nl> return cns ( env , s_Array . get ( ) ) ; <nl> } <nl> SSATmp * simplifyConvCellToStr ( State & env , const IRInstruction * inst ) { <nl> gen ( env , RaiseNotice , catchTrace , cns ( env , s_msgVecToStr . get ( ) ) ) ; <nl> return cns ( env , s_Vec . get ( ) ) ; <nl> } <nl> - if ( srcType < = TDict | | <nl> - ( RuntimeOption : : EvalHackArrDVArrs & & srcType < = TShape ) ) { <nl> + if ( srcType < = TDict ) { <nl> gen ( env , RaiseNotice , catchTrace , cns ( env , s_msgDictToStr . get ( ) ) ) ; <nl> return cns ( env , s_Dict . get ( ) ) ; <nl> } <nl> SSATmp * simplifyConvCellToInt ( State & env , const IRInstruction * inst ) { <nl> auto const length = gen ( env , Count , src ) ; <nl> return gen ( env , Select , length , cns ( env , 1 ) , cns ( env , 0 ) ) ; <nl> } <nl> - if ( srcType < = TShape ) { <nl> - auto const length = gen ( env , CountShape , src ) ; <nl> - return gen ( env , Select , length , cns ( env , 1 ) , cns ( env , 0 ) ) ; <nl> - } <nl> if ( srcType < = TVec ) { <nl> auto const length = gen ( env , CountVec , src ) ; <nl> return gen ( env , Select , length , cns ( env , 1 ) , cns ( env , 0 ) ) ; <nl> SSATmp * simplifyConvCellToDbl ( State & env , const IRInstruction * inst ) { <nl> if ( srcType < = TDbl ) return src ; <nl> if ( srcType < = TNull ) return cns ( env , 0 . 
0 ) ; <nl> if ( srcType < = TArr ) return gen ( env , ConvArrToDbl , src ) ; <nl> - if ( srcType < = TShape ) { <nl> - auto const length = gen ( env , CountShape , src ) ; <nl> - return gen ( env , ConvBoolToDbl , gen ( env , ConvIntToBool , length ) ) ; <nl> - } <nl> if ( srcType < = TVec ) { <nl> auto const length = gen ( env , CountVec , src ) ; <nl> return gen ( env , ConvBoolToDbl , gen ( env , ConvIntToBool , length ) ) ; <nl> SSATmp * simplifyCount ( State & env , const IRInstruction * inst ) { <nl> if ( ty < = oneTy ) return cns ( env , 1 ) ; <nl> <nl> if ( ty < = TArr ) return gen ( env , CountArray , val ) ; <nl> - if ( ty < = TShape ) return gen ( env , CountShape , val ) ; <nl> if ( ty < = TVec ) return gen ( env , CountVec , val ) ; <nl> if ( ty < = TDict ) return gen ( env , CountDict , val ) ; <nl> if ( ty < = TKeyset ) return gen ( env , CountKeyset , val ) ; <nl> SSATmp * simplifyCountDict ( State & env , const IRInstruction * inst ) { <nl> return simplifyCountHelper ( env , inst , TDict , & SSATmp : : dictVal ) ; <nl> } <nl> <nl> - SSATmp * simplifyCountShape ( State & env , const IRInstruction * inst ) { <nl> - return simplifyCountHelper ( env , inst , TShape , & SSATmp : : shapeVal ) ; <nl> - } <nl> - <nl> SSATmp * simplifyCountKeyset ( State & env , const IRInstruction * inst ) { <nl> return simplifyCountHelper ( env , inst , TKeyset , & SSATmp : : keysetVal ) ; <nl> } <nl> SSATmp * simplifyWork ( State & env , const IRInstruction * inst ) { <nl> X ( CountArrayFast ) <nl> X ( CountVec ) <nl> X ( CountDict ) <nl> - X ( CountShape ) <nl> X ( CountKeyset ) <nl> X ( DecRef ) <nl> X ( DecRefNZ ) <nl> mmm a / hphp / runtime / vm / jit / ssa - tmp . cpp <nl> ppp b / hphp / runtime / vm / jit / ssa - tmp . cpp <nl> Variant SSATmp : : variantVal ( ) const { <nl> case KindOfPersistentKeyset : <nl> return Variant { keysetVal ( ) , KindOfPersistentKeyset , <nl> Variant : : PersistentArrInit { } } ; <nl> - case KindOfPersistentShape : <nl> - return Variant { shapeVal ( ) , KindOfPersistentShape , <nl> - Variant : : PersistentArrInit { } } ; <nl> case KindOfPersistentArray : <nl> return Variant { arrVal ( ) , KindOfPersistentArray , <nl> Variant : : PersistentArrInit { } } ; <nl> Variant SSATmp : : variantVal ( ) const { <nl> case KindOfVec : <nl> case KindOfDict : <nl> case KindOfKeyset : <nl> - case KindOfShape : <nl> case KindOfArray : <nl> case KindOfObject : <nl> case KindOfResource : <nl> mmm a / hphp / runtime / vm / jit / translator - runtime . cpp <nl> ppp b / hphp / runtime / vm / jit / translator - runtime . cpp <nl> ArrayData * convDictToArrHelper ( ArrayData * adIn ) { <nl> return a ; <nl> } <nl> <nl> - ArrayData * convShapeToArrHelper ( ArrayData * adIn ) { <nl> - assertx ( adIn - > isShape ( ) ) ; <nl> - auto a = MixedArray : : ToPHPArrayShape ( adIn , adIn - > cowCheck ( ) ) ; <nl> - if ( a ! = adIn ) decRefArr ( adIn ) ; <nl> - assertx ( a - > isPHPArray ( ) ) ; <nl> - assertx ( a - > isNotDVArray ( ) ) ; <nl> - return a ; <nl> - } <nl> - <nl> ArrayData * convKeysetToArrHelper ( ArrayData * adIn ) { <nl> assertx ( adIn - > isKeyset ( ) ) ; <nl> auto a = SetArray : : ToPHPArray ( adIn , adIn - > cowCheck ( ) ) ; <nl> ArrayData * convDictToVecHelper ( ArrayData * adIn ) { <nl> return a ; <nl> } <nl> <nl> - ArrayData * convShapeToVecHelper ( ArrayData * adIn ) { <nl> - assertx ( adIn - > isShape ( ) ) ; <nl> - auto a = MixedArray : : ToVecShape ( adIn , adIn - > cowCheck ( ) ) ; <nl> - assertx ( a ! 
= adIn ) ; <nl> - decRefArr ( adIn ) ; <nl> - return a ; <nl> - } <nl> - <nl> ArrayData * convKeysetToVecHelper ( ArrayData * adIn ) { <nl> assertx ( adIn - > isKeyset ( ) ) ; <nl> auto a = SetArray : : ToVec ( adIn , adIn - > cowCheck ( ) ) ; <nl> ArrayData * convArrToDictHelper ( ArrayData * adIn ) { <nl> return a ; <nl> } <nl> <nl> - ArrayData * convShapeToDictHelper ( ArrayData * adIn ) { <nl> - assertx ( adIn - > isShape ( ) ) ; <nl> - auto a = MixedArray : : ToDictShape ( adIn , adIn - > cowCheck ( ) ) ; <nl> - if ( a ! = adIn ) decRefArr ( adIn ) ; <nl> - return a ; <nl> - } <nl> - <nl> ArrayData * convVecToDictHelper ( ArrayData * adIn ) { <nl> assertx ( adIn - > isVecArray ( ) ) ; <nl> auto a = PackedArray : : ToDictVec ( adIn , adIn - > cowCheck ( ) ) ; <nl> ArrayData * convDictToKeysetHelper ( ArrayData * adIn ) { <nl> return a ; <nl> } <nl> <nl> - ArrayData * convShapeToKeysetHelper ( ArrayData * adIn ) { <nl> - assertx ( adIn - > isShape ( ) ) ; <nl> - auto a = MixedArray : : ToKeysetShape ( adIn , adIn - > cowCheck ( ) ) ; <nl> - if ( a ! = adIn ) decRefArr ( adIn ) ; <nl> - return a ; <nl> - } <nl> - <nl> ArrayData * convObjToKeysetHelper ( ObjectData * obj ) { <nl> auto a = castObjToKeyset ( obj ) ; <nl> assertx ( a - > isKeyset ( ) ) ; <nl> int64_t switchStringHelper ( StringData * s , int64_t base , int64_t nTargets ) { <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> case KindOfObject : <nl> mmm a / hphp / runtime / vm / jit / translator - runtime . h <nl> ppp b / hphp / runtime / vm / jit / translator - runtime . h <nl> ArrayData * convCellToArrHelper ( TypedValue tv ) ; <nl> ArrayData * convArrToNonDVArrHelper ( ArrayData * a ) ; <nl> ArrayData * convVecToArrHelper ( ArrayData * a ) ; <nl> ArrayData * convDictToArrHelper ( ArrayData * a ) ; <nl> - ArrayData * convShapeToArrHelper ( ArrayData * a ) ; <nl> ArrayData * convKeysetToArrHelper ( ArrayData * a ) ; <nl> ArrayData * convArrToVecHelper ( ArrayData * a ) ; <nl> ArrayData * convDictToVecHelper ( ArrayData * a ) ; <nl> - ArrayData * convShapeToVecHelper ( ArrayData * a ) ; <nl> ArrayData * convKeysetToVecHelper ( ArrayData * a ) ; <nl> ArrayData * convObjToVecHelper ( ObjectData * o ) ; <nl> ArrayData * convCellToVecHelper ( TypedValue tv ) ; <nl> ArrayData * convArrToDictHelper ( ArrayData * a ) ; <nl> - ArrayData * convShapeToDictHelper ( ArrayData * a ) ; <nl> ArrayData * convVecToDictHelper ( ArrayData * a ) ; <nl> ArrayData * convKeysetToDictHelper ( ArrayData * a ) ; <nl> ArrayData * convObjToDictHelper ( ObjectData * o ) ; <nl> ArrayData * convCellToDictHelper ( TypedValue tv ) ; <nl> ArrayData * convArrToKeysetHelper ( ArrayData * a ) ; <nl> ArrayData * convVecToKeysetHelper ( ArrayData * a ) ; <nl> ArrayData * convDictToKeysetHelper ( ArrayData * a ) ; <nl> - ArrayData * convShapeToKeysetHelper ( ArrayData * a ) ; <nl> ArrayData * convObjToKeysetHelper ( ObjectData * o ) ; <nl> ArrayData * convCellToKeysetHelper ( TypedValue tv ) ; <nl> ArrayData * convClsMethToArrHealper ( ClsMethDataRef clsmeth ) ; <nl> mmm a / hphp / runtime / vm / jit / type - inl . h <nl> ppp b / hphp / runtime / vm / jit / type - inl . 
h <nl> inline Type for_const ( const StringData * sd ) { <nl> } <nl> inline Type for_const ( const ArrayData * ad ) { <nl> assertx ( ad - > isStatic ( ) ) ; <nl> - if ( ad - > isShape ( ) ) return TPersistentShape ; <nl> if ( ad - > isPHPArray ( ) ) return Type : : StaticArray ( ad - > kind ( ) ) ; <nl> if ( ad - > isVecArray ( ) ) return TStaticVec ; <nl> if ( ad - > isDict ( ) ) return TStaticDict ; <nl> inline bool Type : : isKnownDataType ( ) const { <nl> assertx ( * this < = TGen ) ; <nl> <nl> / / Some unions correspond to single KindOfs . <nl> - return subtypeOfAny ( TStr , TArr , TVec , TDict , TShape , <nl> + return subtypeOfAny ( TStr , TArr , TVec , TDict , <nl> TKeyset , TBoxedCell ) | | ! isUnion ( ) ; <nl> } <nl> <nl> inline Type Type : : cns ( const TypedValue & tv ) { <nl> assertx ( tv . m_data . parr - > isKeyset ( ) ) ; <nl> return type_detail : : for_const ( tv . m_data . parr ) ; <nl> <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> - assertx ( tv . m_data . parr - > isShape ( ) ) ; <nl> - return type_detail : : for_const ( tv . m_data . parr ) ; <nl> - <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> assertx ( tv . m_data . parr - > isPHPArray ( ) ) ; <nl> IMPLEMENT_CNS_VAL ( TStaticStr , str , const StringData * ) <nl> IMPLEMENT_CNS_VAL ( TStaticArr , arr , const ArrayData * ) <nl> IMPLEMENT_CNS_VAL ( TStaticVec , vec , const ArrayData * ) <nl> IMPLEMENT_CNS_VAL ( TStaticDict , dict , const ArrayData * ) <nl> - IMPLEMENT_CNS_VAL ( TPersistentShape , shape , const ArrayData * ) <nl> IMPLEMENT_CNS_VAL ( TStaticKeyset , keyset , const ArrayData * ) <nl> IMPLEMENT_CNS_VAL ( TFunc , func , const HPHP : : Func * ) <nl> IMPLEMENT_CNS_VAL ( TCls , cls , const Class * ) <nl> inline Type Type : : Dict ( const RepoAuthType : : Array * rat ) { <nl> return Type ( TDict , ArraySpec ( rat ) ) ; <nl> } <nl> <nl> - inline Type Type : : Shape ( const RepoAuthType : : Array * rat ) { <nl> - return Type ( TShape , ArraySpec ( rat ) ) ; <nl> - } <nl> - <nl> inline Type Type : : Keyset ( const RepoAuthType : : Array * rat ) { <nl> return Type ( TKeyset , ArraySpec ( rat ) ) ; <nl> } <nl> inline Type Type : : StaticDict ( const RepoAuthType : : Array * rat ) { <nl> return Type ( TStaticDict , ArraySpec ( rat ) ) ; <nl> } <nl> <nl> - inline Type Type : : StaticShape ( const RepoAuthType : : Array * rat ) { <nl> - return Type ( TPersistentShape , ArraySpec ( rat ) ) ; <nl> - } <nl> - <nl> inline Type Type : : StaticKeyset ( const RepoAuthType : : Array * rat ) { <nl> return Type ( TStaticKeyset , ArraySpec ( rat ) ) ; <nl> } <nl> mmm a / hphp / runtime / vm / jit / type . cpp <nl> ppp b / hphp / runtime / vm / jit / type . cpp <nl> const ArrayData * Type : : arrLikeVal ( ) const { <nl> if ( * this < = TArr ) return m_arrVal ; <nl> if ( * this < = TVec ) return m_vecVal ; <nl> if ( * this < = TDict ) return m_dictVal ; <nl> - if ( * this < = TShape ) return m_shapeVal ; <nl> if ( * this < = TKeyset ) return m_keysetVal ; <nl> always_assert ( false ) ; <nl> } <nl> std : : string Type : : constValString ( ) const { <nl> } <nl> return folly : : format ( " Dict ( { } ) " , m_dictVal ) . str ( ) ; <nl> } <nl> - if ( * this < = TPersistentShape ) { <nl> - if ( m_shapeVal - > empty ( ) ) { <nl> - return " shape ( ) " ; <nl> - } <nl> - return folly : : format ( " Shape ( { } ) " , m_shapeVal ) . 
str ( ) ; <nl> - } <nl> if ( * this < = TStaticKeyset ) { <nl> if ( m_keysetVal - > empty ( ) ) { <nl> return " keyset ( ) " ; <nl> Type : : bits_t Type : : bitsFromDataType ( DataType outer , DataType inner ) { <nl> case KindOfPersistentVec : return kPersistentVec ; <nl> case KindOfPersistentDict : return kPersistentDict ; <nl> case KindOfPersistentKeyset : return kPersistentKeyset ; <nl> - case KindOfPersistentShape : return kPersistentShape ; <nl> case KindOfPersistentArray : return kPersistentArr ; <nl> case KindOfVec : return kVec ; <nl> case KindOfDict : return kDict ; <nl> case KindOfKeyset : return kKeyset ; <nl> - case KindOfShape : return kShape ; <nl> case KindOfArray : return kArr ; <nl> case KindOfResource : return kRes ; <nl> case KindOfObject : return kObj ; <nl> DataType Type : : toDataType ( ) const { <nl> if ( * this < = TStr ) return KindOfString ; <nl> if ( * this < = TPersistentArr ) return KindOfPersistentArray ; <nl> if ( * this < = TArr ) return KindOfArray ; <nl> - if ( * this < = TPersistentShape ) return KindOfPersistentShape ; <nl> - if ( * this < = TShape ) return KindOfShape ; <nl> if ( * this < = TPersistentVec ) return KindOfPersistentVec ; <nl> if ( * this < = TVec ) return KindOfVec ; <nl> if ( * this < = TPersistentDict ) return KindOfPersistentDict ; <nl> Type typeFromTV ( tv_rval tv , const Class * ctx ) { <nl> if ( outer = = KindOfPersistentString ) outer = KindOfString ; <nl> else if ( outer = = KindOfPersistentVec ) outer = KindOfVec ; <nl> else if ( outer = = KindOfPersistentDict ) outer = KindOfDict ; <nl> - else if ( outer = = KindOfPersistentShape ) outer = KindOfShape ; <nl> else if ( outer = = KindOfPersistentKeyset ) outer = KindOfKeyset ; <nl> <nl> if ( isRefType ( outer ) ) { <nl> Type typeFromTV ( tv_rval tv , const Class * ctx ) { <nl> else if ( inner = = KindOfPersistentArray ) inner = KindOfArray ; <nl> else if ( inner = = KindOfPersistentVec ) inner = KindOfVec ; <nl> else if ( inner = = KindOfPersistentDict ) inner = KindOfDict ; <nl> - else if ( inner = = KindOfPersistentShape ) inner = KindOfShape ; <nl> else if ( inner = = KindOfPersistentKeyset ) inner = KindOfKeyset ; <nl> } <nl> return Type ( outer , inner ) ; <nl> Type negativeCheckType ( Type srcType , Type typeParam ) { <nl> if ( typeParam . maybe ( TPersistent ) ) { <nl> if ( tmp . maybe ( TCountedStr ) ) tmp | = TStr ; <nl> if ( tmp . maybe ( TCountedArr ) ) tmp | = TArr ; <nl> - if ( tmp . maybe ( TCountedShape ) ) tmp | = TShape ; <nl> if ( tmp . maybe ( TCountedVec ) ) tmp | = TVec ; <nl> if ( tmp . maybe ( TCountedDict ) ) tmp | = TDict ; <nl> if ( tmp . maybe ( TCountedKeyset ) ) tmp | = TKeyset ; <nl> Type boxType ( Type t ) { <nl> t = TStr ; <nl> } else if ( t < = TArr ) { <nl> t = TArr ; <nl> - } else if ( t < = TShape ) { <nl> - t = TShape ; <nl> } else if ( t < = TVec ) { <nl> t = TVec ; <nl> } else if ( t < = TDict ) { <nl> Type relaxToGuardable ( Type ty ) { <nl> / / ty is unspecialized and we don ' t support guarding on CountedArr or <nl> / / StaticArr , so widen any subtypes of Arr to Arr . <nl> if ( ty < = TArr ) return TArr ; <nl> - if ( ty < = TShape ) return TShape ; <nl> if ( ty < = TVec ) return TVec ; <nl> if ( ty < = TDict ) return TDict ; <nl> if ( ty < = TKeyset ) return TKeyset ; <nl> mmm a / hphp / runtime / vm / jit / type . h <nl> ppp b / hphp / runtime / vm / jit / type . 
h <nl> constexpr bool operator > ( Mem a , Mem b ) { <nl> c ( StaticArr , bits_t : : bit < 8 > ( ) ) \ <nl> c ( UncountedArr , bits_t : : bit < 9 > ( ) ) \ <nl> c ( CountedArr , bits_t : : bit < 10 > ( ) ) \ <nl> - c ( PersistentShape , bits_t : : bit < 11 > ( ) ) \ <nl> - c ( CountedShape , bits_t : : bit < 12 > ( ) ) \ <nl> - c ( StaticVec , bits_t : : bit < 13 > ( ) ) \ <nl> - c ( UncountedVec , bits_t : : bit < 14 > ( ) ) \ <nl> - c ( CountedVec , bits_t : : bit < 15 > ( ) ) \ <nl> - c ( StaticDict , bits_t : : bit < 16 > ( ) ) \ <nl> - c ( UncountedDict , bits_t : : bit < 17 > ( ) ) \ <nl> - c ( CountedDict , bits_t : : bit < 18 > ( ) ) \ <nl> - c ( StaticKeyset , bits_t : : bit < 19 > ( ) ) \ <nl> - c ( UncountedKeyset , bits_t : : bit < 20 > ( ) ) \ <nl> - c ( CountedKeyset , bits_t : : bit < 21 > ( ) ) \ <nl> - c ( Obj , bits_t : : bit < 22 > ( ) ) \ <nl> - c ( Res , bits_t : : bit < 23 > ( ) ) \ <nl> - c ( Func , bits_t : : bit < 24 > ( ) ) \ <nl> - c ( Cls , bits_t : : bit < 25 > ( ) ) \ <nl> - c ( ClsMeth , bits_t : : bit < 26 > ( ) ) \ <nl> - c ( Record , bits_t : : bit < 27 > ( ) ) \ <nl> - c ( RecDesc , bits_t : : bit < 28 > ( ) ) \ <nl> - / / Boxed * : 29 - 57 <nl> + c ( StaticVec , bits_t : : bit < 11 > ( ) ) \ <nl> + c ( UncountedVec , bits_t : : bit < 12 > ( ) ) \ <nl> + c ( CountedVec , bits_t : : bit < 13 > ( ) ) \ <nl> + c ( StaticDict , bits_t : : bit < 14 > ( ) ) \ <nl> + c ( UncountedDict , bits_t : : bit < 15 > ( ) ) \ <nl> + c ( CountedDict , bits_t : : bit < 16 > ( ) ) \ <nl> + c ( StaticKeyset , bits_t : : bit < 17 > ( ) ) \ <nl> + c ( UncountedKeyset , bits_t : : bit < 18 > ( ) ) \ <nl> + c ( CountedKeyset , bits_t : : bit < 19 > ( ) ) \ <nl> + c ( Obj , bits_t : : bit < 20 > ( ) ) \ <nl> + c ( Res , bits_t : : bit < 21 > ( ) ) \ <nl> + c ( Func , bits_t : : bit < 22 > ( ) ) \ <nl> + c ( Cls , bits_t : : bit < 23 > ( ) ) \ <nl> + c ( ClsMeth , bits_t : : bit < 24 > ( ) ) \ <nl> + c ( Record , bits_t : : bit < 25 > ( ) ) \ <nl> + c ( RecDesc , bits_t : : bit < 26 > ( ) ) \ <nl> + / / Boxed * : 27 - 55 <nl> <nl> / * <nl> * This list should be in non - decreasing order of specificity . 
<nl> constexpr bool operator > ( Mem a , Mem b ) { <nl> c ( Str , kPersistentStr | kCountedStr ) \ <nl> c ( PersistentArr , kStaticArr | kUncountedArr ) \ <nl> c ( Arr , kPersistentArr | kCountedArr ) \ <nl> - c ( Shape , kPersistentShape | kCountedShape ) \ <nl> c ( PersistentVec , kStaticVec | kUncountedVec ) \ <nl> c ( Vec , kPersistentVec | kCountedVec ) \ <nl> c ( PersistentDict , kStaticDict | kUncountedDict ) \ <nl> c ( Dict , kPersistentDict | kCountedDict ) \ <nl> c ( PersistentKeyset , kStaticKeyset | kUncountedKeyset ) \ <nl> c ( Keyset , kPersistentKeyset | kCountedKeyset ) \ <nl> - c ( PersistentArrLike , kPersistentArr | kPersistentShape | kPersistentVec | kPersistentDict | kPersistentKeyset ) \ <nl> - c ( ArrLike , kArr | kShape | kVec | kDict | kKeyset ) \ <nl> + c ( PersistentArrLike , kPersistentArr | kPersistentVec | kPersistentDict | kPersistentKeyset ) \ <nl> + c ( ArrLike , kArr | kVec | kDict | kKeyset ) \ <nl> c ( NullableObj , kObj | kInitNull | kUninit ) \ <nl> c ( Persistent , kPersistentStr | kPersistentArrLike ) \ <nl> c ( UncountedInit , UNCCOUNTED_INIT_UNION ) \ <nl> constexpr bool operator > ( Mem a , Mem b ) { <nl> * / <nl> # ifdef USE_LOWPTR <nl> # define COUNTED_INIT_UNION \ <nl> - kCountedStr | kCountedArr | kCountedShape | kCountedVec | kCountedDict | kCountedKeyset | kObj | kRes | kBoxedCell | kRecord <nl> + kCountedStr | kCountedArr | kCountedVec | kCountedDict | kCountedKeyset | kObj | kRes | kBoxedCell | kRecord <nl> # else <nl> # define COUNTED_INIT_UNION \ <nl> - kCountedStr | kCountedArr | kCountedShape | kCountedVec | kCountedDict | kCountedKeyset | kObj | kRes | kBoxedCell | kRecord | kClsMeth <nl> + kCountedStr | kCountedArr | kCountedVec | kCountedDict | kCountedKeyset | kObj | kRes | kBoxedCell | kRecord | kClsMeth <nl> # endif <nl> <nl> # define IRT_SPECIAL \ <nl> constexpr bool operator > ( Mem a , Mem b ) { <nl> IRT ( Ctx , kObj | kCctx ) \ <nl> IRTX ( AnyObj , Top , kAnyObj ) \ <nl> IRTX ( AnyArr , Top , kAnyArr ) \ <nl> - IRTX ( AnyShape , Top , kAnyShape ) \ <nl> IRTX ( AnyVec , Top , kAnyVec ) \ <nl> IRTX ( AnyDict , Top , kAnyDict ) \ <nl> IRTX ( AnyKeyset , Top , kAnyKeyset ) \ <nl> struct ConstCctx { <nl> * / <nl> struct Type { <nl> private : <nl> - static constexpr size_t kBoxShift = 29 ; <nl> + static constexpr size_t kBoxShift = 27 ; <nl> static constexpr size_t kRuntime = kBoxShift * 2 ; <nl> static constexpr size_t numRuntime = 13 ; <nl> using bits_t = BitSet < kRuntime + numRuntime > ; <nl> struct Type { <nl> # undef IRTX <nl> <nl> static constexpr bits_t kAnyArr = kArr | kBoxedArr ; <nl> - static constexpr bits_t kAnyShape = kShape | kBoxedShape ; <nl> static constexpr bits_t kAnyVec = kVec | kBoxedVec ; <nl> static constexpr bits_t kAnyDict = kDict | kBoxedDict ; <nl> static constexpr bits_t kAnyKeyset = kKeyset | kBoxedKeyset ; <nl> struct Type { <nl> static Type Array ( ArrayData : : ArrayKind , const RepoAuthType : : Array * ) ; <nl> static Type Vec ( const RepoAuthType : : Array * ) ; <nl> static Type Dict ( const RepoAuthType : : Array * ) ; <nl> - static Type Shape ( const RepoAuthType : : Array * ) ; <nl> static Type Keyset ( const RepoAuthType : : Array * ) ; <nl> <nl> / * <nl> * Return a specialized TStaticArr / TStaticVec / <nl> - * TStaticDict / TPersistentShape / TStaticKeyset . <nl> + * TStaticDict / TStaticKeyset . 
<nl> * / <nl> static Type StaticArray ( ArrayData : : ArrayKind kind ) ; <nl> static Type StaticArray ( const RepoAuthType : : Array * rat ) ; <nl> static Type StaticArray ( ArrayData : : ArrayKind , const RepoAuthType : : Array * ) ; <nl> static Type StaticVec ( const RepoAuthType : : Array * ) ; <nl> static Type StaticDict ( const RepoAuthType : : Array * ) ; <nl> - static Type StaticShape ( ArrayData : : ArrayKind kind ) ; <nl> - static Type StaticShape ( const RepoAuthType : : Array * ) ; <nl> static Type StaticKeyset ( const RepoAuthType : : Array * ) ; <nl> <nl> / * <nl> mmm a / hphp / runtime / vm / member - operations . h <nl> ppp b / hphp / runtime / vm / member - operations . h <nl> NEVER_INLINE tv_rval ElemSlow ( TypedValue & tvRef , <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> return ElemKeyset < mode , keyType > ( base . val ( ) . parr , key ) ; <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> - return RuntimeOption : : EvalHackArrDVArrs ? <nl> - ElemDict < mode , keyType > ( base . val ( ) . parr , key ) : <nl> - ElemArray < mode , keyType > ( base . val ( ) . parr , key ) ; <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> return ElemArray < mode , keyType > ( base . val ( ) . parr , key ) ; <nl> inline tv_lval ElemDArrayPre ( tv_lval base , TypedValue key , <nl> * / <nl> template < MOpMode mode , KeyType keyType > <nl> inline tv_lval ElemDArray ( tv_lval base , key_type < keyType > key ) { <nl> - assertx ( tvIsArrayOrShape ( base ) ) ; <nl> + assertx ( tvIsArray ( base ) ) ; <nl> assertx ( tvIsPlausible ( * base ) ) ; <nl> <nl> bool defined ; <nl> tv_lval ElemD ( TypedValue & tvRef , tv_lval base , <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> return ElemDKeyset < keyType > ( base , key ) ; <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> - return RuntimeOption : : EvalHackArrDVArrs ? <nl> - ElemDDict < keyType , copyProv > ( base , key ) : <nl> - ElemDArray < mode , keyType > ( base , key ) ; <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> return ElemDArray < mode , keyType > ( base , key ) ; <nl> tv_lval ElemU ( TypedValue & tvRef , tv_lval base , key_type < keyType > key ) { <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> return ElemUKeyset < keyType > ( base , key ) ; <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> - return RuntimeOption : : EvalHackArrDVArrs <nl> - ? ElemUDict < keyType > ( base , key ) <nl> - : ElemUArray < keyType > ( base , key ) ; <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> return ElemUArray < keyType > ( base , key ) ; <nl> inline tv_lval NewElem ( TypedValue & tvRef , <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> throw_cannot_use_newelem_for_lval_read_keyset ( ) ; <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - throw_cannot_use_newelem_for_lval_read_dict ( ) ; <nl> - } <nl> - / * FALLTHROUGH * / <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> return NewElemArray ( base ) ; <nl> inline ArrayData * SetElemArrayPre ( ArrayData * a , TypedValue key , Cell * value ) { <nl> * / <nl> template < bool setResult , KeyType keyType > <nl> inline void SetElemArray ( tv_lval base , key_type < keyType > key , Cell * value ) { <nl> - assertx ( tvIsArrayOrShape ( base ) ) ; <nl> + assertx ( tvIsArray ( base ) ) ; <nl> assertx ( tvIsPlausible ( * base ) ) ; <nl> <nl> ArrayData * a = val ( base ) . 
parr ; <nl> inline ArrayData * SetElemDictPre ( ArrayData * a , <nl> template < bool setResult , KeyType keyType , bool copyProv > <nl> inline void SetElemDict ( tv_lval base , key_type < keyType > key , <nl> Cell * value ) { <nl> - assertx ( tvIsDictOrShape ( base ) ) ; <nl> + assertx ( tvIsDict ( base ) ) ; <nl> assertx ( tvIsPlausible ( * base ) ) ; <nl> <nl> ArrayData * a = val ( base ) . parr ; <nl> StringData * SetElemSlow ( tv_lval base , <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> throwInvalidKeysetOperation ( ) ; <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - SetElemDict < setResult , keyType , copyProv > ( base , key , value ) ; <nl> - } else { <nl> - SetElemArray < setResult , keyType > ( base , key , value ) ; <nl> - } <nl> - return nullptr ; <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> SetElemArray < setResult , keyType > ( base , key , value ) ; <nl> inline void SetNewElem ( tv_lval base , <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> return SetNewElemKeyset ( base , value ) ; <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> - return RuntimeOption : : EvalHackArrDVArrs <nl> - ? SetNewElemDict < copyProv > ( base , value ) <nl> - : SetNewElemArray ( base , value ) ; <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> return SetNewElemArray ( base , value ) ; <nl> inline tv_lval SetOpElem ( TypedValue & tvRef , <nl> case KindOfKeyset : <nl> throwInvalidKeysetOperation ( ) ; <nl> <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : { <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - auto result = [ & ] { <nl> - if ( RuntimeOption : : EvalArrayProvenance ) { <nl> - return ElemDDict < KeyType : : Any , true > ( base , key ) ; <nl> - } else { <nl> - return ElemDDict < KeyType : : Any , false > ( base , key ) ; <nl> - } <nl> - } ( ) ; <nl> - result = tvAssertCell ( result ) ; <nl> - setopBody ( result , op , rhs ) ; <nl> - return result ; <nl> - } <nl> - / / Fallthrough <nl> - } <nl> - <nl> case KindOfPersistentArray : <nl> case KindOfArray : { <nl> if ( UNLIKELY ( <nl> inline tv_lval SetOpNewElem ( TypedValue & tvRef , <nl> case KindOfKeyset : <nl> throw_cannot_use_newelem_for_lval_read_keyset ( ) ; <nl> <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : { <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - throw_cannot_use_newelem_for_lval_read_dict ( ) ; <nl> - } <nl> - auto result = asArrRef ( base ) . 
lvalAt ( ) ; <nl> - setopBody ( result , op , rhs ) ; <nl> - return result ; <nl> - } <nl> - <nl> case KindOfPersistentArray : <nl> case KindOfArray : { <nl> if ( UNLIKELY ( checkHACFalseyPromote ( ) ) ) { <nl> inline Cell IncDecElem ( <nl> case KindOfKeyset : <nl> throwInvalidKeysetOperation ( ) ; <nl> <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : { <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - auto result = [ & ] { <nl> - if ( RuntimeOption : : EvalArrayProvenance ) { <nl> - return ElemDDict < KeyType : : Any , true > ( base , key ) ; <nl> - } else { <nl> - return ElemDDict < KeyType : : Any , false > ( base , key ) ; <nl> - } <nl> - } ( ) ; <nl> - return IncDecBody ( op , tvAssertCell ( result ) ) ; <nl> - } <nl> - / / fallthrough <nl> - } <nl> - <nl> case KindOfPersistentArray : <nl> case KindOfArray : { <nl> if ( UNLIKELY ( <nl> inline Cell IncDecNewElem ( <nl> case KindOfKeyset : <nl> throw_cannot_use_newelem_for_lval_read_keyset ( ) ; <nl> <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : { <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - throw_cannot_use_newelem_for_lval_read_dict ( ) ; <nl> - } <nl> - auto result = asArrRef ( base ) . lvalAt ( ) ; <nl> - assertx ( type ( result ) = = KindOfNull ) ; <nl> - return IncDecBody ( op , result ) ; <nl> - } <nl> - <nl> case KindOfPersistentArray : <nl> case KindOfArray : { <nl> if ( UNLIKELY ( checkHACFalseyPromote ( ) ) ) { <nl> inline ArrayData * UnsetElemArrayPre ( ArrayData * a , TypedValue key ) { <nl> * / <nl> template < KeyType keyType > <nl> inline void UnsetElemArray ( tv_lval base , key_type < keyType > key ) { <nl> - assertx ( tvIsArrayOrShape ( base ) ) ; <nl> + assertx ( tvIsArray ( base ) ) ; <nl> assertx ( tvIsPlausible ( * base ) ) ; <nl> ArrayData * a = val ( base ) . parr ; <nl> ArrayData * a2 = UnsetElemArrayPre ( a , key ) ; <nl> void UnsetElemSlow ( tv_lval base , key_type < keyType > key ) { <nl> UnsetElemKeyset < keyType > ( base , key ) ; <nl> return ; <nl> <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> - if ( RuntimeOption : : EvalHackArrDVArrs ) { <nl> - UnsetElemDict < keyType > ( base , key ) ; <nl> - } else { <nl> - UnsetElemArray < keyType > ( base , key ) ; <nl> - } <nl> - return ; <nl> - <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> UnsetElemArray < keyType > ( base , key ) ; <nl> NEVER_INLINE bool IssetEmptyElemSlow ( tv_rval base , key_type < keyType > key ) { <nl> case KindOfKeyset : <nl> return IssetEmptyElemKeyset < useEmpty , keyType > ( val ( base ) . parr , key ) ; <nl> <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> - return RuntimeOption : : EvalHackArrDVArrs ? <nl> - IssetEmptyElemDict < useEmpty , keyType > ( val ( base ) . parr , key ) : <nl> - IssetEmptyElemArray < useEmpty , keyType > ( <nl> - val ( base ) . 
parr , key <nl> - ) ; <nl> - <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> return IssetEmptyElemArray < useEmpty , keyType > ( <nl> tv_lval propPre ( TypedValue & tvRef , tv_lval base , MInstrPropState * pState ) { <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> case KindOfClsMeth : <nl> inline tv_lval nullSafeProp ( TypedValue & tvRef , <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> case KindOfFunc : <nl> inline void SetProp ( Class * ctx , tv_lval base , key_type < keyType > key , <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> case KindOfResource : <nl> inline tv_lval SetOpProp ( TypedValue & tvRef , <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> case KindOfResource : <nl> inline Cell IncDecProp ( <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> case KindOfResource : <nl> mmm a / hphp / runtime / vm / native . cpp <nl> ppp b / hphp / runtime / vm / native . cpp <nl> void callFunc ( const Func * const func , const void * const ctx , <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> case KindOfClsMeth : <nl> void coerceFCallArgs ( TypedValue * args , <nl> <nl> if ( LIKELY ( ! RuntimeOption : : EvalHackArrCompatTypeHintNotices ) | | <nl> ! tc . isArray ( ) | | <nl> - ! isArrayOrShapeType ( c - > m_type ) ) continue ; <nl> + ! isArrayType ( c - > m_type ) ) continue ; <nl> <nl> auto const raise = [ & ] { <nl> if ( tc . 
isVArray ( ) ) { <nl> static folly : : Optional < TypedValue > builtinInValue ( <nl> case KindOfDict : return make_tv < KindOfDict > ( ArrayData : : CreateDict ( ) ) ; <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : return make_tv < KindOfNull > ( ) ; <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : return make_array_like_tv ( ArrayData : : CreateShape ( ) ) ; <nl> case KindOfPersistentArray : <nl> case KindOfArray : return make_tv < KindOfArray > ( ArrayData : : Create ( ) ) ; <nl> case KindOfUninit : <nl> static bool tcCheckNative ( const TypeConstraint & tc , const NativeSig : : Type ty ) { <nl> case KindOfDict : return ty = = T : : Array | | ty = = T : : ArrayArg ; <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : return ty = = T : : Array | | ty = = T : : ArrayArg ; <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : return ty = = T : : Array | | ty = = T : : ArrayArg ; <nl> case KindOfPersistentArray : <nl> case KindOfArray : return ty = = T : : Array | | ty = = T : : ArrayArg ; <nl> case KindOfResource : return ty = = T : : Resource | | ty = = T : : ResourceArg ; <nl> static bool tcCheckNativeIO ( <nl> case KindOfDict : return ty = = T : : ArrayIO ; <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : return ty = = T : : ArrayIO ; <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : return ty = = T : : ArrayIO ; <nl> case KindOfPersistentArray : <nl> case KindOfArray : return ty = = T : : ArrayIO ; <nl> case KindOfResource : return ty = = T : : ResourceIO ; <nl> mmm a / hphp / runtime / vm / type - constraint . cpp <nl> ppp b / hphp / runtime / vm / type - constraint . cpp <nl> bool TypeConstraint : : checkNamedTypeNonObj ( tv_rval val ) const { <nl> c = td - > klass ; <nl> break ; <nl> case AnnotAction : : VArrayCheck : <nl> - assertx ( tvIsArrayOrShape ( val ) ) ; <nl> + assertx ( tvIsArray ( val ) ) ; <nl> return Assert | | val . val ( ) . parr - > isVArray ( ) ; <nl> case AnnotAction : : DArrayCheck : <nl> - assertx ( tvIsArrayOrShape ( val ) ) ; <nl> + assertx ( tvIsArray ( val ) ) ; <nl> return Assert | | val . val ( ) . parr - > isDArray ( ) ; <nl> case AnnotAction : : VArrayOrDArrayCheck : <nl> - assertx ( tvIsArrayOrShape ( val ) ) ; <nl> + assertx ( tvIsArray ( val ) ) ; <nl> return ( Assert | | ( <nl> ! RuntimeOption : : EvalHackArrCompatTypeHintPolymorphism & & <nl> ! val . val ( ) . parr - > isNotDVArray ( ) <nl> ) ) ; <nl> case AnnotAction : : NonVArrayOrDArrayCheck : <nl> - assertx ( tvIsArrayOrShape ( val ) ) ; <nl> + assertx ( tvIsArray ( val ) ) ; <nl> return Assert | | val . val ( ) . parr - > isNotDVArray ( ) ; <nl> case AnnotAction : : WarnFunc : <nl> case AnnotAction : : WarnClass : <nl> bool TypeConstraint : : checkImpl ( tv_rval val , <nl> case AnnotAction : : VArrayCheck : <nl> / / Since d / varray type - hints are always soft , we can never assert on their <nl> / / correctness . <nl> - assertx ( tvIsArrayOrShape ( val ) ) ; <nl> + assertx ( tvIsArray ( val ) ) ; <nl> return isAssert | | val . val ( ) . parr - > isVArray ( ) ; <nl> case AnnotAction : : DArrayCheck : <nl> - assertx ( tvIsArrayOrShape ( val ) ) ; <nl> + assertx ( tvIsArray ( val ) ) ; <nl> return isAssert | | val . val ( ) . parr - > isDArray ( ) ; <nl> case AnnotAction : : VArrayOrDArrayCheck : <nl> - assertx ( tvIsArrayOrShape ( val ) ) ; <nl> + assertx ( tvIsArray ( val ) ) ; <nl> return ( isAssert | | ( <nl> ! RuntimeOption : : EvalHackArrCompatTypeHintPolymorphism & & <nl> ! val . val ( ) . 
parr - > isNotDVArray ( ) <nl> ) ) ; <nl> case AnnotAction : : NonVArrayOrDArrayCheck : <nl> - assertx ( tvIsArrayOrShape ( val ) ) ; <nl> + assertx ( tvIsArray ( val ) ) ; <nl> return isAssert | | val . val ( ) . parr - > isNotDVArray ( ) ; <nl> case AnnotAction : : WarnFunc : <nl> case AnnotAction : : WarnClass : <nl> std : : string describe_actual_type ( tv_rval val , bool isHHType ) { <nl> case KindOfDict : return " HH \ \ dict " ; <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : return " HH \ \ keyset " ; <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> - return RuntimeOption : : EvalHackArrDVArrs ? " HH \ \ dict " : " array " ; <nl> case KindOfPersistentArray : <nl> case KindOfArray : return " array " ; <nl> case KindOfResource : <nl> MemoKeyConstraint memoKeyConstraintFromTC ( const TypeConstraint & tc ) { <nl> case KindOfDict : <nl> case KindOfPersistentKeyset : <nl> case KindOfKeyset : <nl> - case KindOfPersistentShape : <nl> - case KindOfShape : <nl> case KindOfPersistentArray : <nl> case KindOfArray : <nl> case KindOfClsMeth : <nl> | Nuke KindOfShape from orbit | facebook/hhvm | 040a46737104e580dd37b99338fbceef966108fd | 2019-09-16T15:48:59Z |
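Editor's note (not part of the recorded diff): the facebook/hhvm record above mechanically deletes the Shape datatype — the KindOfShape / KindOfPersistentShape cases, TShape, and the ConvShapeTo* opcodes — and renumbers the type bits that followed it in type.h. The sketch below is an illustration only, using invented names rather than HHVM's real headers, to show why such a removal is largely compiler-checked: any switch over a scoped enum that still names a deleted case fails to build, and -Wswitch flags any switch left non-exhaustive.

```cpp
// Standalone sketch with hypothetical names (not HHVM code): a scoped enum
// plus exhaustive switches. Deleting a kind (as the commit does with the
// Shape kinds) turns every leftover `case DataKind::Shape:` into a compile
// error, and -Wswitch reports any switch that no longer covers all kinds.
#include <cstdint>
#include <iostream>

enum class DataKind : std::uint8_t { Null, Bool, Int, Str, Arr, Vec, Dict, Keyset };

// Bit positions are assigned contiguously, so removing a kind renumbers the
// bits that follow it -- mirroring the bit shifts visible in the type.h hunk.
constexpr std::uint32_t bitFor(DataKind k) {
  return 1u << static_cast<std::uint32_t>(k);
}

const char* name(DataKind k) {
  switch (k) {
    case DataKind::Null:   return "null";
    case DataKind::Bool:   return "bool";
    case DataKind::Int:    return "int";
    case DataKind::Str:    return "string";
    case DataKind::Arr:    return "array";
    case DataKind::Vec:    return "vec";
    case DataKind::Dict:   return "dict";
    case DataKind::Keyset: return "keyset";
  }
  return "unknown";
}

int main() {
  std::cout << name(DataKind::Dict) << " -> bit 0x" << std::hex
            << bitFor(DataKind::Dict) << "\n";
  return 0;
}
```

Building such code with -Werror=switch is what makes a one-pass removal and bit renumbering of this size safe to land.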
mmm a / xbmc / interfaces / json - rpc / PVROperations . cpp <nl> ppp b / xbmc / interfaces / json - rpc / PVROperations . cpp <nl> JSONRPC_STATUS CPVROperations : : GetChannelGroups ( const std : : string & method , ITran <nl> <nl> int start , end ; <nl> <nl> - std : : vector < CPVRChannelGroupPtr > groupList = channelGroups - > GetMembers ( ) ; <nl> + std : : vector < CPVRChannelGroupPtr > groupList = channelGroups - > GetMembers ( true ) ; <nl> HandleLimits ( parameterObject , result , groupList . size ( ) , start , end ) ; <nl> for ( int index = start ; index < end ; index + + ) <nl> FillChannelGroupDetails ( groupList . at ( index ) , parameterObject , result [ " channelgroups " ] , true ) ; <nl> mmm a / xbmc / pvr / channels / PVRChannelGroups . cpp <nl> ppp b / xbmc / pvr / channels / PVRChannelGroups . cpp <nl> CPVRChannelGroupPtr CPVRChannelGroups : : GetLastPlayedGroup ( int iChannelID / * = - 1 <nl> return group ; <nl> } <nl> <nl> - std : : vector < CPVRChannelGroupPtr > CPVRChannelGroups : : GetMembers ( ) const <nl> + std : : vector < CPVRChannelGroupPtr > CPVRChannelGroups : : GetMembers ( bool bExcludeHidden / * = false * / ) const <nl> { <nl> CSingleLock lock ( m_critSection ) ; <nl> - std : : vector < CPVRChannelGroupPtr > groups ( m_groups . begin ( ) , m_groups . end ( ) ) ; <nl> + std : : vector < CPVRChannelGroupPtr > groups ; <nl> + for ( CPVRChannelGroupPtr group : m_groups ) <nl> + { <nl> + if ( ! bExcludeHidden | | ! group - > IsHidden ( ) ) <nl> + groups . push_back ( group ) ; <nl> + } <nl> return groups ; <nl> } <nl> <nl> mmm a / xbmc / pvr / channels / PVRChannelGroups . h <nl> ppp b / xbmc / pvr / channels / PVRChannelGroups . h <nl> namespace PVR <nl> / * ! <nl> * @ brief Get the list of groups . <nl> * @ param groups The list to store the results in . <nl> + * @ param bExcludeHidden Whenever to exclude hidden channel groups . <nl> * @ return The amount of items that were added . <nl> * / <nl> - std : : vector < CPVRChannelGroupPtr > GetMembers ( ) const ; <nl> + std : : vector < CPVRChannelGroupPtr > GetMembers ( bool bExcludeHidden = false ) const ; <nl> <nl> / * ! <nl> * @ brief Get the list of groups . <nl> | Merge pull request from xhaggi / fix / pvr - populate - visible - groups - only - via - jsonrpc | xbmc/xbmc | aaf271d517a65fc1e15496e18bf918e249084e61 | 2015-12-29T22:17:36Z |
mmm a / src / core / core . cpp <nl> ppp b / src / core / core . cpp <nl> struct System : : Impl { <nl> gpu_core - > Start ( ) ; <nl> cpu_core_manager . StartThreads ( ) ; <nl> <nl> + / / Initialize cheat engine <nl> + if ( cheat_engine ) { <nl> + cheat_engine - > Initialize ( ) ; <nl> + } <nl> + <nl> / / All threads are started , begin main process execution , now that we ' re in the clear . <nl> main_process - > Run ( load_parameters - > main_thread_priority , <nl> load_parameters - > main_thread_stack_size ) ; <nl> | core : Initialize cheats after load to avoid VMManager crash | yuzu-emu/yuzu | 3e729c13cc70af6fb8fef6e38bd5deba7a8a0d6e | 2019-09-22T01:45:05Z |
mmm a / CHANGELOG <nl> ppp b / CHANGELOG <nl> devel <nl> <nl> * Fixed agency nodes to not create bogus keys on delete / observe / unobserve <nl> <nl> + * Fixed an agency bug found in Windows tests . <nl> + <nl> v3 . 5 . 0 - rc . 7 ( 2019 - 08 - 01 ) <nl> mmmmmmmmmmmmmmmmmmmmmmmm <nl> <nl> mmm a / arangod / Agency / Agent . cpp <nl> ppp b / arangod / Agency / Agent . cpp <nl> trans_ret_t Agent : : transact ( query_t const & queries ) { <nl> return trans_ret_t ( false , NO_LEADER ) ; <nl> } <nl> <nl> + term_t currentTerm = term ( ) ; / / this is the term we will be working with <nl> + <nl> + / / Check that we are actually still the leader : <nl> + if ( ! leading ( ) ) { <nl> + return trans_ret_t ( false , NO_LEADER ) ; <nl> + } <nl> + <nl> _tiLock . assertNotLockedByCurrentThread ( ) ; <nl> MUTEX_LOCKER ( ioLocker , _ioLock ) ; <nl> <nl> for ( const auto & query : VPackArrayIterator ( qs ) ) { <nl> + / / Check that we are actually still the leader : <nl> + if ( ! leading ( ) ) { <nl> + return trans_ret_t ( false , NO_LEADER ) ; <nl> + } <nl> if ( query [ 0 ] . isObject ( ) ) { <nl> check_ret_t res = _spearhead . applyTransaction ( query ) ; <nl> if ( res . successful ( ) ) { <nl> maxind = ( query . length ( ) = = 3 & & query [ 2 ] . isString ( ) ) <nl> - ? _state . logLeaderSingle ( query [ 0 ] , term ( ) , query [ 2 ] . copyString ( ) ) <nl> - : _state . logLeaderSingle ( query [ 0 ] , term ( ) ) ; <nl> + ? _state . logLeaderSingle ( query [ 0 ] , currentTerm , query [ 2 ] . copyString ( ) ) <nl> + : _state . logLeaderSingle ( query [ 0 ] , currentTerm ) ; <nl> ret - > add ( VPackValue ( maxind ) ) ; <nl> } else { <nl> _spearhead . read ( res . failed - > slice ( ) , * ret ) ; <nl> write_ret_t Agent : : write ( query_t const & query , WriteMode const & wmode ) { <nl> npacks + + ; <nl> } <nl> <nl> + term_t currentTerm = term ( ) ; / / this is the term we will be working with <nl> + <nl> + / / Check that we are actually still the leader : <nl> + if ( ! leading ( ) ) { <nl> + return write_ret_t ( false , NO_LEADER ) ; <nl> + } <nl> + <nl> / / Apply to spearhead and get indices for log entries <nl> / / Avoid keeping lock indefinitely <nl> for ( size_t i = 0 , l = 0 ; i < npacks ; + + i ) { <nl> write_ret_t Agent : : write ( query_t const & query , WriteMode const & wmode ) { <nl> return write_ret_t ( false , NO_LEADER ) ; <nl> } <nl> <nl> + / / Check that we are actually still the leader : <nl> + if ( ! leading ( ) ) { <nl> + return write_ret_t ( false , NO_LEADER ) ; <nl> + } <nl> + <nl> _tiLock . assertNotLockedByCurrentThread ( ) ; <nl> MUTEX_LOCKER ( ioLocker , _ioLock ) ; <nl> <nl> applied = _spearhead . applyTransactions ( chunk , wmode ) ; <nl> - auto tmp = _state . logLeaderMulti ( chunk , applied , term ( ) ) ; <nl> + auto tmp = _state . logLeaderMulti ( chunk , applied , currentTerm ) ; <nl> indices . insert ( indices . end ( ) , tmp . begin ( ) , tmp . end ( ) ) ; <nl> } <nl> } <nl> mmm a / arangod / Agency / State . cpp <nl> ppp b / arangod / Agency / State . cpp <nl> std : : vector < index_t > State : : inquire ( query_t const & query ) const { <nl> <nl> auto ret = _clientIdLookupTable . equal_range ( i . copyString ( ) ) ; <nl> index_t index = 0 ; <nl> + / / Look for the maximum index : <nl> for ( auto it = ret . first ; it ! = ret . second ; + + it ) { <nl> - if ( it - > second < _log [ 0 ] . index ) { <nl> - continue ; <nl> - } <nl> - if ( index < _log . at ( it - > second - _cur ) . index ) { <nl> - index = _log . at ( it - > second - _cur ) . 
index ; <nl> + if ( it - > second > index ) { <nl> + index = it - > second ; <nl> } <nl> } <nl> result . push_back ( index ) ; <nl> mmm a / arangod / Cluster / TakeoverShardLeadership . cpp <nl> ppp b / arangod / Cluster / TakeoverShardLeadership . cpp <nl> static void sendLeaderChangeRequests ( std : : vector < ServerID > const & currentServers <nl> if ( srv = = sid ) { <nl> continue ; / / ignore ourself <nl> } <nl> - LOG_DEVEL < < " Sending " < < bodyBuilder . toJson ( ) < < " to " < < srv ; <nl> + LOG_TOPIC ( " 42516 " , DEBUG , Logger : : MAINTENANCE ) <nl> + < < " Sending " < < bodyBuilder . toJson ( ) < < " to " < < srv ; <nl> requests . emplace_back ( " server : " + srv , RequestType : : PUT , url , body ) ; <nl> } <nl> <nl> mmm a / tests / js / client / agency / agency - test . js <nl> ppp b / tests / js / client / agency / agency - test . js <nl> function agencyTestSuite ( ) { <nl> } <nl> } ) ; <nl> <nl> + var startTime = new Date ( ) ; <nl> while ( true ) { <nl> <nl> + if ( new Date ( ) - startTime > 600000 ) { <nl> + assertTrue ( false , " Hit global timeout of 10 minutes in accessAgency . " ) ; <nl> + } <nl> + <nl> if ( ! inquire ) { <nl> res = request ( { url : agencyLeader + " / _api / agency / " + api , <nl> method : " POST " , followRedirect : false , <nl> function agencyTestSuite ( ) { <nl> headers : { " Content - Type " : " application / json " } , <nl> timeout : timeout / * essentially for the huge trx package <nl> running under ASAN in the CI * / } ) ; <nl> + require ( ' console ' ) . topic ( " agency = debug " , ' Sent out agency request , statusCode : ' , res . statusCode ) ; <nl> } else { / / inquire . Remove successful commits . For later retries <nl> res = request ( { url : agencyLeader + " / _api / agency / inquire " , <nl> method : " POST " , followRedirect : false , <nl> function agencyTestSuite ( ) { <nl> headers : { " Content - Type " : " application / json " } , <nl> timeout : timeout <nl> } ) ; <nl> + require ( ' console ' ) . topic ( " agency = info " , ' Sent out agency inquiry , statusCode : ' , res . statusCode ) ; <nl> } <nl> <nl> if ( res . statusCode = = = 307 ) { <nl> function agencyTestSuite ( ) { <nl> } <nl> require ( ' console ' ) . topic ( " agency = info " , ' Redirected to ' + agencyLeader ) ; <nl> continue ; <nl> - } else if ( res . statusCode = = = 503 ) { <nl> - require ( ' console ' ) . topic ( " agency = info " , ' Waiting for leader . . . ' ) ; <nl> + } else if ( res . statusCode = = = 503 | | res . statusCode = = = 500 ) { <nl> + / / 503 covers service not available and 500 covers timeout <nl> + require ( ' console ' ) . topic ( " agency = info " , ' Got status code ' , res . statusCode , ' , waiting for leader . . . ' ) ; <nl> if ( clientIds . length > 0 & & api = = = ' write ' ) { <nl> inquire = true ; <nl> } <nl> function agencyTestSuite ( ) { <nl> var done = 0 ; <nl> res . bodyParsed = JSON . parse ( res . body ) ; <nl> res . bodyParsed . results . forEach ( function ( index ) { <nl> + var noZeroYet = true ; <nl> if ( index > 0 ) { <nl> done + + ; <nl> + assertTrue ( noZeroYet ) ; <nl> + } else { <nl> + noZeroYet = false ; <nl> } <nl> } ) ; <nl> + require ( ' console ' ) . topic ( " agency = info " , ' Inquiry analysis : done = ' , done , ' body : ' , res . body ) ; <nl> if ( done = = = clientIds . length ) { <nl> + require ( ' console ' ) . topic ( " agency = info " , ' Inquiry analysis , accepting result as good ! ' ) ; <nl> break ; <nl> } else { <nl> list = list . slice ( done ) ; <nl> + clientIds = clientIds . 
slice ( done ) ; <nl> inquire = false ; <nl> + require ( ' console ' ) . topic ( " agency = info " , ' Inquiry analysis : have accepted ' , done , ' transactions as done , continuing with this list : ' , JSON . stringify ( list ) ) ; <nl> } <nl> } <nl> try { <nl> function agencyTestSuite ( ) { <nl> let trxs = [ ] ; <nl> for ( i = start ; i < start + count ; + + i ) { <nl> let key = " / key " + i ; <nl> - let trx = [ { } , { } , " clientid " + counter + + ] ; <nl> + let trx = [ { } , { } , " clientid " + start + counter + + ] ; <nl> trx [ 0 ] [ key ] = " value " + i ; <nl> trxs . push ( trx ) ; <nl> if ( trxs . length > = 200 | | i = = = start + count - 1 ) { <nl> | Fix agency bugs . ( ) | arangodb/arangodb | dc095f70c7f089ca2983a9d8c57692393fb9e651 | 2019-08-16T09:16:28Z |
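Editor's note (not part of the recorded diff): the arangodb/arangodb record above captures the working term once (`currentTerm = term()`) and re-checks `leading()` both up front and inside the per-transaction loop of `Agent::transact` and `Agent::write`, alongside a 10-minute global timeout and inquiry handling in the agency test. The standalone sketch below uses invented types, not ArangoDB's Agent API, to illustrate the capture-then-recheck pattern that keeps log entries from being written under a term other than the one that was validated.

```cpp
// Minimal sketch under assumed names (MiniAgent is hypothetical): capture the
// working term once, then re-check leadership before every apply/log step, so
// a leadership change mid-batch yields a NO_LEADER-style failure instead of
// silently logging later entries under a newer term.
#include <atomic>
#include <cstdint>
#include <iostream>
#include <string>
#include <utility>
#include <vector>

struct MiniAgent {
  std::atomic<std::uint64_t> term{1};
  std::atomic<bool> leader{true};
  std::vector<std::pair<std::uint64_t, std::string>> log;  // (term, entry)

  // Returns false (the NO_LEADER analogue) if leadership is lost at any point.
  bool write(const std::vector<std::string>& entries) {
    const std::uint64_t workingTerm = term.load();  // the term we will work with
    if (!leader.load()) return false;               // up-front leadership check

    for (const auto& e : entries) {
      if (!leader.load()) return false;             // re-check before each apply/log
      log.emplace_back(workingTerm, e);             // always log under workingTerm
    }
    return true;
  }
};

int main() {
  MiniAgent a;
  const bool ok = a.write({"/arango/key1=1", "/arango/key2=2"});
  std::cout << (ok ? "accepted" : "NO_LEADER")
            << ", log size " << a.log.size() << "\n";
  return 0;
}
```

The same idea appears twice in the diff — once before the spearhead apply and again inside the chunk loop — so the term passed to logLeaderSingle/logLeaderMulti is always the one that was checked, never one read after a leadership change.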
mmm a / cocos2d_libs . xcodeproj / project . pbxproj . REMOVED . git - id <nl> ppp b / cocos2d_libs . xcodeproj / project . pbxproj . REMOVED . git - id <nl> @ @ - 1 + 1 @ @ <nl> - fd0dee451420604712c1327679395cd1faca91b6 <nl> \ No newline at end of file <nl> + 0bd142a09d7aacd3dbff8f23c7a14966430e9c01 <nl> \ No newline at end of file <nl> mmm a / cocos2dx / CCDirector . cpp <nl> ppp b / cocos2dx / CCDirector . cpp <nl> void Director : : createStatsLabel ( ) <nl> CC_SAFE_RELEASE_NULL ( _FPSLabel ) ; <nl> CC_SAFE_RELEASE_NULL ( _SPFLabel ) ; <nl> CC_SAFE_RELEASE_NULL ( _drawsLabel ) ; <nl> - textureCache - > removeTextureForKey ( " cc_fps_images " ) ; <nl> + textureCache - > removeTextureForKey ( " / cc_fps_images " ) ; <nl> FileUtils : : getInstance ( ) - > purgeCachedEntries ( ) ; <nl> } <nl> <nl> void Director : : createStatsLabel ( ) <nl> return ; <nl> } <nl> <nl> - texture = textureCache - > addUIImage ( image , " cc_fps_images " ) ; <nl> + texture = textureCache - > addImage ( image , " / cc_fps_images " ) ; <nl> CC_SAFE_RELEASE ( image ) ; <nl> <nl> / * <nl> mmm a / cocos2dx / base_nodes / CCNode . cpp <nl> ppp b / cocos2dx / base_nodes / CCNode . cpp <nl> Node : : Node ( void ) <nl> <nl> ScriptEngineProtocol * pEngine = ScriptEngineManager : : getInstance ( ) - > getScriptEngine ( ) ; <nl> _scriptType = pEngine ! = NULL ? pEngine - > getScriptType ( ) : kScriptTypeNone ; <nl> - _componentContainer = new ComponentContainer ( this ) ; <nl> } <nl> <nl> Node : : ~ Node ( ) <nl> Node : : ~ Node ( ) <nl> / / children <nl> CC_SAFE_RELEASE ( _children ) ; <nl> <nl> - / / _comsContainer <nl> - _componentContainer - > removeAll ( ) ; <nl> CC_SAFE_DELETE ( _componentContainer ) ; <nl> } <nl> <nl> void Node : : updateTransform ( ) <nl> <nl> Component * Node : : getComponent ( const char * pName ) <nl> { <nl> - return _componentContainer - > get ( pName ) ; <nl> + if ( _componentContainer ) <nl> + return _componentContainer - > get ( pName ) ; <nl> + return nullptr ; <nl> } <nl> <nl> bool Node : : addComponent ( Component * pComponent ) <nl> { <nl> + / / lazy alloc <nl> + if ( ! _componentContainer ) <nl> + _componentContainer = new ComponentContainer ( this ) ; <nl> return _componentContainer - > add ( pComponent ) ; <nl> } <nl> <nl> bool Node : : removeComponent ( const char * pName ) <nl> { <nl> - return _componentContainer - > remove ( pName ) ; <nl> + if ( _componentContainer ) <nl> + return _componentContainer - > remove ( pName ) ; <nl> + return false ; <nl> } <nl> <nl> void Node : : removeAllComponents ( ) <nl> { <nl> - _componentContainer - > removeAll ( ) ; <nl> + if ( _componentContainer ) <nl> + _componentContainer - > removeAll ( ) ; <nl> } <nl> <nl> / / NodeRGBA <nl> mmm a / cocos2dx / particle_nodes / CCParticleExamples . cpp <nl> ppp b / cocos2dx / particle_nodes / CCParticleExamples . cpp <nl> static Texture2D * getDefaultTexture ( ) <nl> do <nl> { <nl> bool bRet = false ; <nl> - const char * key = " __firePngData " ; <nl> - texture = TextureCache : : getInstance ( ) - > textureForKey ( key ) ; <nl> + const char * key = " / __firePngData " ; <nl> + texture = TextureCache : : getInstance ( ) - > getTextureForKey ( key ) ; <nl> CC_BREAK_IF ( texture ! = NULL ) ; <nl> <nl> pImage = new Image ( ) ; <nl> static Texture2D * getDefaultTexture ( ) <nl> bRet = pImage - > initWithImageData ( __firePngData , sizeof ( __firePngData ) ) ; <nl> CC_BREAK_IF ( ! 
bRet ) ; <nl> <nl> - texture = TextureCache : : getInstance ( ) - > addUIImage ( pImage , key ) ; <nl> + texture = TextureCache : : getInstance ( ) - > addImage ( pImage , key ) ; <nl> } while ( 0 ) ; <nl> <nl> CC_SAFE_RELEASE ( pImage ) ; <nl> mmm a / cocos2dx / particle_nodes / CCParticleSystem . cpp <nl> ppp b / cocos2dx / particle_nodes / CCParticleSystem . cpp <nl> bool ParticleSystem : : initWithDictionary ( Dictionary * dictionary , const char * dirn <nl> CCASSERT ( isOK , " CCParticleSystem : error init image with Data " ) ; <nl> CC_BREAK_IF ( ! isOK ) ; <nl> <nl> - setTexture ( TextureCache : : getInstance ( ) - > addUIImage ( image , textureName . c_str ( ) ) ) ; <nl> + setTexture ( TextureCache : : getInstance ( ) - > addImage ( image , textureName . c_str ( ) ) ) ; <nl> <nl> image - > release ( ) ; <nl> } <nl> mmm a / cocos2dx / platform / CCFileUtils . cpp <nl> ppp b / cocos2dx / platform / CCFileUtils . cpp <nl> static tinyxml2 : : XMLElement * generateElementForArray ( cocos2d : : Array * array , tiny <nl> # else <nl> NS_CC_BEGIN <nl> <nl> - / * The subclass FileUtilsIOS and FileUtilsMac should override these two method . * / <nl> + / * The subclass FileUtilsApple should override these two method . * / <nl> Dictionary * FileUtils : : createDictionaryWithContentsOfFile ( const std : : string & filename ) { return NULL ; } <nl> bool FileUtils : : writeToFile ( cocos2d : : Dictionary * dict , const std : : string & fullPath ) { return NULL ; } <nl> Array * FileUtils : : createArrayWithContentsOfFile ( const std : : string & filename ) { return NULL ; } <nl> Array * FileUtils : : createArrayWithContentsOfFile ( const std : : string & filename ) { re <nl> <nl> FileUtils * FileUtils : : s_sharedFileUtils = NULL ; <nl> <nl> - / / XXX : deprecated <nl> - FileUtils * FileUtils : : sharedFileUtils ( ) <nl> - { <nl> - return FileUtils : : getInstance ( ) ; <nl> - } <nl> <nl> void FileUtils : : destroyInstance ( ) <nl> { <nl> CC_SAFE_DELETE ( s_sharedFileUtils ) ; <nl> } <nl> <nl> - / / XXX : deprecated <nl> - void FileUtils : : purgeFileUtils ( ) <nl> - { <nl> - FileUtils : : destroyInstance ( ) ; <nl> - } <nl> - <nl> FileUtils : : FileUtils ( ) <nl> : _filenameLookupDict ( NULL ) <nl> { <nl> void FileUtils : : purgeCachedEntries ( ) <nl> _fullPathCache . clear ( ) ; <nl> } <nl> <nl> - unsigned char * FileUtils : : getFileData ( const char * filename , const char * pszMode , unsigned long * pSize ) <nl> + unsigned char * FileUtils : : getFileData ( const char * filename , const char * mode , unsigned long * size ) <nl> { <nl> - unsigned char * pBuffer = NULL ; <nl> - CCASSERT ( filename ! = NULL & & pSize ! = NULL & & pszMode ! = NULL , " Invalid parameters . " ) ; <nl> - * pSize = 0 ; <nl> + unsigned char * buffer = NULL ; <nl> + CCASSERT ( filename ! = NULL & & size ! = NULL & & mode ! = NULL , " Invalid parameters . " ) ; <nl> + * size = 0 ; <nl> do <nl> { <nl> / / read the file from hardware <nl> std : : string fullPath = fullPathForFilename ( filename ) ; <nl> - FILE * fp = fopen ( fullPath . c_str ( ) , pszMode ) ; <nl> + FILE * fp = fopen ( fullPath . c_str ( ) , mode ) ; <nl> CC_BREAK_IF ( ! 
fp ) ; <nl> <nl> fseek ( fp , 0 , SEEK_END ) ; <nl> - * pSize = ftell ( fp ) ; <nl> + * size = ftell ( fp ) ; <nl> fseek ( fp , 0 , SEEK_SET ) ; <nl> - pBuffer = new unsigned char [ * pSize ] ; <nl> - * pSize = fread ( pBuffer , sizeof ( unsigned char ) , * pSize , fp ) ; <nl> + buffer = new unsigned char [ * size ] ; <nl> + * size = fread ( buffer , sizeof ( unsigned char ) , * size , fp ) ; <nl> fclose ( fp ) ; <nl> } while ( 0 ) ; <nl> <nl> - if ( ! pBuffer ) <nl> + if ( ! buffer ) <nl> { <nl> std : : string msg = " Get data from file ( " ; <nl> msg . append ( filename ) . append ( " ) failed ! " ) ; <nl> <nl> CCLOG ( " % s " , msg . c_str ( ) ) ; <nl> } <nl> - return pBuffer ; <nl> + return buffer ; <nl> } <nl> <nl> - unsigned char * FileUtils : : getFileDataFromZip ( const char * pszZipFilePath , const char * filename , unsigned long * pSize ) <nl> + unsigned char * FileUtils : : getFileDataFromZip ( const char * zipFilePath , const char * filename , unsigned long * size ) <nl> { <nl> - unsigned char * pBuffer = NULL ; <nl> + unsigned char * buffer = NULL ; <nl> unzFile pFile = NULL ; <nl> - * pSize = 0 ; <nl> + * size = 0 ; <nl> <nl> do <nl> { <nl> - CC_BREAK_IF ( ! pszZipFilePath | | ! filename ) ; <nl> - CC_BREAK_IF ( strlen ( pszZipFilePath ) = = 0 ) ; <nl> + CC_BREAK_IF ( ! zipFilePath | | ! filename ) ; <nl> + CC_BREAK_IF ( strlen ( zipFilePath ) = = 0 ) ; <nl> <nl> - pFile = unzOpen ( pszZipFilePath ) ; <nl> + pFile = unzOpen ( zipFilePath ) ; <nl> CC_BREAK_IF ( ! pFile ) ; <nl> <nl> int nRet = unzLocateFile ( pFile , filename , 1 ) ; <nl> unsigned char * FileUtils : : getFileDataFromZip ( const char * pszZipFilePath , const c <nl> nRet = unzOpenCurrentFile ( pFile ) ; <nl> CC_BREAK_IF ( UNZ_OK ! = nRet ) ; <nl> <nl> - pBuffer = new unsigned char [ FileInfo . uncompressed_size ] ; <nl> - int CC_UNUSED nSize = unzReadCurrentFile ( pFile , pBuffer , FileInfo . uncompressed_size ) ; <nl> + buffer = new unsigned char [ FileInfo . uncompressed_size ] ; <nl> + int CC_UNUSED nSize = unzReadCurrentFile ( pFile , buffer , FileInfo . uncompressed_size ) ; <nl> CCASSERT ( nSize = = 0 | | nSize = = ( int ) FileInfo . uncompressed_size , " the file size is wrong " ) ; <nl> <nl> - * pSize = FileInfo . uncompressed_size ; <nl> + * size = FileInfo . uncompressed_size ; <nl> unzCloseCurrentFile ( pFile ) ; <nl> } while ( 0 ) ; <nl> <nl> unsigned char * FileUtils : : getFileDataFromZip ( const char * pszZipFilePath , const c <nl> unzClose ( pFile ) ; <nl> } <nl> <nl> - return pBuffer ; <nl> + return buffer ; <nl> } <nl> <nl> - std : : string FileUtils : : getNewFilename ( const char * filename ) <nl> + std : : string FileUtils : : getNewFilename ( const std : : string & filename ) <nl> { <nl> - const char * pszNewFileName = NULL ; <nl> + std : : string newFileName ; <nl> + <nl> / / in Lookup Filename dictionary ? <nl> String * fileNameFound = _filenameLookupDict ? ( String * ) _filenameLookupDict - > objectForKey ( filename ) : NULL ; <nl> if ( NULL = = fileNameFound | | fileNameFound - > length ( ) = = 0 ) { <nl> - pszNewFileName = filename ; <nl> + newFileName = filename ; <nl> } <nl> else { <nl> - pszNewFileName = fileNameFound - > getCString ( ) ; <nl> - / / CCLOG ( " FOUND NEW FILE NAME : % s . 
" , pszNewFileName ) ; <nl> + newFileName = fileNameFound - > getCString ( ) ; <nl> } <nl> - return pszNewFileName ; <nl> + return newFileName ; <nl> } <nl> <nl> std : : string FileUtils : : getPathForFilename ( const std : : string & filename , const std : : string & resolutionDirectory , const std : : string & searchPath ) <nl> std : : string FileUtils : : getPathForFilename ( const std : : string & filename , const std <nl> } <nl> <nl> <nl> - std : : string FileUtils : : fullPathForFilename ( const char * filename ) <nl> + std : : string FileUtils : : fullPathForFilename ( const std : : string & filename ) <nl> { <nl> - CCASSERT ( filename ! = NULL , " CCFileUtils : Invalid path " ) ; <nl> - <nl> - std : : string strFileName = filename ; <nl> if ( isAbsolutePath ( filename ) ) <nl> { <nl> - / / CCLOG ( " Return absolute path ( % s ) directly . " , filename ) ; <nl> return filename ; <nl> } <nl> - <nl> + <nl> / / Already Cached ? <nl> - std : : map < std : : string , std : : string > : : iterator cacheIter = _fullPathCache . find ( filename ) ; <nl> - if ( cacheIter ! = _fullPathCache . end ( ) ) <nl> + auto cacheIter = _fullPathCache . find ( filename ) ; <nl> + if ( cacheIter ! = _fullPathCache . end ( ) ) <nl> { <nl> - / / CCLOG ( " Return full path from cache : % s " , cacheIter - > second . c_str ( ) ) ; <nl> return cacheIter - > second ; <nl> } <nl> <nl> / / Get the new file name . <nl> - std : : string newFilename = getNewFilename ( filename ) ; <nl> + std : : string newFilename ( getNewFilename ( filename ) ) ; <nl> <nl> string fullpath = " " ; <nl> <nl> - for ( auto searchPathsIter = _searchPathArray . begin ( ) ; <nl> - searchPathsIter ! = _searchPathArray . end ( ) ; + + searchPathsIter ) { <nl> - for ( auto resOrderIter = _searchResolutionsOrderArray . begin ( ) ; <nl> - resOrderIter ! = _searchResolutionsOrderArray . end ( ) ; + + resOrderIter ) { <nl> - <nl> - / / CCLOG ( " SEARCHING : % s \ n " , std : : string ( * searchPathsIter + * resOrderIter + newFilename ) . c_str ( ) ) ; <nl> + for ( auto searchIt = _searchPathArray . begin ( ) ; searchIt ! = _searchPathArray . end ( ) ; + + searchIt ) { <nl> + for ( auto resolutionIt = _searchResolutionsOrderArray . begin ( ) ; resolutionIt ! = _searchResolutionsOrderArray . end ( ) ; + + resolutionIt ) { <nl> <nl> - fullpath = this - > getPathForFilename ( newFilename , * resOrderIter , * searchPathsIter ) ; <nl> + fullpath = this - > getPathForFilename ( newFilename , * resolutionIt , * searchIt ) ; <nl> <nl> if ( fullpath . length ( ) > 0 ) <nl> { <nl> / / Using the filename passed in as key . <nl> _fullPathCache . insert ( std : : pair < std : : string , std : : string > ( filename , fullpath ) ) ; <nl> - / / CCLOG ( " Returning path : % s \ n " , fullpath . c_str ( ) ) ; <nl> return fullpath ; <nl> } <nl> } <nl> } <nl> <nl> - / / CCLOG ( " cocos2d : fullPathForFilename : No file found at % s . Possible missing file . " , filename ) ; <nl> + CCLOG ( " cocos2d : fullPathForFilename : No file found at % s . Possible missing file . " , filename . c_str ( ) ) ; <nl> <nl> + / / XXX : Should it return nullptr ? or an empty string ? <nl> / / The file wasn ' t found , return the file name passed in . 
<nl> return filename ; <nl> } <nl> <nl> - const char * FileUtils : : fullPathFromRelativeFile ( const char * filename , const char * pszRelativeFile ) <nl> + std : : string FileUtils : : fullPathFromRelativeFile ( const std : : string & filename , const std : : string & relativeFile ) <nl> { <nl> - std : : string relativeFile = pszRelativeFile ; <nl> - String * pRet = String : : create ( " " ) ; <nl> - pRet - > _string = relativeFile . substr ( 0 , relativeFile . rfind ( ' / ' ) + 1 ) ; <nl> - pRet - > _string + = getNewFilename ( filename ) ; <nl> - return pRet - > getCString ( ) ; <nl> + return relativeFile . substr ( 0 , relativeFile . rfind ( ' / ' ) + 1 ) + getNewFilename ( filename ) ; <nl> } <nl> <nl> void FileUtils : : setSearchResolutionsOrder ( const std : : vector < std : : string > & searchResolutionsOrder ) <nl> { <nl> - bool bExistDefault = false ; <nl> + bool existDefault = false ; <nl> _fullPathCache . clear ( ) ; <nl> _searchResolutionsOrderArray . clear ( ) ; <nl> - for ( std : : vector < std : : string > : : const_iterator iter = searchResolutionsOrder . begin ( ) ; iter ! = searchResolutionsOrder . end ( ) ; + + iter ) <nl> + for ( auto iter = searchResolutionsOrder . begin ( ) ; iter ! = searchResolutionsOrder . end ( ) ; + + iter ) <nl> { <nl> std : : string resolutionDirectory = * iter ; <nl> - if ( ! bExistDefault & & resolutionDirectory = = " " ) <nl> + if ( ! existDefault & & resolutionDirectory = = " " ) <nl> { <nl> - bExistDefault = true ; <nl> + existDefault = true ; <nl> } <nl> <nl> if ( resolutionDirectory . length ( ) > 0 & & resolutionDirectory [ resolutionDirectory . length ( ) - 1 ] ! = ' / ' ) <nl> void FileUtils : : setSearchResolutionsOrder ( const std : : vector < std : : string > & search <nl> <nl> _searchResolutionsOrderArray . push_back ( resolutionDirectory ) ; <nl> } <nl> - if ( ! bExistDefault ) <nl> + if ( ! existDefault ) <nl> { <nl> _searchResolutionsOrderArray . push_back ( " " ) ; <nl> } <nl> } <nl> <nl> - void FileUtils : : addSearchResolutionsOrder ( const char * order ) <nl> + void FileUtils : : addSearchResolutionsOrder ( const std : : string & order ) <nl> { <nl> _searchResolutionsOrderArray . push_back ( order ) ; <nl> } <nl> const std : : vector < std : : string > & FileUtils : : getSearchResolutionsOrder ( ) <nl> return _searchResolutionsOrderArray ; <nl> } <nl> <nl> - const std : : vector < std : : string > & FileUtils : : getSearchPaths ( ) <nl> + const std : : vector < std : : string > & FileUtils : : getSearchPaths ( ) const <nl> { <nl> return _searchPathArray ; <nl> } <nl> <nl> void FileUtils : : setSearchPaths ( const std : : vector < std : : string > & searchPaths ) <nl> { <nl> - bool bExistDefaultRootPath = false ; <nl> + bool existDefaultRootPath = false ; <nl> <nl> _fullPathCache . clear ( ) ; <nl> _searchPathArray . clear ( ) ; <nl> - for ( std : : vector < std : : string > : : const_iterator iter = searchPaths . begin ( ) ; iter ! = searchPaths . end ( ) ; + + iter ) <nl> + for ( auto iter = searchPaths . begin ( ) ; iter ! = searchPaths . end ( ) ; + + iter ) <nl> { <nl> std : : string strPrefix ; <nl> std : : string path ; <nl> + <nl> if ( ! isAbsolutePath ( * iter ) ) <nl> { / / Not an absolute path <nl> strPrefix = _defaultResRootPath ; <nl> } <nl> - path = strPrefix + ( * iter ) ; <nl> + path = strPrefix + ( * iter ) ; <nl> if ( path . length ( ) > 0 & & path [ path . length ( ) - 1 ] ! = ' / ' ) <nl> { <nl> path + = " / " ; <nl> } <nl> - if ( ! bExistDefaultRootPath & & path = = _defaultResRootPath ) <nl> + if ( ! 
existDefaultRootPath & & path = = _defaultResRootPath ) <nl> { <nl> - bExistDefaultRootPath = true ; <nl> + existDefaultRootPath = true ; <nl> } <nl> _searchPathArray . push_back ( path ) ; <nl> } <nl> <nl> - if ( ! bExistDefaultRootPath ) <nl> + if ( ! existDefaultRootPath ) <nl> { <nl> / / CCLOG ( " Default root path doesn ' t exist , adding it . " ) ; <nl> _searchPathArray . push_back ( _defaultResRootPath ) ; <nl> } <nl> } <nl> <nl> - void FileUtils : : addSearchPath ( const char * path_ ) <nl> + void FileUtils : : addSearchPath ( const std : : string & searchpath ) <nl> { <nl> std : : string strPrefix ; <nl> - std : : string path ( path_ ) ; <nl> - if ( ! isAbsolutePath ( path ) ) <nl> - { / / Not an absolute path <nl> + if ( ! isAbsolutePath ( searchpath ) ) <nl> strPrefix = _defaultResRootPath ; <nl> - } <nl> - path = strPrefix + path ; <nl> + <nl> + std : : string path = strPrefix + searchpath ; <nl> if ( path . length ( ) > 0 & & path [ path . length ( ) - 1 ] ! = ' / ' ) <nl> { <nl> path + = " / " ; <nl> void FileUtils : : setFilenameLookupDictionary ( Dictionary * pFilenameLookupDict ) <nl> CC_SAFE_RETAIN ( _filenameLookupDict ) ; <nl> } <nl> <nl> - void FileUtils : : loadFilenameLookupDictionaryFromFile ( const char * filename ) <nl> + void FileUtils : : loadFilenameLookupDictionaryFromFile ( const std : : string & filename ) <nl> { <nl> - std : : string fullPath = this - > fullPathForFilename ( filename ) ; <nl> + std : : string fullPath = fullPathForFilename ( filename ) ; <nl> if ( fullPath . length ( ) > 0 ) <nl> { <nl> - Dictionary * pDict = Dictionary : : createWithContentsOfFile ( fullPath . c_str ( ) ) ; <nl> - if ( pDict ) <nl> + Dictionary * dict = Dictionary : : createWithContentsOfFile ( fullPath . c_str ( ) ) ; <nl> + if ( dict ) <nl> { <nl> - Dictionary * pMetadata = ( Dictionary * ) pDict - > objectForKey ( " metadata " ) ; <nl> - int version = ( ( String * ) pMetadata - > objectForKey ( " version " ) ) - > intValue ( ) ; <nl> + Dictionary * metadata = static_cast < Dictionary * > ( dict - > objectForKey ( " metadata " ) ) ; <nl> + int version = static_cast < String * > ( metadata - > objectForKey ( " version " ) ) - > intValue ( ) ; <nl> if ( version ! = 1 ) <nl> { <nl> - CCLOG ( " cocos2d : ERROR : Invalid filenameLookup dictionary version : % ld . Filename : % s " , ( long ) version , filename ) ; <nl> + CCLOG ( " cocos2d : ERROR : Invalid filenameLookup dictionary version : % ld . Filename : % s " , ( long ) version , filename . c_str ( ) ) ; <nl> return ; <nl> } <nl> - setFilenameLookupDictionary ( ( Dictionary * ) pDict - > objectForKey ( " filenames " ) ) ; <nl> + setFilenameLookupDictionary ( static_cast < Dictionary * > ( dict - > objectForKey ( " filenames " ) ) ) ; <nl> } <nl> } <nl> } <nl> std : : string FileUtils : : getFullPathForDirectoryAndFilename ( const std : : string & str <nl> return ret ; <nl> } <nl> <nl> - bool FileUtils : : isAbsolutePath ( const std : : string & strPath ) <nl> + bool FileUtils : : isAbsolutePath ( const std : : string & strPath ) const <nl> { <nl> - return strPath [ 0 ] = = ' / ' ? true : false ; <nl> + return ( strPath [ 0 ] = = ' / ' ) ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / Notification support when getFileData from invalid file path . 
<nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - static bool s_bPopupNotify = true ; <nl> + static bool s_popupNotify = true ; <nl> <nl> - void FileUtils : : setPopupNotify ( bool bNotify ) <nl> + void FileUtils : : setPopupNotify ( bool notify ) <nl> { <nl> - s_bPopupNotify = bNotify ; <nl> + s_popupNotify = notify ; <nl> } <nl> <nl> bool FileUtils : : isPopupNotify ( ) <nl> { <nl> - return s_bPopupNotify ; <nl> + return s_popupNotify ; <nl> } <nl> <nl> NS_CC_END <nl> mmm a / cocos2dx / platform / CCFileUtils . h <nl> ppp b / cocos2dx / platform / CCFileUtils . h <nl> class CC_DLL FileUtils <nl> static void destroyInstance ( ) ; <nl> <nl> / * * @ deprecated Use getInstance ( ) instead * / <nl> - CC_DEPRECATED_ATTRIBUTE static FileUtils * sharedFileUtils ( ) ; <nl> + CC_DEPRECATED_ATTRIBUTE static FileUtils * sharedFileUtils ( ) { return getInstance ( ) ; } <nl> <nl> / * * @ deprecated Use destroyInstance ( ) instead * / <nl> - CC_DEPRECATED_ATTRIBUTE static void purgeFileUtils ( ) ; <nl> + CC_DEPRECATED_ATTRIBUTE static void purgeFileUtils ( ) { destroyInstance ( ) ; } <nl> <nl> / * * <nl> * The destructor of FileUtils . <nl> class CC_DLL FileUtils <nl> * @ return Upon success , a pointer to the data is returned , otherwise NULL . <nl> * @ warning Recall : you are responsible for calling delete [ ] on any Non - NULL pointer returned . <nl> * / <nl> - virtual unsigned char * getFileData ( const char * filename , const char * pszMode , unsigned long * pSize ) ; <nl> + virtual unsigned char * getFileData ( const char * filename , const char * mode , unsigned long * size ) ; <nl> <nl> / * * <nl> * Gets resource file data from a zip file . <nl> class CC_DLL FileUtils <nl> * @ return Upon success , a pointer to the data is returned , otherwise NULL . <nl> * @ warning Recall : you are responsible for calling delete [ ] on any Non - NULL pointer returned . <nl> * / <nl> - virtual unsigned char * getFileDataFromZip ( const char * pszZipFilePath , const char * filename , unsigned long * size ) ; <nl> + virtual unsigned char * getFileDataFromZip ( const char * zipFilePath , const char * filename , unsigned long * size ) ; <nl> <nl> <nl> / * * Returns the fullpath for a given filename . <nl> class CC_DLL FileUtils <nl> <nl> @ since v2 . 1 <nl> * / <nl> - virtual std : : string fullPathForFilename ( const char * filename ) ; <nl> + virtual std : : string fullPathForFilename ( const std : : string & filename ) ; <nl> <nl> / * * <nl> * Loads the filenameLookup dictionary from the contents of a filename . <nl> class CC_DLL FileUtils <nl> * <nl> @ since v2 . 1 <nl> * / <nl> - virtual void loadFilenameLookupDictionaryFromFile ( const char * filename ) ; <nl> + virtual void loadFilenameLookupDictionaryFromFile ( const std : : string & filename ) ; <nl> <nl> / * * <nl> * Sets the filenameLookup dictionary . <nl> class CC_DLL FileUtils <nl> * @ param pFilenameLookupDict The dictionary for replacing filename . <nl> * @ since v2 . 1 <nl> * / <nl> - virtual void setFilenameLookupDictionary ( Dictionary * pFilenameLookupDict ) ; <nl> + virtual void setFilenameLookupDictionary ( Dictionary * filenameLookupDict ) ; <nl> <nl> / * * <nl> * Gets full path from a file name and the path of the reletive file . <nl> class CC_DLL FileUtils <nl> * Return : / User / path1 / path2 / hello . pvr ( If there a a key ( hello . png ) - value ( hello . pvr ) in FilenameLookup dictionary . 
) <nl> * <nl> * / <nl> - virtual const char * fullPathFromRelativeFile ( const char * filename , const char * pszRelativeFile ) ; <nl> + virtual std : : string fullPathFromRelativeFile ( const std : : string & filename , const std : : string & relativeFile ) ; <nl> <nl> / * * <nl> * Sets the array that contains the search order of the resources . <nl> class CC_DLL FileUtils <nl> * @ see setSearchResolutionsOrder ( ) , fullPathForFilename ( ) . <nl> * @ since v2 . 1 <nl> * / <nl> - virtual void addSearchResolutionsOrder ( const char * order ) ; <nl> + virtual void addSearchResolutionsOrder ( const std : : string & order ) ; <nl> <nl> / * * <nl> * Gets the array that contains the search order of the resources . <nl> class CC_DLL FileUtils <nl> * <nl> * @ since v2 . 1 <nl> * / <nl> - void addSearchPath ( const char * path ) ; <nl> + void addSearchPath ( const std : : string & path ) ; <nl> <nl> / * * <nl> * Gets the array of search paths . <nl> class CC_DLL FileUtils <nl> * @ return The array of search paths . <nl> * @ see fullPathForFilename ( const char * ) . <nl> * / <nl> - virtual const std : : vector < std : : string > & getSearchPaths ( ) ; <nl> + virtual const std : : vector < std : : string > & getSearchPaths ( ) const ; <nl> <nl> / * * <nl> * Gets the writable path . <nl> * @ return The path that can be write / read a file in <nl> * / <nl> - virtual std : : string getWritablePath ( ) = 0 ; <nl> + virtual std : : string getWritablePath ( ) const = 0 ; <nl> <nl> / * * <nl> * Checks whether a file exists . <nl> class CC_DLL FileUtils <nl> * @ param strFilePath The path of the file , it could be a relative or absolute path . <nl> * @ return true if the file exists , otherwise it will return false . <nl> * / <nl> - virtual bool isFileExist ( const std : : string & strFilePath ) = 0 ; <nl> + virtual bool isFileExist ( const std : : string & filePath ) const = 0 ; <nl> <nl> / * * <nl> * Checks whether the path is an absolute path . <nl> class CC_DLL FileUtils <nl> * @ param strPath The path that needs to be checked . <nl> * @ return true if it ' s an absolute path , otherwise it will return false . <nl> * / <nl> - virtual bool isAbsolutePath ( const std : : string & strPath ) ; <nl> + virtual bool isAbsolutePath ( const std : : string & path ) const ; <nl> <nl> <nl> / * * <nl> * Sets / Gets whether to pop - up a message box when failed to load an image . <nl> * / <nl> - virtual void setPopupNotify ( bool bNotify ) ; <nl> + virtual void setPopupNotify ( bool notify ) ; <nl> virtual bool isPopupNotify ( ) ; <nl> <nl> protected : <nl> class CC_DLL FileUtils <nl> <nl> / * * <nl> * Gets the new filename from the filename lookup dictionary . <nl> + * It is possible to have a override names . <nl> * @ param filename The original filename . <nl> * @ return The new filename after searching in the filename lookup dictionary . <nl> * If the original filename wasn ' t in the dictionary , it will return the original filename . <nl> * / <nl> - virtual std : : string getNewFilename ( const char * filename ) ; <nl> + virtual std : : string getNewFilename ( const std : : string & filename ) ; <nl> <nl> / * * <nl> * Gets full path for filename , resolution directory and search path . <nl> class CC_DLL FileUtils <nl> * @ param strFilename The name of the file . <nl> * @ return The full path of the file , if the file can ' t be found , it will return an empty string . 
<nl> * / <nl> - virtual std : : string getFullPathForDirectoryAndFilename ( const std : : string & strDirectory , const std : : string & strFilename ) ; <nl> + virtual std : : string getFullPathForDirectoryAndFilename ( const std : : string & directory , const std : : string & filename ) ; <nl> <nl> / * * <nl> * Creates a dictionary by the contents of a file . <nl> mmm a / cocos2dx / platform / android / CCFileUtilsAndroid . cpp <nl> ppp b / cocos2dx / platform / android / CCFileUtilsAndroid . cpp <nl> bool FileUtilsAndroid : : init ( ) <nl> return FileUtils : : init ( ) ; <nl> } <nl> <nl> - bool FileUtilsAndroid : : isFileExist ( const std : : string & strFilePath ) <nl> + bool FileUtilsAndroid : : isFileExist ( const std : : string & strFilePath ) const <nl> { <nl> if ( 0 = = strFilePath . length ( ) ) <nl> { <nl> bool FileUtilsAndroid : : isFileExist ( const std : : string & strFilePath ) <nl> return bFound ; <nl> } <nl> <nl> - bool FileUtilsAndroid : : isAbsolutePath ( const std : : string & strPath ) <nl> + bool FileUtilsAndroid : : isAbsolutePath ( const std : : string & strPath ) const <nl> { <nl> / / On Android , there are two situations for full path . <nl> / / 1 ) Files in APK , e . g . assets / path / path / file . png <nl> unsigned char * FileUtilsAndroid : : doGetFileData ( const char * filename , const char * <nl> return pData ; <nl> } <nl> <nl> - string FileUtilsAndroid : : getWritablePath ( ) <nl> + string FileUtilsAndroid : : getWritablePath ( ) const <nl> { <nl> / / Fix for Nexus 10 ( Android 4 . 2 multi - user environment ) <nl> / / the path is retrieved through Java Context . getCacheDir ( ) method <nl> mmm a / cocos2dx / platform / android / CCFileUtilsAndroid . h <nl> ppp b / cocos2dx / platform / android / CCFileUtilsAndroid . h <nl> class CC_DLL FileUtilsAndroid : public FileUtils <nl> / * override funtions * / <nl> bool init ( ) ; <nl> virtual unsigned char * getFileData ( const char * filename , const char * pszMode , unsigned long * pSize ) ; <nl> - virtual std : : string getWritablePath ( ) ; <nl> - virtual bool isFileExist ( const std : : string & strFilePath ) ; <nl> - virtual bool isAbsolutePath ( const std : : string & strPath ) ; <nl> + <nl> + virtual std : : string getWritablePath ( ) const ; <nl> + virtual bool isFileExist ( const std : : string & strFilePath ) const ; <nl> + virtual bool isAbsolutePath ( const std : : string & strPath ) const ; <nl> <nl> / * * This function is android specific . It is used for TextureCache : : addImageAsync ( ) . <nl> Don ' t use it in your codes . <nl> similarity index 80 % <nl> rename from cocos2dx / platform / ios / CCFileUtilsIOS . h <nl> rename to cocos2dx / platform / apple / CCFileUtilsApple . h <nl> mmm a / cocos2dx / platform / ios / CCFileUtilsIOS . h <nl> ppp b / cocos2dx / platform / apple / CCFileUtilsApple . h <nl> <nl> OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN <nl> THE SOFTWARE . <nl> * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - # ifndef __CC_FILEUTILS_IOS_H__ <nl> - # define __CC_FILEUTILS_IOS_H__ <nl> + # ifndef __CC_FILEUTILS_APPLE_H__ <nl> + # define __CC_FILEUTILS_APPLE_H__ <nl> <nl> # include " CCFileUtils . h " <nl> # include < string > <nl> NS_CC_BEGIN <nl> * / <nl> <nl> / / ! 
@ brief Helper class to handle file operations <nl> - class CC_DLL FileUtilsIOS : public FileUtils <nl> + class CC_DLL FileUtilsApple : public FileUtils <nl> { <nl> public : <nl> / * override funtions * / <nl> - virtual std : : string getWritablePath ( ) ; <nl> - virtual bool isFileExist ( const std : : string & strFilePath ) ; <nl> - virtual bool isAbsolutePath ( const std : : string & strPath ) ; <nl> - virtual std : : string getFullPathForDirectoryAndFilename ( const std : : string & strDirectory , const std : : string & strFilename ) ; <nl> + virtual std : : string getWritablePath ( ) const override ; <nl> + virtual bool isFileExist ( const std : : string & strFilePath ) const override ; <nl> + virtual std : : string getFullPathForDirectoryAndFilename ( const std : : string & strDirectory , const std : : string & strFilename ) override ; <nl> <nl> - virtual Dictionary * createDictionaryWithContentsOfFile ( const std : : string & filename ) ; <nl> - virtual bool writeToFile ( Dictionary * dict , const std : : string & fullPath ) ; <nl> + virtual Dictionary * createDictionaryWithContentsOfFile ( const std : : string & filename ) override ; <nl> + virtual bool writeToFile ( Dictionary * dict , const std : : string & fullPath ) override ; <nl> <nl> - virtual Array * createArrayWithContentsOfFile ( const std : : string & filename ) ; <nl> + virtual Array * createArrayWithContentsOfFile ( const std : : string & filename ) override ; <nl> } ; <nl> <nl> / / end of platform group <nl> class CC_DLL FileUtilsIOS : public FileUtils <nl> <nl> NS_CC_END <nl> <nl> - # endif / / __CC_FILEUTILS_IOS_H__ <nl> + # endif / / __CC_FILEUTILS_APPLE_H__ <nl> <nl> similarity index 91 % <nl> rename from cocos2dx / platform / ios / CCFileUtilsIOS . mm <nl> rename to cocos2dx / platform / apple / CCFileUtilsApple . mm <nl> mmm a / cocos2dx / platform / ios / CCFileUtilsIOS . mm <nl> ppp b / cocos2dx / platform / apple / CCFileUtilsApple . mm <nl> of this software and associated documentation files ( the " Software " ) , to deal <nl> THE SOFTWARE . <nl> * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> # import < Foundation / Foundation . h > <nl> - # import < UIKit / UIDevice . h > <nl> <nl> # include < string > <nl> # include < stack > <nl> of this software and associated documentation files ( the " Software " ) , to deal <nl> # include " CCDictionary . h " <nl> # include " support / zip_support / unzip . h " <nl> <nl> - # include " CCFileUtilsIOS . h " <nl> + # include " CCFileUtilsApple . h " <nl> <nl> NS_CC_BEGIN <nl> <nl> static void addObjectToNSDict ( const char * key , Object * object , NSMutableDiction <nl> } <nl> } <nl> <nl> + <nl> + # pragma mark - FileUtils <nl> + <nl> + static NSFileManager * s_fileManager = [ NSFileManager defaultManager ] ; <nl> + <nl> FileUtils * FileUtils : : getInstance ( ) <nl> { <nl> if ( s_sharedFileUtils = = NULL ) <nl> { <nl> - s_sharedFileUtils = new FileUtilsIOS ( ) ; <nl> + s_sharedFileUtils = new FileUtilsApple ( ) ; <nl> if ( ! 
s_sharedFileUtils - > init ( ) ) <nl> { <nl> delete s_sharedFileUtils ; <nl> s_sharedFileUtils = NULL ; <nl> - CCLOG ( " ERROR : Could not init CCFileUtilsIOS " ) ; <nl> + CCLOG ( " ERROR : Could not init CCFileUtilsApple " ) ; <nl> } <nl> } <nl> return s_sharedFileUtils ; <nl> } <nl> <nl> <nl> - static NSFileManager * s_fileManager = [ NSFileManager defaultManager ] ; <nl> - <nl> - std : : string FileUtilsIOS : : getWritablePath ( ) <nl> + std : : string FileUtilsApple : : getWritablePath ( ) const <nl> { <nl> / / save to document folder <nl> NSArray * paths = NSSearchPathForDirectoriesInDomains ( NSDocumentDirectory , NSUserDomainMask , YES ) ; <nl> static void addObjectToNSDict ( const char * key , Object * object , NSMutableDiction <nl> return strRet ; <nl> } <nl> <nl> - bool FileUtilsIOS : : isFileExist ( const std : : string & strFilePath ) <nl> + bool FileUtilsApple : : isFileExist ( const std : : string & strFilePath ) const <nl> { <nl> - if ( 0 = = strFilePath . length ( ) ) <nl> + if ( strFilePath . length ( ) = = 0 ) <nl> { <nl> return false ; <nl> } <nl> static void addObjectToNSDict ( const char * key , Object * object , NSMutableDiction <nl> return bRet ; <nl> } <nl> <nl> - std : : string FileUtilsIOS : : getFullPathForDirectoryAndFilename ( const std : : string & strDirectory , const std : : string & strFilename ) <nl> + std : : string FileUtilsApple : : getFullPathForDirectoryAndFilename ( const std : : string & strDirectory , const std : : string & strFilename ) <nl> { <nl> if ( strDirectory [ 0 ] ! = ' / ' ) <nl> { <nl> static void addObjectToNSDict ( const char * key , Object * object , NSMutableDiction <nl> return " " ; <nl> } <nl> <nl> - bool FileUtilsIOS : : isAbsolutePath ( const std : : string & strPath ) <nl> - { <nl> - NSString * path = [ NSString stringWithUTF8String : strPath . c_str ( ) ] ; <nl> - return [ path isAbsolutePath ] ? true : false ; <nl> - } <nl> - <nl> - Dictionary * FileUtilsIOS : : createDictionaryWithContentsOfFile ( const std : : string & filename ) <nl> + Dictionary * FileUtilsApple : : createDictionaryWithContentsOfFile ( const std : : string & filename ) <nl> { <nl> - std : : string fullPath = FileUtils : : getInstance ( ) - > fullPathForFilename ( filename . c_str ( ) ) ; <nl> + std : : string fullPath = fullPathForFilename ( filename ) ; <nl> NSString * pPath = [ NSString stringWithUTF8String : fullPath . c_str ( ) ] ; <nl> NSDictionary * pDict = [ NSDictionary dictionaryWithContentsOfFile : pPath ] ; <nl> <nl> static void addObjectToNSDict ( const char * key , Object * object , NSMutableDiction <nl> } <nl> } <nl> <nl> - bool FileUtilsIOS : : writeToFile ( Dictionary * dict , const std : : string & fullPath ) <nl> + bool FileUtilsApple : : writeToFile ( Dictionary * dict , const std : : string & fullPath ) <nl> { <nl> / / CCLOG ( " iOS | | Mac Dictionary % d write to file % s " , dict - > _ID , fullPath . 
c_str ( ) ) ; <nl> NSMutableDictionary * nsDict = [ NSMutableDictionary dictionary ] ; <nl> static void addObjectToNSDict ( const char * key , Object * object , NSMutableDiction <nl> return true ; <nl> } <nl> <nl> - Array * FileUtilsIOS : : createArrayWithContentsOfFile ( const std : : string & filename ) <nl> + Array * FileUtilsApple : : createArrayWithContentsOfFile ( const std : : string & filename ) <nl> { <nl> / / NSString * pPath = [ NSString stringWithUTF8String : pFileName ] ; <nl> / / NSString * pathExtension = [ pPath pathExtension ] ; <nl> / / pPath = [ pPath stringByDeletingPathExtension ] ; <nl> / / pPath = [ [ NSBundle mainBundle ] pathForResource : pPath ofType : pathExtension ] ; <nl> / / fixing cannot read data using Array : : createWithContentsOfFile <nl> - std : : string fullPath = FileUtils : : getInstance ( ) - > fullPathForFilename ( filename . c_str ( ) ) ; <nl> + std : : string fullPath = fullPathForFilename ( filename ) ; <nl> NSString * path = [ NSString stringWithUTF8String : fullPath . c_str ( ) ] ; <nl> NSArray * array = [ NSArray arrayWithContentsOfFile : path ] ; <nl> <nl> similarity index 100 % <nl> rename from cocos2dx / platform / ios / CCLock . cpp <nl> rename to cocos2dx / platform / apple / CCLock . cpp <nl> similarity index 100 % <nl> rename from cocos2dx / platform / ios / CCLock . h <nl> rename to cocos2dx / platform / apple / CCLock . h <nl> similarity index 98 % <nl> rename from cocos2dx / platform / mac / CCThread . mm <nl> rename to cocos2dx / platform / apple / CCThread . mm <nl> mmm a / cocos2dx / platform / mac / CCThread . mm <nl> ppp b / cocos2dx / platform / apple / CCThread . mm <nl> of this software and associated documentation files ( the " Software " ) , to deal <nl> OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN <nl> THE SOFTWARE . <nl> * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - <nl> + / / # import < UIKit / UIKit . h > <nl> # include " CCThread . h " <nl> <nl> NS_CC_BEGIN <nl> mmm a / cocos2dx / platform / emscripten / CCFileUtilsEmscripten . cpp <nl> ppp b / cocos2dx / platform / emscripten / CCFileUtilsEmscripten . cpp <nl> bool FileUtilsEmscripten : : init ( ) <nl> return FileUtils : : init ( ) ; <nl> } <nl> <nl> - string FileUtilsEmscripten : : getWritablePath ( ) <nl> + string FileUtilsEmscripten : : getWritablePath ( ) const <nl> { <nl> / / Let ' s write it in the current working directory ' s data folder <nl> char cwd [ FILENAME_MAX ] = { 0 } ; <nl> string FileUtilsEmscripten : : getWritablePath ( ) <nl> return path ; <nl> } <nl> <nl> - bool FileUtilsEmscripten : : isAbsolutePath ( const std : : string & strPath ) <nl> + bool FileUtilsEmscripten : : isAbsolutePath ( const std : : string & strPath ) const <nl> { <nl> if ( strPath [ 0 ] = = ' / ' | | strPath . find ( _defaultResRootPath ) = = 0 ) <nl> { <nl> bool FileUtilsEmscripten : : isAbsolutePath ( const std : : string & strPath ) <nl> return false ; <nl> } <nl> <nl> - bool FileUtilsEmscripten : : isFileExist ( const std : : string & strFilePath ) <nl> + bool FileUtilsEmscripten : : isFileExist ( const std : : string & strFilePath ) const <nl> { <nl> std : : string strPath = strFilePath ; <nl> if ( strPath [ 0 ] ! = ' / ' ) <nl> mmm a / cocos2dx / platform / emscripten / CCFileUtilsEmscripten . h <nl> ppp b / cocos2dx / platform / emscripten / CCFileUtilsEmscripten . 
h <nl> class CC_DLL FileUtilsEmscripten : public FileUtils <nl> { <nl> friend class FileUtils ; <nl> FileUtilsEmscripten ( ) ; <nl> + <nl> public : <nl> / * override funtions * / <nl> bool init ( ) ; <nl> - virtual std : : string getWritablePath ( ) ; <nl> - virtual bool isFileExist ( const std : : string & strFilePath ) ; <nl> - virtual bool isAbsolutePath ( const std : : string & strPath ) ; <nl> + virtual std : : string getWritablePath ( ) const ; <nl> + virtual bool isFileExist ( const std : : string & strFilePath ) const ; <nl> + virtual bool isAbsolutePath ( const std : : string & strPath ) const ; <nl> } ; <nl> <nl> / / end of platform group <nl> deleted file mode 100644 <nl> index 8948c10acc66 . . 000000000000 <nl> mmm a / cocos2dx / platform / ios / CCThread . mm <nl> ppp / dev / null <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - Copyright ( c ) 2010 cocos2d - x . org <nl> - <nl> - http : / / www . cocos2d - x . org <nl> - <nl> - Permission is hereby granted , free of charge , to any person obtaining a copy <nl> - of this software and associated documentation files ( the " Software " ) , to deal <nl> - in the Software without restriction , including without limitation the rights <nl> - to use , copy , modify , merge , publish , distribute , sublicense , and / or sell <nl> - copies of the Software , and to permit persons to whom the Software is <nl> - furnished to do so , subject to the following conditions : <nl> - <nl> - The above copyright notice and this permission notice shall be included in <nl> - all copies or substantial portions of the Software . <nl> - <nl> - THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR <nl> - IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY , <nl> - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE <nl> - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER <nl> - LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM , <nl> - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN <nl> - THE SOFTWARE . <nl> - * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - # import < UIKit / UIKit . h > <nl> - # include " CCThread . h " <nl> - <nl> - NS_CC_BEGIN <nl> - <nl> - Thread : : ~ Thread ( ) <nl> - { <nl> - [ ( id ) _autoReleasePool release ] ; <nl> - } <nl> - <nl> - void Thread : : createAutoreleasePool ( ) <nl> - { <nl> - _autoReleasePool = [ [ NSAutoreleasePool alloc ] init ] ; <nl> - } <nl> - <nl> - NS_CC_END <nl> mmm a / cocos2dx / platform / linux / CCFileUtilsLinux . cpp <nl> ppp b / cocos2dx / platform / linux / CCFileUtilsLinux . cpp <nl> bool FileUtilsLinux : : init ( ) <nl> return FileUtils : : init ( ) ; <nl> } <nl> <nl> - string FileUtilsLinux : : getWritablePath ( ) <nl> + string FileUtilsLinux : : getWritablePath ( ) const <nl> { <nl> struct stat st ; <nl> stat ( _writablePath . c_str ( ) , & st ) ; <nl> string FileUtilsLinux : : getWritablePath ( ) <nl> return _writablePath ; <nl> } <nl> <nl> - bool FileUtilsLinux : : isFileExist ( const std : : string & strFilePath ) <nl> + bool FileUtilsLinux : : isFileExist ( const std : : string & strFilePath ) const <nl> { <nl> if ( 0 = = strFilePath . 
length ( ) ) <nl> { <nl> mmm a / cocos2dx / platform / linux / CCFileUtilsLinux . h <nl> ppp b / cocos2dx / platform / linux / CCFileUtilsLinux . h <nl> class CC_DLL FileUtilsLinux : public FileUtils <nl> public : <nl> / * override funtions * / <nl> bool init ( ) ; <nl> - virtual std : : string getWritablePath ( ) ; <nl> - virtual bool isFileExist ( const std : : string & strFilePath ) ; <nl> + virtual std : : string getWritablePath ( ) const ; <nl> + virtual bool isFileExist ( const std : : string & strFilePath ) const ; <nl> } ; <nl> <nl> / / end of platform group <nl> deleted file mode 100644 <nl> index 59027609a809 . . 000000000000 <nl> mmm a / cocos2dx / platform / mac / CCFileUtilsMac . h <nl> ppp / dev / null <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - Copyright ( c ) 2010 cocos2d - x . org <nl> - <nl> - http : / / www . cocos2d - x . org <nl> - <nl> - Permission is hereby granted , free of charge , to any person obtaining a copy <nl> - of this software and associated documentation files ( the " Software " ) , to deal <nl> - in the Software without restriction , including without limitation the rights <nl> - to use , copy , modify , merge , publish , distribute , sublicense , and / or sell <nl> - copies of the Software , and to permit persons to whom the Software is <nl> - furnished to do so , subject to the following conditions : <nl> - <nl> - The above copyright notice and this permission notice shall be included in <nl> - all copies or substantial portions of the Software . <nl> - <nl> - THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR <nl> - IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY , <nl> - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE <nl> - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER <nl> - LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM , <nl> - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN <nl> - THE SOFTWARE . <nl> - * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - # ifndef __CC_FILEUTILSMAC_H__ <nl> - # define __CC_FILEUTILSMAC_H__ <nl> - <nl> - # include " CCFileUtils . h " <nl> - # include < string > <nl> - # include < vector > <nl> - # include " CCPlatformMacros . h " <nl> - # include " ccTypes . h " <nl> - <nl> - NS_CC_BEGIN <nl> - / * * <nl> - * @ addtogroup platform <nl> - * @ { <nl> - * / <nl> - <nl> - / / ! 
@ brief Helper class to handle file operations <nl> - class CC_DLL FileUtilsMac : public FileUtils <nl> - { <nl> - public : <nl> - / * override funtions * / <nl> - virtual std : : string getWritablePath ( ) ; <nl> - virtual bool isFileExist ( const std : : string & strFilePath ) ; <nl> - virtual bool isAbsolutePath ( const std : : string & strPath ) ; <nl> - virtual std : : string getFullPathForDirectoryAndFilename ( const std : : string & strDirectory , const std : : string & strFilename ) ; <nl> - <nl> - virtual Dictionary * createDictionaryWithContentsOfFile ( const std : : string & filename ) ; <nl> - virtual bool writeToFile ( Dictionary * dict , const std : : string & fullPath ) ; <nl> - <nl> - virtual Array * createArrayWithContentsOfFile ( const std : : string & filename ) ; <nl> - <nl> - } ; <nl> - <nl> - / / end of platform group <nl> - / / / @ } <nl> - <nl> - NS_CC_END <nl> - <nl> - # endif / / __CC_FILEUTILSMAC_H__ <nl> - <nl> deleted file mode 100644 <nl> index 340aa00562f0 . . 000000000000 <nl> mmm a / cocos2dx / platform / mac / CCFileUtilsMac . mm <nl> ppp / dev / null <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - Copyright ( c ) 2010 - 2012 cocos2d - x . org <nl> - Copyright ( c ) 2011 Zynga Inc . <nl> - <nl> - http : / / www . cocos2d - x . org <nl> - <nl> - Permission is hereby granted , free of charge , to any person obtaining a copy <nl> - of this software and associated documentation files ( the " Software " ) , to deal <nl> - in the Software without restriction , including without limitation the rights <nl> - to use , copy , modify , merge , publish , distribute , sublicense , and / or sell <nl> - copies of the Software , and to permit persons to whom the Software is <nl> - furnished to do so , subject to the following conditions : <nl> - <nl> - The above copyright notice and this permission notice shall be included in <nl> - all copies or substantial portions of the Software . <nl> - <nl> - THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR <nl> - IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY , <nl> - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE <nl> - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER <nl> - LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM , <nl> - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN <nl> - THE SOFTWARE . <nl> - * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - # include " CCFileUtilsMac . h " <nl> - # import < Foundation / Foundation . h > <nl> - # include < string > <nl> - # include < stack > <nl> - # include " cocoa / CCString . h " <nl> - # include " CCFileUtils . h " <nl> - # include " CCDirector . h " <nl> - # include " CCSAXParser . h " <nl> - # include " CCDictionary . h " <nl> - # include " support / zip_support / unzip . 
h " <nl> - <nl> - NS_CC_BEGIN <nl> - <nl> - static void addValueToDict ( id key , id value , Dictionary * pDict ) ; <nl> - static void addObjectToNSDict ( const char * key , Object * object , NSMutableDictionary * dict ) ; <nl> - <nl> - static void addItemToArray ( id item , Array * array ) <nl> - { <nl> - / / add string value into array <nl> - if ( [ item isKindOfClass : [ NSString class ] ] ) { <nl> - String * pValue = new String ( [ item UTF8String ] ) ; <nl> - <nl> - array - > addObject ( pValue ) ; <nl> - pValue - > release ( ) ; <nl> - return ; <nl> - } <nl> - <nl> - / / add number value into array ( such as int , float , bool and so on ) <nl> - if ( [ item isKindOfClass : [ NSNumber class ] ] ) { <nl> - NSString * pStr = [ item stringValue ] ; <nl> - String * pValue = new String ( [ pStr UTF8String ] ) ; <nl> - <nl> - array - > addObject ( pValue ) ; <nl> - pValue - > release ( ) ; <nl> - return ; <nl> - } <nl> - <nl> - / / add dictionary value into array <nl> - if ( [ item isKindOfClass : [ NSDictionary class ] ] ) { <nl> - Dictionary * pDictItem = new Dictionary ( ) ; <nl> - pDictItem - > init ( ) ; <nl> - for ( id subKey in [ item allKeys ] ) { <nl> - id subValue = [ item objectForKey : subKey ] ; <nl> - addValueToDict ( subKey , subValue , pDictItem ) ; <nl> - } <nl> - array - > addObject ( pDictItem ) ; <nl> - pDictItem - > release ( ) ; <nl> - return ; <nl> - } <nl> - <nl> - / / add array value into array <nl> - if ( [ item isKindOfClass : [ NSArray class ] ] ) { <nl> - Array * arrayItem = new Array ( ) ; <nl> - arrayItem - > initWithCapacity ( [ item count ] ) ; <nl> - for ( id subItem in item ) { <nl> - addItemToArray ( subItem , arrayItem ) ; <nl> - } <nl> - array - > addObject ( arrayItem ) ; <nl> - arrayItem - > release ( ) ; <nl> - return ; <nl> - } <nl> - } <nl> - <nl> - static void addObjectToNSArray ( Object * object , NSMutableArray * array ) <nl> - { <nl> - / / add string into array <nl> - if ( String * ccString = dynamic_cast < String * > ( object ) ) { <nl> - NSString * strElement = [ NSString stringWithCString : ccString - > getCString ( ) encoding : NSUTF8StringEncoding ] ; <nl> - [ array addObject : strElement ] ; <nl> - return ; <nl> - } <nl> - <nl> - / / add array into array <nl> - if ( Array * ccArray = dynamic_cast < Array * > ( object ) ) { <nl> - NSMutableArray * arrElement = [ NSMutableArray array ] ; <nl> - Object * element = NULL ; <nl> - CCARRAY_FOREACH ( ccArray , element ) <nl> - { <nl> - addObjectToNSArray ( element , arrElement ) ; <nl> - } <nl> - [ array addObject : arrElement ] ; <nl> - return ; <nl> - } <nl> - <nl> - / / add dictionary value into array <nl> - if ( Dictionary * ccDict = dynamic_cast < Dictionary * > ( object ) ) { <nl> - NSMutableDictionary * dictElement = [ NSMutableDictionary dictionary ] ; <nl> - DictElement * element = NULL ; <nl> - CCDICT_FOREACH ( ccDict , element ) <nl> - { <nl> - addObjectToNSDict ( element - > getStrKey ( ) , element - > getObject ( ) , dictElement ) ; <nl> - } <nl> - [ array addObject : dictElement ] ; <nl> - } <nl> - <nl> - } <nl> - <nl> - static void addValueToDict ( id key , id value , Dictionary * pDict ) <nl> - { <nl> - / / the key must be a string <nl> - CCASSERT ( [ key isKindOfClass : [ NSString class ] ] , " The key should be a string ! 
" ) ; <nl> - std : : string pKey = [ key UTF8String ] ; <nl> - <nl> - / / the value is a new dictionary <nl> - if ( [ value isKindOfClass : [ NSDictionary class ] ] ) { <nl> - Dictionary * pSubDict = new Dictionary ( ) ; <nl> - for ( id subKey in [ value allKeys ] ) { <nl> - id subValue = [ value objectForKey : subKey ] ; <nl> - addValueToDict ( subKey , subValue , pSubDict ) ; <nl> - } <nl> - pDict - > setObject ( pSubDict , pKey . c_str ( ) ) ; <nl> - pSubDict - > release ( ) ; <nl> - return ; <nl> - } <nl> - <nl> - / / the value is a string <nl> - if ( [ value isKindOfClass : [ NSString class ] ] ) { <nl> - String * pValue = new String ( [ value UTF8String ] ) ; <nl> - <nl> - pDict - > setObject ( pValue , pKey . c_str ( ) ) ; <nl> - pValue - > release ( ) ; <nl> - return ; <nl> - } <nl> - <nl> - / / the value is a number <nl> - if ( [ value isKindOfClass : [ NSNumber class ] ] ) { <nl> - NSString * pStr = [ value stringValue ] ; <nl> - String * pValue = new String ( [ pStr UTF8String ] ) ; <nl> - <nl> - pDict - > setObject ( pValue , pKey . c_str ( ) ) ; <nl> - pValue - > release ( ) ; <nl> - return ; <nl> - } <nl> - <nl> - / / the value is a array <nl> - if ( [ value isKindOfClass : [ NSArray class ] ] ) { <nl> - Array * array = new Array ( ) ; <nl> - array - > initWithCapacity ( [ value count ] ) ; <nl> - for ( id item in value ) { <nl> - addItemToArray ( item , array ) ; <nl> - } <nl> - pDict - > setObject ( array , pKey . c_str ( ) ) ; <nl> - array - > release ( ) ; <nl> - return ; <nl> - } <nl> - } <nl> - <nl> - static void addObjectToNSDict ( const char * key , Object * object , NSMutableDictionary * dict ) <nl> - { <nl> - NSString * NSkey = [ NSString stringWithCString : key encoding : NSUTF8StringEncoding ] ; <nl> - <nl> - / / the object is a Dictionary <nl> - if ( Dictionary * ccDict = dynamic_cast < Dictionary * > ( object ) ) { <nl> - NSMutableDictionary * dictElement = [ NSMutableDictionary dictionary ] ; <nl> - DictElement * element = NULL ; <nl> - CCDICT_FOREACH ( ccDict , element ) <nl> - { <nl> - addObjectToNSDict ( element - > getStrKey ( ) , element - > getObject ( ) , dictElement ) ; <nl> - } <nl> - <nl> - [ dict setObject : dictElement forKey : NSkey ] ; <nl> - return ; <nl> - } <nl> - <nl> - / / the object is a String <nl> - if ( String * element = dynamic_cast < String * > ( object ) ) { <nl> - NSString * strElement = [ NSString stringWithCString : element - > getCString ( ) encoding : NSUTF8StringEncoding ] ; <nl> - [ dict setObject : strElement forKey : NSkey ] ; <nl> - return ; <nl> - } <nl> - <nl> - / / the object is a Array <nl> - if ( Array * ccArray = dynamic_cast < Array * > ( object ) ) { <nl> - NSMutableArray * arrElement = [ NSMutableArray array ] ; <nl> - Object * element = NULL ; <nl> - CCARRAY_FOREACH ( ccArray , element ) <nl> - { <nl> - addObjectToNSArray ( element , arrElement ) ; <nl> - } <nl> - [ dict setObject : arrElement forKey : NSkey ] ; <nl> - return ; <nl> - } <nl> - } <nl> - <nl> - FileUtils * FileUtils : : getInstance ( ) <nl> - { <nl> - if ( s_sharedFileUtils = = NULL ) <nl> - { <nl> - s_sharedFileUtils = new FileUtilsMac ( ) ; <nl> - if ( ! 
s_sharedFileUtils - > init ( ) ) <nl> - { <nl> - delete s_sharedFileUtils ; <nl> - s_sharedFileUtils = NULL ; <nl> - CCLOG ( " ERROR : Could not init CCFileUtilsMac " ) ; <nl> - } <nl> - } <nl> - return s_sharedFileUtils ; <nl> - } <nl> - <nl> - <nl> - static NSFileManager * s_fileManager = [ NSFileManager defaultManager ] ; <nl> - <nl> - std : : string FileUtilsMac : : getWritablePath ( ) <nl> - { <nl> - / / save to document folder <nl> - NSArray * paths = NSSearchPathForDirectoriesInDomains ( NSCachesDirectory , NSUserDomainMask , YES ) ; <nl> - NSString * documentsDirectory = [ paths objectAtIndex : 0 ] ; <nl> - std : : string strRet = [ documentsDirectory UTF8String ] ; <nl> - strRet . append ( " / " ) ; <nl> - return strRet ; <nl> - } <nl> - <nl> - bool FileUtilsMac : : isFileExist ( const std : : string & strFilePath ) <nl> - { <nl> - if ( 0 = = strFilePath . length ( ) ) <nl> - { <nl> - return false ; <nl> - } <nl> - <nl> - bool bRet = false ; <nl> - <nl> - if ( strFilePath [ 0 ] ! = ' / ' ) <nl> - { <nl> - std : : string path = strFilePath ; <nl> - std : : string file ; <nl> - size_t pos = path . find_last_of ( " / " ) ; <nl> - if ( pos ! = std : : string : : npos ) <nl> - { <nl> - file = path . substr ( pos + 1 ) ; <nl> - path = path . substr ( 0 , pos + 1 ) ; <nl> - NSString * fullpath = [ [ NSBundle mainBundle ] pathForResource : [ NSString stringWithUTF8String : file . c_str ( ) ] <nl> - ofType : nil <nl> - inDirectory : [ NSString stringWithUTF8String : path . c_str ( ) ] ] ; <nl> - if ( fullpath ! = nil ) { <nl> - bRet = true ; <nl> - } <nl> - } <nl> - } <nl> - else <nl> - { <nl> - / / Search path is an absolute path . <nl> - if ( [ s_fileManager fileExistsAtPath : [ NSString stringWithUTF8String : strFilePath . c_str ( ) ] ] ) { <nl> - bRet = true ; <nl> - } <nl> - } <nl> - <nl> - return bRet ; <nl> - } <nl> - <nl> - std : : string FileUtilsMac : : getFullPathForDirectoryAndFilename ( const std : : string & strDirectory , const std : : string & strFilename ) <nl> - { <nl> - if ( strDirectory [ 0 ] ! = ' / ' ) <nl> - { <nl> - NSString * fullpath = [ [ NSBundle mainBundle ] pathForResource : [ NSString stringWithUTF8String : strFilename . c_str ( ) ] <nl> - ofType : nil <nl> - inDirectory : [ NSString stringWithUTF8String : strDirectory . c_str ( ) ] ] ; <nl> - if ( fullpath ! = nil ) { <nl> - return [ fullpath UTF8String ] ; <nl> - } <nl> - } <nl> - else <nl> - { <nl> - std : : string fullPath = strDirectory + strFilename ; <nl> - / / Search path is an absolute path . <nl> - if ( [ s_fileManager fileExistsAtPath : [ NSString stringWithUTF8String : fullPath . c_str ( ) ] ] ) { <nl> - return fullPath ; <nl> - } <nl> - } <nl> - return " " ; <nl> - } <nl> - <nl> - bool FileUtilsMac : : isAbsolutePath ( const std : : string & strPath ) <nl> - { <nl> - NSString * path = [ NSString stringWithUTF8String : strPath . c_str ( ) ] ; <nl> - return [ path isAbsolutePath ] ? true : false ; <nl> - } <nl> - <nl> - Dictionary * FileUtilsMac : : createDictionaryWithContentsOfFile ( const std : : string & filename ) <nl> - { <nl> - std : : string fullPath = FileUtils : : getInstance ( ) - > fullPathForFilename ( filename . c_str ( ) ) ; <nl> - NSString * pPath = [ NSString stringWithUTF8String : fullPath . 
c_str ( ) ] ; <nl> - NSDictionary * pDict = [ NSDictionary dictionaryWithContentsOfFile : pPath ] ; <nl> - <nl> - Dictionary * pRet = Dictionary : : create ( ) ; <nl> - for ( id key in [ pDict allKeys ] ) { <nl> - id value = [ pDict objectForKey : key ] ; <nl> - addValueToDict ( key , value , pRet ) ; <nl> - } <nl> - <nl> - return pRet ; <nl> - } <nl> - <nl> - bool FileUtilsMac : : writeToFile ( Dictionary * dict , const std : : string & fullPath ) <nl> - { <nl> - CCLOG ( " iOS | | Mac Dictionary % d write to file % s " , dict - > _ID , fullPath . c_str ( ) ) ; <nl> - NSMutableDictionary * nsDict = [ NSMutableDictionary dictionary ] ; <nl> - <nl> - DictElement * element = NULL ; <nl> - CCDICT_FOREACH ( dict , element ) <nl> - { <nl> - addObjectToNSDict ( element - > getStrKey ( ) , element - > getObject ( ) , nsDict ) ; <nl> - } <nl> - <nl> - NSString * file = [ NSString stringWithUTF8String : fullPath . c_str ( ) ] ; <nl> - / / do it atomically <nl> - return [ nsDict writeToFile : file atomically : YES ] ; <nl> - } <nl> - <nl> - Array * FileUtilsMac : : createArrayWithContentsOfFile ( const std : : string & filename ) <nl> - { <nl> - / / NSString * pPath = [ NSString stringWithUTF8String : pFileName ] ; <nl> - / / NSString * pathExtension = [ pPath pathExtension ] ; <nl> - / / pPath = [ pPath stringByDeletingPathExtension ] ; <nl> - / / pPath = [ [ NSBundle mainBundle ] pathForResource : pPath ofType : pathExtension ] ; <nl> - / / fixing cannot read data using Array : : createWithContentsOfFile <nl> - std : : string fullPath = FileUtils : : getInstance ( ) - > fullPathForFilename ( filename . c_str ( ) ) ; <nl> - NSString * pPath = [ NSString stringWithUTF8String : fullPath . c_str ( ) ] ; <nl> - NSArray * array = [ NSArray arrayWithContentsOfFile : pPath ] ; <nl> - <nl> - Array * ret = Array : : createWithCapacity ( [ array count ] ) ; <nl> - for ( id value in array ) { <nl> - addItemToArray ( value , ret ) ; <nl> - } <nl> - <nl> - return ret ; <nl> - } <nl> - <nl> - <nl> - NS_CC_END <nl> - <nl> mmm a / cocos2dx / platform / qt5 / CCFileUtilsQt5 . cpp <nl> ppp b / cocos2dx / platform / qt5 / CCFileUtilsQt5 . cpp <nl> FileUtilsQt5 : : init ( ) <nl> } <nl> <nl> std : : string <nl> - FileUtilsQt5 : : getWritablePath ( ) <nl> + FileUtilsQt5 : : getWritablePath ( ) const <nl> { <nl> QDir dir ( QStandardPaths : : writableLocation ( QStandardPaths : : DataLocation ) ) ; <nl> <nl> FileUtilsQt5 : : getWritablePath ( ) <nl> return dir . path ( ) . toStdString ( ) ; <nl> } <nl> <nl> - bool FileUtilsQt5 : : isFileExist ( const std : : string & strFilePath ) <nl> + bool FileUtilsQt5 : : isFileExist ( const std : : string & strFilePath ) const <nl> { <nl> QString filePath = QString : : fromStdString ( strFilePath ) ; <nl> <nl> mmm a / cocos2dx / platform / tizen / CCFileUtilsTizen . cpp <nl> ppp b / cocos2dx / platform / tizen / CCFileUtilsTizen . cpp <nl> bool FileUtilsTizen : : init ( ) <nl> return FileUtils : : init ( ) ; <nl> } <nl> <nl> - string FileUtilsTizen : : getWritablePath ( ) <nl> + string FileUtilsTizen : : getWritablePath ( ) const <nl> { <nl> UiApp * pApp = UiApp : : GetInstance ( ) ; <nl> if ( ! pApp ) <nl> string FileUtilsTizen : : getWritablePath ( ) <nl> return path ; <nl> } <nl> <nl> - bool FileUtilsTizen : : isFileExist ( const std : : string & strFilePath ) <nl> + bool FileUtilsTizen : : isFileExist ( const std : : string & strFilePath ) const <nl> { <nl> std : : string strPath = strFilePath ; <nl> if ( ! 
isAbsolutePath ( strPath ) ) <nl> mmm a / cocos2dx / platform / tizen / CCFileUtilsTizen . h <nl> ppp b / cocos2dx / platform / tizen / CCFileUtilsTizen . h <nl> class CC_DLL FileUtilsTizen : public FileUtils <nl> public : <nl> / * override funtions * / <nl> bool init ( ) ; <nl> - virtual std : : string getWritablePath ( ) ; <nl> - virtual bool isFileExist ( const std : : string & strFilePath ) ; <nl> + virtual std : : string getWritablePath ( ) const ; <nl> + virtual bool isFileExist ( const std : : string & strFilePath ) const ; <nl> } ; <nl> <nl> / / end of platform group <nl> mmm a / cocos2dx / platform / win32 / CCFileUtilsWin32 . cpp <nl> ppp b / cocos2dx / platform / win32 / CCFileUtilsWin32 . cpp <nl> bool FileUtilsWin32 : : init ( ) <nl> return FileUtils : : init ( ) ; <nl> } <nl> <nl> - bool FileUtilsWin32 : : isFileExist ( const std : : string & strFilePath ) <nl> + bool FileUtilsWin32 : : isFileExist ( const std : : string & strFilePath ) const <nl> { <nl> if ( 0 = = strFilePath . length ( ) ) <nl> { <nl> bool FileUtilsWin32 : : isFileExist ( const std : : string & strFilePath ) <nl> return GetFileAttributesW ( utf16Buf ) ! = - 1 ? true : false ; <nl> } <nl> <nl> - bool FileUtilsWin32 : : isAbsolutePath ( const std : : string & strPath ) <nl> + bool FileUtilsWin32 : : isAbsolutePath ( const std : : string & strPath ) const <nl> { <nl> if ( strPath . length ( ) > 2 <nl> & & ( ( strPath [ 0 ] > = ' a ' & & strPath [ 0 ] < = ' z ' ) | | ( strPath [ 0 ] > = ' A ' & & strPath [ 0 ] < = ' Z ' ) ) <nl> std : : string FileUtilsWin32 : : getFullPathForDirectoryAndFilename ( const std : : string <nl> return FileUtils : : getFullPathForDirectoryAndFilename ( unixDirectory , unixFilename ) ; <nl> } <nl> <nl> - string FileUtilsWin32 : : getWritablePath ( ) <nl> + string FileUtilsWin32 : : getWritablePath ( ) const <nl> { <nl> / / Get full path of executable , e . g . c : \ Program Files ( x86 ) \ My Game Folder \ MyGame . exe <nl> char full_path [ CC_MAX_PATH + 1 ] ; <nl> mmm a / cocos2dx / platform / win32 / CCFileUtilsWin32 . h <nl> ppp b / cocos2dx / platform / win32 / CCFileUtilsWin32 . h <nl> class CC_DLL FileUtilsWin32 : public FileUtils <nl> public : <nl> / * override funtions * / <nl> bool init ( ) ; <nl> - virtual std : : string getWritablePath ( ) ; <nl> - virtual bool isFileExist ( const std : : string & strFilePath ) ; <nl> - virtual bool isAbsolutePath ( const std : : string & strPath ) ; <nl> + virtual std : : string getWritablePath ( ) const ; <nl> + virtual bool isFileExist ( const std : : string & strFilePath ) const ; <nl> + virtual bool isAbsolutePath ( const std : : string & strPath ) const ; <nl> protected : <nl> / * * <nl> * Gets resource file data <nl> mmm a / cocos2dx / shaders / CCShaderCache . cpp <nl> ppp b / cocos2dx / shaders / CCShaderCache . cpp <nl> void ShaderCache : : purgeSharedShaderCache ( ) <nl> } <nl> <nl> ShaderCache : : ShaderCache ( ) <nl> - : _programs ( 0 ) <nl> + : _programs ( ) <nl> { <nl> <nl> } <nl> <nl> ShaderCache : : ~ ShaderCache ( ) <nl> { <nl> + for ( auto it = _programs . begin ( ) ; it ! = _programs . 
end ( ) ; + + it ) { <nl> + ( it - > second ) - > release ( ) ; <nl> + } <nl> + <nl> CCLOGINFO ( " deallocing ShaderCache : % p " , this ) ; <nl> - _programs - > release ( ) ; <nl> } <nl> <nl> bool ShaderCache : : init ( ) <nl> - { <nl> - _programs = Dictionary : : create ( ) ; <nl> - _programs - > retain ( ) ; <nl> - <nl> + { <nl> loadDefaultShaders ( ) ; <nl> return true ; <nl> } <nl> void ShaderCache : : loadDefaultShaders ( ) <nl> / / Position Texture Color shader <nl> GLProgram * p = new GLProgram ( ) ; <nl> loadDefaultShader ( p , kShaderType_PositionTextureColor ) ; <nl> - <nl> - _programs - > setObject ( p , GLProgram : : SHADER_NAME_POSITION_TEXTURE_COLOR ) ; <nl> - p - > release ( ) ; <nl> + _programs . insert ( std : : make_pair ( GLProgram : : SHADER_NAME_POSITION_TEXTURE_COLOR , p ) ) ; <nl> <nl> / / Position Texture Color alpha test <nl> p = new GLProgram ( ) ; <nl> loadDefaultShader ( p , kShaderType_PositionTextureColorAlphaTest ) ; <nl> - <nl> - _programs - > setObject ( p , GLProgram : : SHADER_NAME_POSITION_TEXTURE_ALPHA_TEST ) ; <nl> - p - > release ( ) ; <nl> + _programs . insert ( std : : make_pair ( GLProgram : : SHADER_NAME_POSITION_TEXTURE_ALPHA_TEST , p ) ) ; <nl> <nl> / / <nl> / / Position , Color shader <nl> / / <nl> p = new GLProgram ( ) ; <nl> loadDefaultShader ( p , kShaderType_PositionColor ) ; <nl> - <nl> - _programs - > setObject ( p , GLProgram : : SHADER_NAME_POSITION_COLOR ) ; <nl> - p - > release ( ) ; <nl> + _programs . insert ( std : : make_pair ( GLProgram : : SHADER_NAME_POSITION_COLOR , p ) ) ; <nl> <nl> / / <nl> / / Position Texture shader <nl> / / <nl> p = new GLProgram ( ) ; <nl> loadDefaultShader ( p , kShaderType_PositionTexture ) ; <nl> - <nl> - _programs - > setObject ( p , GLProgram : : SHADER_NAME_POSITION_TEXTURE ) ; <nl> - p - > release ( ) ; <nl> + _programs . insert ( std : : make_pair ( GLProgram : : SHADER_NAME_POSITION_TEXTURE , p ) ) ; <nl> <nl> / / <nl> / / Position , Texture attribs , 1 Color as uniform shader <nl> / / <nl> p = new GLProgram ( ) ; <nl> loadDefaultShader ( p , kShaderType_PositionTexture_uColor ) ; <nl> - <nl> - _programs - > setObject ( p , GLProgram : : SHADER_NAME_POSITION_TEXTURE_U_COLOR ) ; <nl> - p - > release ( ) ; <nl> + _programs . insert ( std : : make_pair ( GLProgram : : SHADER_NAME_POSITION_TEXTURE_U_COLOR , p ) ) ; <nl> <nl> / / <nl> / / Position Texture A8 Color shader <nl> / / <nl> p = new GLProgram ( ) ; <nl> loadDefaultShader ( p , kShaderType_PositionTextureA8Color ) ; <nl> - <nl> - _programs - > setObject ( p , GLProgram : : SHADER_NAME_POSITION_TEXTURE_A8_COLOR ) ; <nl> - p - > release ( ) ; <nl> + _programs . insert ( std : : make_pair ( GLProgram : : SHADER_NAME_POSITION_TEXTURE_A8_COLOR , p ) ) ; <nl> <nl> / / <nl> / / Position and 1 color passed as a uniform ( to simulate glColor4ub ) <nl> / / <nl> p = new GLProgram ( ) ; <nl> loadDefaultShader ( p , kShaderType_Position_uColor ) ; <nl> - <nl> - _programs - > setObject ( p , GLProgram : : SHADER_NAME_POSITION_U_COLOR ) ; <nl> - p - > release ( ) ; <nl> + _programs . insert ( std : : make_pair ( GLProgram : : SHADER_NAME_POSITION_U_COLOR , p ) ) ; <nl> <nl> / / <nl> / / Position , Legth ( TexCoords , Color ( used by Draw Node basically ) <nl> / / <nl> p = new GLProgram ( ) ; <nl> loadDefaultShader ( p , kShaderType_PositionLengthTexureColor ) ; <nl> - <nl> - _programs - > setObject ( p , GLProgram : : SHADER_NAME_POSITION_LENGTH_TEXTURE_COLOR ) ; <nl> - p - > release ( ) ; <nl> + _programs . 
insert ( std : : make_pair ( GLProgram : : SHADER_NAME_POSITION_LENGTH_TEXTURE_COLOR , p ) ) ; <nl> } <nl> <nl> void ShaderCache : : reloadDefaultShaders ( ) <nl> void ShaderCache : : loadDefaultShader ( GLProgram * p , int type ) <nl> CHECK_GL_ERROR_DEBUG ( ) ; <nl> } <nl> <nl> - GLProgram * ShaderCache : : programForKey ( const char * key ) <nl> + GLProgram * ShaderCache : : programForKey ( const std : : string & key ) <nl> { <nl> - return static_cast < GLProgram * > ( _programs - > objectForKey ( key ) ) ; <nl> + auto it = _programs . find ( key ) ; <nl> + if ( it ! = _programs . end ( ) ) <nl> + return it - > second ; <nl> + return nullptr ; <nl> } <nl> <nl> - void ShaderCache : : addProgram ( GLProgram * program , const char * key ) <nl> + void ShaderCache : : addProgram ( GLProgram * program , const std : : string & key ) <nl> { <nl> - _programs - > setObject ( program , key ) ; <nl> + program - > retain ( ) ; <nl> + _programs . insert ( std : : make_pair ( key , program ) ) ; <nl> } <nl> <nl> NS_CC_END <nl> mmm a / cocos2dx / shaders / CCShaderCache . h <nl> ppp b / cocos2dx / shaders / CCShaderCache . h <nl> THE SOFTWARE . <nl> # ifndef __CCSHADERCACHE_H__ <nl> # define __CCSHADERCACHE_H__ <nl> <nl> + # include < string > <nl> + # include < unordered_map > <nl> + <nl> # include " cocoa / CCDictionary . h " <nl> <nl> NS_CC_BEGIN <nl> class CC_DLL ShaderCache : public Object <nl> void reloadDefaultShaders ( ) ; <nl> <nl> / * * returns a GL program for a given key * / <nl> - GLProgram * programForKey ( const char * key ) ; <nl> + GLProgram * programForKey ( const std : : string & key ) ; <nl> <nl> / * * adds a GLProgram to the cache for a given name * / <nl> - void addProgram ( GLProgram * program , const char * key ) ; <nl> + void addProgram ( GLProgram * program , const std : : string & key ) ; <nl> <nl> private : <nl> bool init ( ) ; <nl> void loadDefaultShader ( GLProgram * program , int type ) ; <nl> <nl> - Dictionary * _programs ; <nl> - <nl> + / / Dictionary * _programs ; <nl> + std : : unordered_map < std : : string , GLProgram * > _programs ; <nl> } ; <nl> <nl> / / end of shaders group <nl> mmm a / cocos2dx / sprite_nodes / CCSprite . cpp <nl> ppp b / cocos2dx / sprite_nodes / CCSprite . cpp <nl> static unsigned char cc_2x2_white_image [ ] = { <nl> 0xFF , 0xFF , 0xFF , 0xFF <nl> } ; <nl> <nl> - # define CC_2x2_WHITE_IMAGE_KEY " cc_2x2_white_image " <nl> + # define CC_2x2_WHITE_IMAGE_KEY " / cc_2x2_white_image " <nl> <nl> void Sprite : : setTexture ( Texture2D * texture ) <nl> { <nl> void Sprite : : setTexture ( Texture2D * texture ) <nl> bool isOK = image - > initWithRawData ( cc_2x2_white_image , sizeof ( cc_2x2_white_image ) , 2 , 2 , 8 ) ; <nl> CCASSERT ( isOK , " The 2x2 empty texture was created unsuccessfully . " ) ; <nl> <nl> - texture = TextureCache : : getInstance ( ) - > addUIImage ( image , CC_2x2_WHITE_IMAGE_KEY ) ; <nl> + texture = TextureCache : : getInstance ( ) - > addImage ( image , CC_2x2_WHITE_IMAGE_KEY ) ; <nl> CC_SAFE_RELEASE ( image ) ; <nl> } <nl> } <nl> mmm a / cocos2dx / textures / CCTexture2D . h <nl> ppp b / cocos2dx / textures / CCTexture2D . 
h <nl> class CC_DLL Texture2D : public Object <nl> <nl> <nl> struct PixelFormatInfo { <nl> - public : <nl> + <nl> + PixelFormatInfo ( GLenum internalFormat , GLenum format , GLenum type , int bpp , bool compressed , bool alpha ) <nl> + : internalFormat ( internalFormat ) <nl> + , format ( format ) <nl> + , type ( type ) <nl> + , bpp ( bpp ) <nl> + , compressed ( compressed ) <nl> + , alpha ( alpha ) <nl> + { } <nl> + <nl> GLenum internalFormat ; <nl> GLenum format ; <nl> GLenum type ; <nl> int bpp ; <nl> bool compressed ; <nl> bool alpha ; <nl> - <nl> - PixelFormatInfo ( GLenum internalFormat , GLenum format , GLenum type , int bpp , bool compressed , bool alpha ) <nl> - : internalFormat ( internalFormat ) , <nl> - format ( format ) , <nl> - type ( type ) , <nl> - bpp ( bpp ) , <nl> - compressed ( compressed ) , <nl> - alpha ( alpha ) <nl> - { } <nl> } ; <nl> <nl> typedef std : : map < Texture2D : : PixelFormat , const PixelFormatInfo > PixelFormatInfoMap ; <nl> mmm a / cocos2dx / textures / CCTextureCache . cpp <nl> ppp b / cocos2dx / textures / CCTextureCache . cpp <nl> TextureCache : : TextureCache ( ) <nl> , _asyncRefCount ( 0 ) <nl> { <nl> CCASSERT ( _sharedTextureCache = = nullptr , " Attempted to allocate a second instance of a singleton . " ) ; <nl> - <nl> - _textures = new Dictionary ( ) ; <nl> - _textures - > init ( ) ; <nl> - <nl> } <nl> <nl> TextureCache : : ~ TextureCache ( ) <nl> { <nl> CCLOGINFO ( " deallocing TextureCache : % p " , this ) ; <nl> <nl> - CC_SAFE_RELEASE ( _textures ) ; <nl> + for ( auto it = _textures . begin ( ) ; it ! = _textures . end ( ) ; + + it ) <nl> + ( it - > second ) - > release ( ) ; <nl> <nl> CC_SAFE_DELETE ( _loadingThread ) ; <nl> _sharedTextureCache = nullptr ; <nl> void TextureCache : : destroyInstance ( ) <nl> <nl> const char * TextureCache : : description ( ) const <nl> { <nl> - return String : : createWithFormat ( " < TextureCache | Number of textures = % u > " , _textures - > count ( ) ) - > getCString ( ) ; <nl> - } <nl> - <nl> - Dictionary * TextureCache : : snapshotTextures ( ) <nl> - { <nl> - Dictionary * pRet = new Dictionary ( ) ; <nl> - DictElement * pElement = NULL ; <nl> - CCDICT_FOREACH ( _textures , pElement ) <nl> - { <nl> - pRet - > setObject ( pElement - > getObject ( ) , pElement - > getStrKey ( ) ) ; <nl> - } <nl> - pRet - > autorelease ( ) ; <nl> - return pRet ; <nl> + return String : : createWithFormat ( " < TextureCache | Number of textures = % lu > " , _textures . size ( ) ) - > getCString ( ) ; <nl> } <nl> <nl> - void TextureCache : : addImageAsync ( const char * path , Object * target , SEL_CallFuncO selector ) <nl> + / / Dictionary * TextureCache : : snapshotTextures ( ) <nl> + / / { <nl> + / / Dictionary * pRet = new Dictionary ( ) ; <nl> + / / DictElement * pElement = NULL ; <nl> + / / CCDICT_FOREACH ( _textures , pElement ) <nl> + / / { <nl> + / / pRet - > setObject ( pElement - > getObject ( ) , pElement - > getStrKey ( ) ) ; <nl> + / / } <nl> + / / pRet - > autorelease ( ) ; <nl> + / / return pRet ; <nl> + / / } <nl> + <nl> + void TextureCache : : addImageAsync ( const std : : string & path , Object * target , SEL_CallFuncO selector ) <nl> { <nl> - CCASSERT ( path ! = NULL , " TextureCache : fileimage MUST not be NULL " ) ; <nl> - <nl> Texture2D * texture = NULL ; <nl> <nl> - / / optimization <nl> - <nl> - std : : string pathKey = path ; <nl> + std : : string fullpath = FileUtils : : getInstance ( ) - > fullPathForFilename ( path . 
c_str ( ) ) ; <nl> <nl> - pathKey = FileUtils : : getInstance ( ) - > fullPathForFilename ( pathKey . c_str ( ) ) ; <nl> - texture = static_cast < Texture2D * > ( _textures - > objectForKey ( pathKey . c_str ( ) ) ) ; <nl> + auto it = _textures . find ( fullpath ) ; <nl> + if ( it ! = _textures . end ( ) ) <nl> + texture = it - > second ; <nl> <nl> - std : : string fullpath = pathKey ; <nl> - if ( texture ! = NULL ) <nl> + if ( texture ! = NULL & & target & & selector ) <nl> { <nl> - if ( target & & selector ) <nl> - { <nl> - ( target - > * selector ) ( texture ) ; <nl> - } <nl> - <nl> + ( target - > * selector ) ( texture ) ; <nl> return ; <nl> } <nl> <nl> void TextureCache : : addImageAsync ( const char * path , Object * target , SEL_CallFuncO <nl> <nl> void TextureCache : : loadImage ( ) <nl> { <nl> - AsyncStruct * pAsyncStruct = nullptr ; <nl> + AsyncStruct * asyncStruct = nullptr ; <nl> <nl> while ( true ) <nl> { <nl> void TextureCache : : loadImage ( ) <nl> } <nl> else <nl> { <nl> - pAsyncStruct = pQueue - > front ( ) ; <nl> + asyncStruct = pQueue - > front ( ) ; <nl> pQueue - > pop ( ) ; <nl> _asyncStructQueueMutex . unlock ( ) ; <nl> } <nl> <nl> - const char * filename = pAsyncStruct - > filename . c_str ( ) ; <nl> + const char * filename = asyncStruct - > filename . c_str ( ) ; <nl> <nl> / / generate image <nl> - Image * pImage = new Image ( ) ; <nl> - if ( pImage & & ! pImage - > initWithImageFileThreadSafe ( filename ) ) <nl> + Image * image = new Image ( ) ; <nl> + if ( image & & ! image - > initWithImageFileThreadSafe ( filename ) ) <nl> { <nl> - CC_SAFE_RELEASE ( pImage ) ; <nl> + CC_SAFE_RELEASE ( image ) ; <nl> CCLOG ( " can not load % s " , filename ) ; <nl> continue ; <nl> } <nl> <nl> / / generate image info <nl> - ImageInfo * pImageInfo = new ImageInfo ( ) ; <nl> - pImageInfo - > asyncStruct = pAsyncStruct ; <nl> - pImageInfo - > image = pImage ; <nl> + ImageInfo * imageInfo = new ImageInfo ( ) ; <nl> + imageInfo - > asyncStruct = asyncStruct ; <nl> + imageInfo - > image = image ; <nl> <nl> / / put the image info into the queue <nl> _imageInfoMutex . lock ( ) ; <nl> - _imageInfoQueue - > push ( pImageInfo ) ; <nl> + _imageInfoQueue - > push ( imageInfo ) ; <nl> _imageInfoMutex . unlock ( ) ; <nl> } <nl> <nl> void TextureCache : : addImageAsyncCallBack ( float dt ) <nl> } <nl> else <nl> { <nl> - ImageInfo * pImageInfo = imagesQueue - > front ( ) ; <nl> + ImageInfo * imageInfo = imagesQueue - > front ( ) ; <nl> imagesQueue - > pop ( ) ; <nl> _imageInfoMutex . unlock ( ) ; <nl> <nl> - AsyncStruct * pAsyncStruct = pImageInfo - > asyncStruct ; <nl> - Image * pImage = pImageInfo - > image ; <nl> + AsyncStruct * asyncStruct = imageInfo - > asyncStruct ; <nl> + Image * image = imageInfo - > image ; <nl> <nl> - Object * target = pAsyncStruct - > target ; <nl> - SEL_CallFuncO selector = pAsyncStruct - > selector ; <nl> - const char * filename = pAsyncStruct - > filename . c_str ( ) ; <nl> + Object * target = asyncStruct - > target ; <nl> + SEL_CallFuncO selector = asyncStruct - > selector ; <nl> + const char * filename = asyncStruct - > filename . 
c_str ( ) ; <nl> <nl> / / generate texture in render thread <nl> Texture2D * texture = new Texture2D ( ) ; <nl> <nl> - texture - > initWithImage ( pImage ) ; <nl> + texture - > initWithImage ( image ) ; <nl> <nl> # if CC_ENABLE_CACHE_TEXTURE_DATA <nl> / / cache the texture file name <nl> VolatileTexture : : addImageTexture ( texture , filename ) ; <nl> # endif <nl> - / / cache the texture <nl> - _textures - > setObject ( texture , filename ) ; <nl> + / / cache the texture . retain it , since it is added in the map <nl> + _textures . insert ( std : : make_pair ( filename , texture ) ) ; <nl> + texture - > retain ( ) ; <nl> + <nl> texture - > autorelease ( ) ; <nl> <nl> if ( target & & selector ) <nl> void TextureCache : : addImageAsyncCallBack ( float dt ) <nl> target - > release ( ) ; <nl> } <nl> <nl> - pImage - > release ( ) ; <nl> - delete pAsyncStruct ; <nl> - delete pImageInfo ; <nl> + image - > release ( ) ; <nl> + delete asyncStruct ; <nl> + delete imageInfo ; <nl> <nl> - - _asyncRefCount ; <nl> if ( 0 = = _asyncRefCount ) <nl> void TextureCache : : addImageAsyncCallBack ( float dt ) <nl> } <nl> } <nl> <nl> - Texture2D * TextureCache : : addImage ( const char * path ) <nl> + Texture2D * TextureCache : : addImage ( const std : : string & path ) <nl> { <nl> - CCASSERT ( path ! = NULL , " TextureCache : fileimage MUST not be NULL " ) ; <nl> - <nl> Texture2D * texture = NULL ; <nl> - Image * pImage = NULL ; <nl> + Image * image = NULL ; <nl> / / Split up directory and filename <nl> / / MUTEX : <nl> / / Needed since addImageAsync calls this method from a different thread <nl> <nl> - std : : string pathKey = path ; <nl> - <nl> - pathKey = FileUtils : : getInstance ( ) - > fullPathForFilename ( pathKey . c_str ( ) ) ; <nl> - if ( pathKey . size ( ) = = 0 ) <nl> + std : : string fullpath = FileUtils : : getInstance ( ) - > fullPathForFilename ( path . c_str ( ) ) ; <nl> + if ( fullpath . size ( ) = = 0 ) <nl> { <nl> return NULL ; <nl> } <nl> - texture = static_cast < Texture2D * > ( _textures - > objectForKey ( pathKey . c_str ( ) ) ) ; <nl> + auto it = _textures . find ( fullpath ) ; <nl> + if ( it ! = _textures . end ( ) ) <nl> + texture = it - > second ; <nl> <nl> - std : : string fullpath = pathKey ; <nl> - if ( ! texture ) <nl> + if ( ! texture ) <nl> { <nl> - std : : string lowerCase ( pathKey ) ; <nl> - for ( unsigned int i = 0 ; i < lowerCase . length ( ) ; + + i ) <nl> - { <nl> - lowerCase [ i ] = tolower ( lowerCase [ i ] ) ; <nl> - } <nl> / / all images are handled by UIImage except PVR extension that is handled by our own handler <nl> do <nl> { <nl> - pImage = new Image ( ) ; <nl> - CC_BREAK_IF ( NULL = = pImage ) ; <nl> + image = new Image ( ) ; <nl> + CC_BREAK_IF ( NULL = = image ) ; <nl> <nl> - bool bRet = pImage - > initWithImageFile ( fullpath . c_str ( ) ) ; <nl> + bool bRet = image - > initWithImageFile ( fullpath . c_str ( ) ) ; <nl> CC_BREAK_IF ( ! bRet ) ; <nl> <nl> texture = new Texture2D ( ) ; <nl> <nl> - if ( texture & & <nl> - texture - > initWithImage ( pImage ) ) <nl> + if ( texture & & texture - > initWithImage ( image ) ) <nl> { <nl> # if CC_ENABLE_CACHE_TEXTURE_DATA <nl> / / cache the texture file name <nl> VolatileTexture : : addImageTexture ( texture , fullpath . c_str ( ) ) ; <nl> # endif <nl> - _textures - > setObject ( texture , pathKey . c_str ( ) ) ; <nl> - texture - > release ( ) ; <nl> + / / texture already retained , no need to re - retain it <nl> + _textures . 
insert ( std : : make_pair ( fullpath , texture ) ) ; <nl> } <nl> else <nl> { <nl> - CCLOG ( " cocos2d : Couldn ' t create texture for file : % s in TextureCache " , path ) ; <nl> + CCLOG ( " cocos2d : Couldn ' t create texture for file : % s in TextureCache " , path . c_str ( ) ) ; <nl> } <nl> } while ( 0 ) ; <nl> } <nl> <nl> - CC_SAFE_RELEASE ( pImage ) ; <nl> + CC_SAFE_RELEASE ( image ) ; <nl> <nl> return texture ; <nl> } <nl> <nl> - Texture2D * TextureCache : : addUIImage ( Image * image , const char * key ) <nl> + Texture2D * TextureCache : : addImage ( Image * image , const std : : string & key ) <nl> { <nl> CCASSERT ( image ! = NULL , " TextureCache : image MUST not be nil " ) ; <nl> <nl> Texture2D * texture = NULL ; <nl> - / / textureForKey ( ) use full path , so the key should be full path <nl> - std : : string forKey ; <nl> - if ( key ) <nl> - { <nl> - forKey = FileUtils : : getInstance ( ) - > fullPathForFilename ( key ) ; <nl> - } <nl> - <nl> - / / Don ' t have to lock here , because addImageAsync ( ) will not <nl> - / / invoke opengl function in loading thread . <nl> <nl> - do <nl> + do <nl> { <nl> - / / If key is nil , then create a new texture each time <nl> - if ( key & & ( texture = ( Texture2D * ) _textures - > objectForKey ( forKey . c_str ( ) ) ) ) <nl> - { <nl> + auto it = _textures . find ( key ) ; <nl> + if ( it ! = _textures . end ( ) ) { <nl> + texture = it - > second ; <nl> break ; <nl> } <nl> <nl> Texture2D * TextureCache : : addUIImage ( Image * image , const char * key ) <nl> texture = new Texture2D ( ) ; <nl> texture - > initWithImage ( image ) ; <nl> <nl> - if ( key & & texture ) <nl> + if ( texture ) <nl> { <nl> - _textures - > setObject ( texture , forKey . c_str ( ) ) ; <nl> + _textures . insert ( std : : make_pair ( key , texture ) ) ; <nl> + texture - > retain ( ) ; <nl> + <nl> texture - > autorelease ( ) ; <nl> } <nl> else <nl> Texture2D * TextureCache : : addUIImage ( Image * image , const char * key ) <nl> <nl> void TextureCache : : removeAllTextures ( ) <nl> { <nl> - _textures - > removeAllObjects ( ) ; <nl> + for ( auto it = _textures . begin ( ) ; it ! = _textures . end ( ) ; + + it ) { <nl> + ( it - > second ) - > release ( ) ; <nl> + } <nl> + _textures . clear ( ) ; <nl> } <nl> <nl> void TextureCache : : removeUnusedTextures ( ) <nl> { <nl> - / * <nl> - DictElement * pElement = NULL ; <nl> - CCDICT_FOREACH ( _textures , pElement ) <nl> - { <nl> - CCLOG ( " cocos2d : TextureCache : texture : % s " , pElement - > getStrKey ( ) ) ; <nl> - Texture2D * value = static_cast < Texture2D * > ( pElement - > getObject ( ) ) ; <nl> - if ( value - > retainCount ( ) = = 1 ) <nl> - { <nl> - CCLOG ( " cocos2d : TextureCache : removing unused texture : % s " , pElement - > getStrKey ( ) ) ; <nl> - _textures - > removeObjectForElememt ( pElement ) ; <nl> - } <nl> - } <nl> - * / <nl> - <nl> - / * * Inter engineer zhuoshi sun finds that this way will get better performance <nl> - * / <nl> - if ( _textures - > count ( ) ) <nl> - { <nl> - / / find elements to be removed <nl> - DictElement * pElement = NULL ; <nl> - list < DictElement * > elementToRemove ; <nl> - CCDICT_FOREACH ( _textures , pElement ) <nl> - { <nl> - CCLOG ( " cocos2d : TextureCache : texture : % s " , pElement - > getStrKey ( ) ) ; <nl> - Texture2D * value = static_cast < Texture2D * > ( pElement - > getObject ( ) ) ; <nl> - if ( value - > retainCount ( ) = = 1 ) <nl> - { <nl> - elementToRemove . 
push_back ( pElement ) ; <nl> - } <nl> - } <nl> - <nl> - / / remove elements <nl> - for ( auto iter = elementToRemove . begin ( ) ; iter ! = elementToRemove . end ( ) ; + + iter ) <nl> - { <nl> - CCLOG ( " cocos2d : TextureCache : removing unused texture : % s " , ( * iter ) - > getStrKey ( ) ) ; <nl> - _textures - > removeObjectForElememt ( * iter ) ; <nl> + for ( auto it = _textures . cbegin ( ) ; it ! = _textures . cend ( ) ; / * nothing * / ) { <nl> + Texture2D * tex = it - > second ; <nl> + if ( tex - > retainCount ( ) = = 1 ) { <nl> + CCLOG ( " cocos2d : TextureCache : removing unused texture : % s " , it - > first . c_str ( ) ) ; <nl> + <nl> + tex - > release ( ) ; <nl> + _textures . erase ( it + + ) ; <nl> + } else { <nl> + + + it ; <nl> } <nl> + <nl> } <nl> } <nl> <nl> void TextureCache : : removeTexture ( Texture2D * texture ) <nl> return ; <nl> } <nl> <nl> - Array * keys = _textures - > allKeysForObject ( texture ) ; <nl> - _textures - > removeObjectsForKeys ( keys ) ; <nl> + for ( auto it = _textures . cbegin ( ) ; it ! = _textures . cend ( ) ; / * nothing * / ) { <nl> + if ( it - > second = = texture ) { <nl> + texture - > release ( ) ; <nl> + _textures . erase ( it + + ) ; <nl> + break ; <nl> + } else <nl> + + + it ; <nl> + } <nl> } <nl> <nl> - void TextureCache : : removeTextureForKey ( const char * textureKeyName ) <nl> + void TextureCache : : removeTextureForKey ( const std : : string & textureKeyName ) <nl> { <nl> - if ( textureKeyName = = NULL ) <nl> - { <nl> - return ; <nl> + auto it = _textures . find ( textureKeyName ) ; <nl> + if ( it ! = _textures . end ( ) ) { <nl> + ( it - > second ) - > release ( ) ; <nl> + _textures . erase ( it ) ; <nl> } <nl> - <nl> - string fullPath = FileUtils : : getInstance ( ) - > fullPathForFilename ( textureKeyName ) ; <nl> - _textures - > removeObjectForKey ( fullPath ) ; <nl> } <nl> <nl> - Texture2D * TextureCache : : textureForKey ( const char * key ) <nl> + Texture2D * TextureCache : : getTextureForKey ( const std : : string & key ) const <nl> { <nl> - return static_cast < Texture2D * > ( _textures - > objectForKey ( FileUtils : : getInstance ( ) - > fullPathForFilename ( key ) ) ) ; <nl> + auto it = _textures . find ( key ) ; <nl> + if ( it ! = _textures . end ( ) ) <nl> + return it - > second ; <nl> + return NULL ; <nl> } <nl> <nl> void TextureCache : : reloadAllTextures ( ) <nl> void TextureCache : : reloadAllTextures ( ) <nl> # endif <nl> } <nl> <nl> - void TextureCache : : dumpCachedTextureInfo ( ) <nl> + void TextureCache : : dumpCachedTextureInfo ( ) const <nl> { <nl> unsigned int count = 0 ; <nl> unsigned int totalBytes = 0 ; <nl> <nl> - DictElement * pElement = NULL ; <nl> - CCDICT_FOREACH ( _textures , pElement ) <nl> - { <nl> - Texture2D * tex = static_cast < Texture2D * > ( pElement - > getObject ( ) ) ; <nl> + for ( auto it = _textures . begin ( ) ; it ! = _textures . end ( ) ; + + it ) { <nl> + <nl> + Texture2D * tex = it - > second ; <nl> unsigned int bpp = tex - > getBitsPerPixelForFormat ( ) ; <nl> / / Each texture takes up width * height * bytesPerPixel bytes . <nl> unsigned int bytes = tex - > getPixelsWide ( ) * tex - > getPixelsHigh ( ) * bpp / 8 ; <nl> totalBytes + = bytes ; <nl> count + + ; <nl> - CCLOG ( " cocos2d : \ " % s \ " rc = % lu id = % lu % lu x % lu @ % ld bpp = > % lu KB " , <nl> - pElement - > getStrKey ( ) , <nl> + log ( " cocos2d : \ " % s \ " rc = % lu id = % lu % lu x % lu @ % ld bpp = > % lu KB " , <nl> + it - > first . 
c_str ( ) , <nl> ( long ) tex - > retainCount ( ) , <nl> ( long ) tex - > getName ( ) , <nl> ( long ) tex - > getPixelsWide ( ) , <nl> void TextureCache : : dumpCachedTextureInfo ( ) <nl> ( long ) bytes / 1024 ) ; <nl> } <nl> <nl> - CCLOG ( " cocos2d : TextureCache dumpDebugInfo : % ld textures , for % lu KB ( % . 2f MB ) " , ( long ) count , ( long ) totalBytes / 1024 , totalBytes / ( 1024 . 0f * 1024 . 0f ) ) ; <nl> + log ( " cocos2d : TextureCache dumpDebugInfo : % ld textures , for % lu KB ( % . 2f MB ) " , ( long ) count , ( long ) totalBytes / 1024 , totalBytes / ( 1024 . 0f * 1024 . 0f ) ) ; <nl> } <nl> <nl> # if CC_ENABLE_CACHE_TEXTURE_DATA <nl> void VolatileTexture : : reloadAllTextures ( ) <nl> { <nl> case kImageFile : <nl> { <nl> - Image * pImage = new Image ( ) ; <nl> + Image * image = new Image ( ) ; <nl> unsigned long nSize = 0 ; <nl> unsigned char * pBuffer = FileUtils : : getInstance ( ) - > getFileData ( vt - > _fileName . c_str ( ) , " rb " , & nSize ) ; <nl> <nl> - if ( pImage & & pImage - > initWithImageData ( pBuffer , nSize ) ) <nl> + if ( image & & image - > initWithImageData ( pBuffer , nSize ) ) <nl> { <nl> Texture2D : : PixelFormat oldPixelFormat = Texture2D : : getDefaultAlphaPixelFormat ( ) ; <nl> Texture2D : : setDefaultAlphaPixelFormat ( vt - > _pixelFormat ) ; <nl> - vt - > _texture - > initWithImage ( pImage ) ; <nl> + vt - > _texture - > initWithImage ( image ) ; <nl> Texture2D : : setDefaultAlphaPixelFormat ( oldPixelFormat ) ; <nl> } <nl> <nl> CC_SAFE_DELETE_ARRAY ( pBuffer ) ; <nl> - CC_SAFE_RELEASE ( pImage ) ; <nl> + CC_SAFE_RELEASE ( image ) ; <nl> } <nl> break ; <nl> case kImageData : <nl> mmm a / cocos2dx / textures / CCTextureCache . h <nl> ppp b / cocos2dx / textures / CCTextureCache . h <nl> THE SOFTWARE . <nl> # include < condition_variable > <nl> # include < queue > <nl> # include < string > <nl> + # include < unordered_map > <nl> <nl> # include " cocoa / CCObject . h " <nl> - # include " cocoa / CCDictionary . h " <nl> # include " textures / CCTexture2D . h " <nl> # include " platform / CCImage . h " <nl> <nl> class CC_DLL TextureCache : public Object <nl> <nl> const char * description ( void ) const ; <nl> <nl> - Dictionary * snapshotTextures ( ) ; <nl> + / / Dictionary * snapshotTextures ( ) ; <nl> <nl> - / * * Returns a Texture2D object given an file image <nl> - * If the file image was not previously loaded , it will create a new Texture2D <nl> + / * * Returns a Texture2D object given an filename . <nl> + * If the filename was not previously loaded , it will create a new Texture2D <nl> * object and it will return it . It will use the filename as a key . <nl> * Otherwise it will return a reference of a previously loaded image . <nl> * Supported image extensions : . png , . bmp , . tiff , . jpeg , . pvr , . gif <nl> * / <nl> - Texture2D * addImage ( const char * fileimage ) ; <nl> + Texture2D * addImage ( const std : : string & filepath ) ; <nl> <nl> / * Returns a Texture2D object given a file image <nl> * If the file image was not previously loaded , it will create a new Texture2D object and it will return it . <nl> class CC_DLL TextureCache : public Object <nl> * Supported image extensions : . png , . jpg <nl> * @ since v0 . 
8 <nl> * / <nl> - virtual void addImageAsync ( const char * path , Object * target , SEL_CallFuncO selector ) ; <nl> + virtual void addImageAsync ( const std : : string & filepath , Object * target , SEL_CallFuncO selector ) ; <nl> <nl> - / * * Returns a Texture2D object given an UIImage image <nl> + / * * Returns a Texture2D object given an Image . <nl> * If the image was not previously loaded , it will create a new Texture2D object and it will return it . <nl> - * Otherwise it will return a reference of a previously loaded image <nl> + * Otherwise it will return a reference of a previously loaded image . <nl> * The " key " parameter will be used as the " key " for the cache . <nl> * If " key " is nil , then a new texture will be created each time . <nl> * / <nl> - Texture2D * addUIImage ( Image * image , const char * key ) ; <nl> + Texture2D * addImage ( Image * image , const std : : string & key ) ; <nl> + CC_DEPRECATED_ATTRIBUTE Texture2D * addUIImage ( Image * image , const char * key ) { return addImage ( image , key ) ; } <nl> <nl> / * * Returns an already created texture . Returns nil if the texture doesn ' t exist . <nl> @ since v0 . 99 . 5 <nl> * / <nl> - Texture2D * textureForKey ( const char * key ) ; <nl> + Texture2D * getTextureForKey ( const std : : string & key ) const ; <nl> + CC_DEPRECATED_ATTRIBUTE Texture2D * textureForKey ( const char * key ) const { return getTextureForKey ( key ) ; } <nl> <nl> / * * Purges the dictionary of loaded textures . <nl> * Call this method if you receive the " Memory Warning " <nl> class CC_DLL TextureCache : public Object <nl> / * * Deletes a texture from the cache given a its key name <nl> @ since v0 . 99 . 4 <nl> * / <nl> - void removeTextureForKey ( const char * textureKeyName ) ; <nl> + void removeTextureForKey ( const std : : string & key ) ; <nl> <nl> / * * Output to CCLOG the current contents of this TextureCache <nl> * This will attempt to calculate the size of each texture , and the total texture memory in use <nl> * <nl> * @ since v1 . 0 <nl> * / <nl> - void dumpCachedTextureInfo ( ) ; <nl> + void dumpCachedTextureInfo ( ) const ; <nl> <nl> private : <nl> void addImageAsyncCallBack ( float dt ) ; <nl> class CC_DLL TextureCache : public Object <nl> public : <nl> AsyncStruct ( const std : : string & fn , Object * t , SEL_CallFuncO s ) : filename ( fn ) , target ( t ) , selector ( s ) { } <nl> <nl> - std : : string filename ; <nl> - Object * target ; <nl> - SEL_CallFuncO selector ; <nl> + std : : string filename ; <nl> + Object * target ; <nl> + SEL_CallFuncO selector ; <nl> } ; <nl> <nl> protected : <nl> class CC_DLL TextureCache : public Object <nl> <nl> int _asyncRefCount ; <nl> <nl> - Dictionary * _textures ; <nl> + std : : unordered_map < std : : string , Texture2D * > _textures ; <nl> <nl> static TextureCache * _sharedTextureCache ; <nl> } ; <nl> mmm a / samples / Cpp / TestCpp / Classes / DataVisitorTest / DataVisitorTest . cpp <nl> ppp b / samples / Cpp / TestCpp / Classes / DataVisitorTest / DataVisitorTest . cpp <nl> void PrettyPrinterDemo : : onEnter ( ) <nl> <nl> vistor . clear ( ) ; <nl> addSprite ( ) ; <nl> - dict = TextureCache : : getInstance ( ) - > snapshotTextures ( ) ; <nl> - dict - > acceptVisitor ( vistor ) ; <nl> - log ( " % s " , vistor . getResult ( ) . c_str ( ) ) ; <nl> + / / dict = TextureCache : : getInstance ( ) - > snapshotTextures ( ) ; <nl> + / / dict - > acceptVisitor ( vistor ) ; <nl> + / / log ( " % s " , vistor . getResult ( ) . 
c_str ( ) ) ; <nl> } <nl> <nl> void DataVisitorTestScene : : runThisTest ( ) <nl> mmm a / samples / Cpp / TestCpp / Classes / PerformanceTest / PerformanceAllocTest . cpp <nl> ppp b / samples / Cpp / TestCpp / Classes / PerformanceTest / PerformanceAllocTest . cpp <nl> void NodeDeallocTest : : initWithQuantityOfNodes ( unsigned int nNodes ) <nl> { <nl> PerformceAllocScene : : initWithQuantityOfNodes ( nNodes ) ; <nl> <nl> - printf ( " Size of Sprite : % lu \ n " , sizeof ( Node ) ) ; <nl> + printf ( " Size of Node : % lu \ n " , sizeof ( Node ) ) ; <nl> <nl> scheduleUpdate ( ) ; <nl> } <nl> void SpriteCreateEmptyTest : : initWithQuantityOfNodes ( unsigned int nNodes ) <nl> { <nl> PerformceAllocScene : : initWithQuantityOfNodes ( nNodes ) ; <nl> <nl> - printf ( " Size of Node : % lu \ n " , sizeof ( Sprite ) ) ; <nl> + printf ( " Size of Sprite : % lu \ n " , sizeof ( Sprite ) ) ; <nl> <nl> scheduleUpdate ( ) ; <nl> } <nl> void SpriteCreateTest : : initWithQuantityOfNodes ( unsigned int nNodes ) <nl> { <nl> PerformceAllocScene : : initWithQuantityOfNodes ( nNodes ) ; <nl> <nl> - printf ( " Size of Node : % lu \ n " , sizeof ( Sprite ) ) ; <nl> + printf ( " Size of Sprite : % lu \ n " , sizeof ( Sprite ) ) ; <nl> <nl> scheduleUpdate ( ) ; <nl> } <nl> void SpriteCreateTest : : update ( float dt ) <nl> <nl> std : : string SpriteCreateTest : : title ( ) <nl> { <nl> - return " Create Sprite . " ; <nl> + return " Create Sprite " ; <nl> } <nl> <nl> std : : string SpriteCreateTest : : subtitle ( ) <nl> void SpriteDeallocTest : : initWithQuantityOfNodes ( unsigned int nNodes ) <nl> { <nl> PerformceAllocScene : : initWithQuantityOfNodes ( nNodes ) ; <nl> <nl> - printf ( " Size of Node : % lu \ n " , sizeof ( Sprite ) ) ; <nl> + printf ( " Size of sprite : % lu \ n " , sizeof ( Sprite ) ) ; <nl> <nl> scheduleUpdate ( ) ; <nl> } <nl> mmm a / samples / Cpp / TestCpp / Classes / RenderTextureTest / RenderTextureTest . cpp <nl> ppp b / samples / Cpp / TestCpp / Classes / RenderTextureTest / RenderTextureTest . cpp <nl> string RenderTextureSave : : subtitle ( ) <nl> return " Press ' Save Image ' to create an snapshot of the render texture " ; <nl> } <nl> <nl> - void RenderTextureSave : : clearImage ( cocos2d : : Object * pSender ) <nl> + void RenderTextureSave : : clearImage ( cocos2d : : Object * sender ) <nl> { <nl> _target - > clear ( CCRANDOM_0_1 ( ) , CCRANDOM_0_1 ( ) , CCRANDOM_0_1 ( ) , CCRANDOM_0_1 ( ) ) ; <nl> } <nl> <nl> - void RenderTextureSave : : saveImage ( cocos2d : : Object * pSender ) <nl> + void RenderTextureSave : : saveImage ( cocos2d : : Object * sender ) <nl> { <nl> static int counter = 0 ; <nl> <nl> void RenderTextureSave : : saveImage ( cocos2d : : Object * pSender ) <nl> _target - > saveToFile ( jpg , Image : : Format : : JPG ) ; <nl> <nl> <nl> - auto pImage = _target - > newImage ( ) ; <nl> + auto image = _target - > newImage ( ) ; <nl> <nl> - auto tex = TextureCache : : getInstance ( ) - > addUIImage ( pImage , png ) ; <nl> + auto tex = TextureCache : : getInstance ( ) - > addImage ( image , png ) ; <nl> <nl> - CC_SAFE_DELETE ( pImage ) ; <nl> + CC_SAFE_DELETE ( image ) ; <nl> <nl> auto sprite = Sprite : : createWithTexture ( tex ) ; <nl> <nl> | Merge pull request from ricardoquesada / fileutils_perf_improvements | cocos2d/cocos2d-x | c5b113c35e7ea0c0cad5bfae8d0c8efb1c71e415 | 2013-09-09T02:50:12Z |
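Editor's note: the cocos2d-x change above swaps the Object-based Dictionary caches in ShaderCache and TextureCache for std::unordered_map<std::string, ...> and renames addUIImage / textureForKey to addImage / getTextureForKey (the old names stay behind as CC_DEPRECATED_ATTRIBUTE wrappers). A minimal caller-side sketch of the new interface follows; only the TextureCache methods and CCASSERT come from the diff, while the helper function and the key string are illustrative assumptions.

// Sketch of the post-refactor TextureCache API (cocos2d-x 3.x era), assuming
// the addImage / getTextureForKey signatures introduced in the diff above.
#include "cocos2d.h"

USING_NS_CC;

static Texture2D* cacheGeneratedImage(Image* image)
{
    TextureCache* cache = TextureCache::getInstance();

    // std::string keys replace the old const char* / Dictionary lookups; a
    // leading '/' keeps the key from colliding with real file paths, as the
    // diff does for CC_2x2_WHITE_IMAGE_KEY.
    const std::string key = "/example_generated_image";   // hypothetical key

    // addImage(Image*, key) replaces the deprecated addUIImage(Image*, const char*)
    Texture2D* texture = cache->addImage(image, key);

    // getTextureForKey returns the cached texture, or NULL if the key is unknown
    CCASSERT(cache->getTextureForKey(key) == texture, "texture should be cached under its key");

    return texture;
}

Note that the map-based cache now retains each texture explicitly on insert and releases it in removeAllTextures / removeUnusedTextures / the destructor, which is why those loops in the diff call release() by hand.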
mmm a / arangod / Replication / DatabaseInitialSyncer . cpp <nl> ppp b / arangod / Replication / DatabaseInitialSyncer . cpp <nl> Result DatabaseInitialSyncer : : runWithInventory ( bool incremental , <nl> } <nl> } <nl> <nl> + / / / @ brief returns the inventory <nl> + Result DatabaseInitialSyncer : : inventory ( VPackBuilder & builder ) { <nl> + if ( _client = = nullptr | | _connection = = nullptr | | _endpoint = = nullptr ) { <nl> + return Result ( TRI_ERROR_INTERNAL , " invalid endpoint " ) ; <nl> + } <nl> + <nl> + auto r = sendStartBatch ( ) ; <nl> + if ( r . fail ( ) ) { <nl> + return r ; <nl> + } <nl> + <nl> + TRI_DEFER ( sendFinishBatch ( ) ) ; <nl> + <nl> + / / caller did not supply an inventory , we need to fetch it <nl> + return fetchInventory ( builder ) ; <nl> + } <nl> + <nl> / / / @ brief check whether the initial synchronization should be aborted <nl> bool DatabaseInitialSyncer : : isAborted ( ) const { <nl> if ( application_features : : ApplicationServer : : isStopping ( ) | | <nl> mmm a / arangod / Replication / DatabaseInitialSyncer . h <nl> ppp b / arangod / Replication / DatabaseInitialSyncer . h <nl> class DatabaseInitialSyncer final : public InitialSyncer { <nl> / / / in rocksdb for a constant view of the data <nl> double batchUpdateTime ( ) const { return _batchUpdateTime ; } <nl> <nl> + / / / @ brief fetch the server ' s inventory , public method <nl> + Result inventory ( arangodb : : velocypack : : Builder & builder ) ; <nl> + <nl> private : <nl> - <nl> + / / / @ brief fetch the server ' s inventory <nl> + Result fetchInventory ( arangodb : : velocypack : : Builder & builder ) ; <nl> + <nl> / / / @ brief set a progress message <nl> void setProgress ( std : : string const & msg ) override ; <nl> <nl> class DatabaseInitialSyncer final : public InitialSyncer { <nl> Result handleCollection ( arangodb : : velocypack : : Slice const & , <nl> arangodb : : velocypack : : Slice const & , bool incremental , <nl> sync_phase_e ) ; <nl> - <nl> - / / / @ brief fetch the server ' s inventory <nl> - Result fetchInventory ( arangodb : : velocypack : : Builder & builder ) ; <nl> - <nl> + <nl> / / / @ brief handle the inventory response of the master <nl> Result handleLeaderCollections ( arangodb : : velocypack : : Slice const & , bool ) ; <nl> <nl> class DatabaseInitialSyncer final : public InitialSyncer { <nl> <nl> } / / arangodb <nl> <nl> - # endif <nl> \ No newline at end of file <nl> + # endif <nl> mmm a / arangod / Replication / DatabaseTailingSyncer . cpp <nl> ppp b / arangod / Replication / DatabaseTailingSyncer . cpp <nl> Result DatabaseTailingSyncer : : syncCollectionFinalize ( std : : string const & collecti <nl> LOG_TOPIC ( DEBUG , Logger : : REPLICATION ) < < " Fetching more data fromTick " < < fromTick ; <nl> } <nl> } <nl> + <nl> + bool DatabaseTailingSyncer : : skipMarker ( VPackSlice const & slice ) { <nl> + / / we do not have a " cname " attribute in the marker . . . <nl> + / / now check for a globally unique id attribute ( " cuid " ) <nl> + / / if its present , then we will use our local cuid - > collection name <nl> + / / translation table <nl> + VPackSlice const name = slice . get ( " cuid " ) ; <nl> + if ( ! name . isString ( ) ) { <nl> + return false ; <nl> + } <nl> + <nl> + if ( _translations . empty ( ) ) { <nl> + / / no translations yet . . . query master inventory to find names of all <nl> + / / collections <nl> + try { <nl> + DatabaseInitialSyncer init ( * _vocbase , _configuration ) ; <nl> + VPackBuilder inventoryResponse ; <nl> + Result res = init . 
inventory ( inventoryResponse ) ; <nl> + if ( res . fail ( ) ) { <nl> + LOG_TOPIC ( ERR , Logger : : REPLICATION ) < < " got error while fetching master inventory for collection name translations : " < < res . errorMessage ( ) ; <nl> + return false ; <nl> + } <nl> + VPackSlice invSlice = inventoryResponse . slice ( ) ; <nl> + if ( ! invSlice . isObject ( ) ) { <nl> + return false ; <nl> + } <nl> + invSlice = invSlice . get ( " collections " ) ; <nl> + if ( ! invSlice . isArray ( ) ) { <nl> + return false ; <nl> + } <nl> + <nl> + for ( auto const & it : VPackArrayIterator ( invSlice ) ) { <nl> + if ( ! it . isObject ( ) ) { <nl> + continue ; <nl> + } <nl> + VPackSlice c = it . get ( " parameters " ) ; <nl> + if ( c . hasKey ( " name " ) & & c . hasKey ( " globallyUniqueId " ) ) { <nl> + _translations [ c . get ( " globallyUniqueId " ) . copyString ( ) ] = c . get ( " name " ) . copyString ( ) ; <nl> + } <nl> + } <nl> + } catch ( std : : exception const & ex ) { <nl> + LOG_TOPIC ( ERR , Logger : : REPLICATION ) < < " got error while fetching inventory : " < < ex . what ( ) ; <nl> + return false ; <nl> + } <nl> + } <nl> + <nl> + / / look up cuid in translations map <nl> + auto it = _translations . find ( name . copyString ( ) ) ; <nl> + <nl> + if ( it ! = _translations . end ( ) ) { <nl> + return isExcludedCollection ( ( * it ) . second ) ; <nl> + } <nl> + <nl> + return false ; <nl> + } <nl> mmm a / arangod / Replication / DatabaseTailingSyncer . h <nl> ppp b / arangod / Replication / DatabaseTailingSyncer . h <nl> class DatabaseTailingSyncer final : public TailingSyncer { <nl> return & ( vocbases ( ) . begin ( ) - > second . database ( ) ) ; <nl> } <nl> <nl> + / / / @ brief whether or not we should skip a specific marker <nl> + bool skipMarker ( arangodb : : velocypack : : Slice const & slice ) override ; <nl> + <nl> private : <nl> <nl> / / / @ brief vocbase to use for this run <nl> TRI_vocbase_t * _vocbase ; <nl> - <nl> + <nl> + / / / @ brief translation between globallyUniqueId and collection name <nl> + std : : unordered_map < std : : string , std : : string > _translations ; <nl> } ; <nl> } <nl> <nl> mmm a / arangod / Replication / GlobalInitialSyncer . cpp <nl> ppp b / arangod / Replication / GlobalInitialSyncer . cpp <nl> Result GlobalInitialSyncer : : updateServerInventory ( VPackSlice const & masterDataba <nl> return TRI_ERROR_NO_ERROR ; <nl> } <nl> <nl> + / / / @ brief returns the inventory <nl> + Result GlobalInitialSyncer : : inventory ( VPackBuilder & builder ) { <nl> + if ( _client = = nullptr | | _connection = = nullptr | | _endpoint = = nullptr ) { <nl> + return Result ( TRI_ERROR_INTERNAL , " invalid endpoint " ) ; <nl> + } else if ( application_features : : ApplicationServer : : isStopping ( ) ) { <nl> + return Result ( TRI_ERROR_SHUTTING_DOWN ) ; <nl> + } <nl> + <nl> + auto r = sendStartBatch ( ) ; <nl> + if ( r . fail ( ) ) { <nl> + return r ; <nl> + } <nl> + <nl> + TRI_DEFER ( sendFinishBatch ( ) ) ; <nl> + <nl> + / / caller did not supply an inventory , we need to fetch it <nl> + return fetchInventory ( builder ) ; <nl> + } <nl> + <nl> Result GlobalInitialSyncer : : fetchInventory ( VPackBuilder & builder ) { <nl> std : : string url = ReplicationUrl + " / inventory ? serverId = " + _localServerIdString + <nl> " & batchId = " + std : : to_string ( _batchId ) + " & global = true " ; <nl> mmm a / arangod / Replication / GlobalInitialSyncer . h <nl> ppp b / arangod / Replication / GlobalInitialSyncer . 
h <nl> class GlobalInitialSyncer final : public InitialSyncer { <nl> / / / public method , catches exceptions <nl> arangodb : : Result run ( bool incremental ) override ; <nl> <nl> + / / / @ brief fetch the server ' s inventory , public method <nl> + Result inventory ( arangodb : : velocypack : : Builder & builder ) ; <nl> + <nl> private : <nl> <nl> / / / @ brief run method , performs a full synchronization <nl> mmm a / arangod / Replication / GlobalTailingSyncer . cpp <nl> ppp b / arangod / Replication / GlobalTailingSyncer . cpp <nl> <nl> # include " Replication / GlobalInitialSyncer . h " <nl> # include " Replication / ReplicationFeature . h " <nl> <nl> + # include < velocypack / Iterator . h > <nl> + # include < velocypack / velocypack - aliases . h > <nl> + <nl> using namespace arangodb ; <nl> using namespace arangodb : : basics ; <nl> using namespace arangodb : : httpclient ; <nl> Result GlobalTailingSyncer : : saveApplierState ( ) { <nl> } <nl> return TRI_ERROR_INTERNAL ; <nl> } <nl> + <nl> + bool GlobalTailingSyncer : : skipMarker ( VPackSlice const & slice ) { <nl> + / / we do not have a " cname " attribute in the marker . . . <nl> + / / now check for a globally unique id attribute ( " cuid " ) <nl> + / / if its present , then we will use our local cuid - > collection name <nl> + / / translation table <nl> + VPackSlice const name = slice . get ( " cuid " ) ; <nl> + if ( ! name . isString ( ) ) { <nl> + return false ; <nl> + } <nl> + <nl> + if ( _translations . empty ( ) ) { <nl> + / / no translations yet . . . query master inventory to find names of all <nl> + / / collections <nl> + try { <nl> + GlobalInitialSyncer init ( _configuration ) ; <nl> + VPackBuilder inventoryResponse ; <nl> + Result res = init . inventory ( inventoryResponse ) ; <nl> + if ( res . fail ( ) ) { <nl> + LOG_TOPIC ( ERR , Logger : : REPLICATION ) < < " got error while fetching master inventory for collection name translations : " < < res . errorMessage ( ) ; <nl> + return false ; <nl> + } <nl> + <nl> + VPackSlice invSlice = inventoryResponse . slice ( ) ; <nl> + if ( ! invSlice . isObject ( ) ) { <nl> + return false ; <nl> + } <nl> + invSlice = invSlice . get ( " databases " ) ; <nl> + if ( ! invSlice . isObject ( ) ) { <nl> + return false ; <nl> + } <nl> + <nl> + for ( auto const & it : VPackObjectIterator ( invSlice ) ) { <nl> + VPackSlice dbObj = it . value ; <nl> + if ( ! dbObj . isObject ( ) ) { <nl> + continue ; <nl> + } <nl> + <nl> + dbObj = dbObj . get ( " collections " ) ; <nl> + if ( ! dbObj . isArray ( ) ) { <nl> + return false ; <nl> + } <nl> + <nl> + for ( auto const & it : VPackArrayIterator ( dbObj ) ) { <nl> + if ( ! it . isObject ( ) ) { <nl> + continue ; <nl> + } <nl> + VPackSlice c = it . get ( " parameters " ) ; <nl> + if ( c . hasKey ( " name " ) & & c . hasKey ( " globallyUniqueId " ) ) { <nl> + / / we ' ll store everything for all databases in a global hash table , <nl> + / / as we expect the globally unique ids to be unique . . . <nl> + _translations [ c . get ( " globallyUniqueId " ) . copyString ( ) ] = c . get ( " name " ) . copyString ( ) ; <nl> + } <nl> + } <nl> + } <nl> + } catch ( std : : exception const & ex ) { <nl> + LOG_TOPIC ( ERR , Logger : : REPLICATION ) < < " got error while fetching inventory : " < < ex . what ( ) ; <nl> + return false ; <nl> + } <nl> + } <nl> + <nl> + / / look up cuid in translations map <nl> + auto it = _translations . find ( name . copyString ( ) ) ; <nl> + <nl> + if ( it ! = _translations . 
end ( ) ) { <nl> + return isExcludedCollection ( ( * it ) . second ) ; <nl> + } <nl> + <nl> + return false ; <nl> + } <nl> mmm a / arangod / Replication / GlobalTailingSyncer . h <nl> ppp b / arangod / Replication / GlobalTailingSyncer . h <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> # ifndef ARANGOD_REPLICATION_GLOBAL_CONTINUOUS_SYNCER_H <nl> - # define ARANGOD_REPLICATION_DATABASE_CONTINUOUS_SYNCER_H 1 <nl> + # define ARANGOD_REPLICATION_GLOBAL_CONTINUOUS_SYNCER_H 1 <nl> <nl> # include " TailingSyncer . h " <nl> # include " Replication / GlobalReplicationApplier . h " <nl> class GlobalTailingSyncer : public TailingSyncer { <nl> } <nl> <nl> protected : <nl> - <nl> / / / @ brief resolve to proper base url <nl> std : : string tailingBaseUrl ( std : : string const & command ) override ; <nl> <nl> / / / @ brief save the current applier state <nl> Result saveApplierState ( ) override ; <nl> + <nl> + bool skipMarker ( arangodb : : velocypack : : Slice const & slice ) override ; <nl> + <nl> + private : <nl> + / / / @ brief translation between globallyUniqueId and collection name <nl> + std : : unordered_map < std : : string , std : : string > _translations ; <nl> } ; <nl> } <nl> <nl> mmm a / arangod / Replication / TailingSyncer . cpp <nl> ppp b / arangod / Replication / TailingSyncer . cpp <nl> void TailingSyncer : : abortOngoingTransactions ( ) { <nl> <nl> / / / @ brief whether or not a marker should be skipped <nl> bool TailingSyncer : : skipMarker ( TRI_voc_tick_t firstRegularTick , <nl> - VPackSlice const & slice ) const { <nl> + VPackSlice const & slice ) { <nl> bool tooOld = false ; <nl> std : : string const tick = VelocyPackHelper : : getStringValue ( slice , " tick " , " " ) ; <nl> <nl> bool TailingSyncer : : skipMarker ( TRI_voc_tick_t firstRegularTick , <nl> if ( tooOld ) { <nl> return true ; <nl> } <nl> - <nl> + <nl> / / the transient applier state is just used for one shard / collection <nl> - if ( ! _configuration . _restrictCollections . empty ( ) ) { <nl> - if ( _configuration . _restrictType . empty ( ) & & _configuration . _includeSystem ) { <nl> - return false ; <nl> - } <nl> - <nl> - VPackSlice const name = slice . get ( " cname " ) ; <nl> - if ( name . isString ( ) ) { <nl> - return isExcludedCollection ( name . copyString ( ) ) ; <nl> - } <nl> + if ( _configuration . _restrictCollections . empty ( ) ) { <nl> + return false ; <nl> } <nl> - <nl> - return false ; <nl> + <nl> + if ( _configuration . _restrictType . empty ( ) & & _configuration . _includeSystem ) { <nl> + return false ; <nl> + } <nl> + <nl> + VPackSlice const name = slice . get ( " cname " ) ; <nl> + if ( name . isString ( ) ) { <nl> + return isExcludedCollection ( name . copyString ( ) ) ; <nl> + } <nl> + <nl> + / / call virtual method <nl> + return skipMarker ( slice ) ; <nl> } <nl> <nl> / / / @ brief whether or not a collection should be excluded <nl> mmm a / arangod / Replication / TailingSyncer . h <nl> ppp b / arangod / Replication / TailingSyncer . 
h <nl> class TailingSyncer : public Syncer { <nl> void abortOngoingTransactions ( ) ; <nl> <nl> / / / @ brief whether or not a collection should be excluded <nl> - bool skipMarker ( TRI_voc_tick_t , arangodb : : velocypack : : Slice const & ) const ; <nl> + bool skipMarker ( TRI_voc_tick_t , arangodb : : velocypack : : Slice const & ) ; <nl> <nl> / / / @ brief whether or not a collection should be excluded <nl> bool isExcludedCollection ( std : : string const & ) const ; <nl> class TailingSyncer : public Syncer { <nl> arangodb : : Result runInternal ( ) ; <nl> <nl> protected : <nl> + virtual bool skipMarker ( arangodb : : velocypack : : Slice const & slice ) = 0 ; <nl> <nl> / / / @ brief pointer to the applier <nl> ReplicationApplier * _applier ; <nl> mmm a / js / server / tests / replication / replication - ongoing - global . js <nl> ppp b / js / server / tests / replication / replication - ongoing - global . js <nl> const compare = function ( masterFunc , masterFunc2 , slaveFuncOngoing , slaveFuncFin <nl> while ( replication . globalApplier . state ( ) . state . running ) { <nl> internal . wait ( 0 . 1 , false ) ; <nl> } <nl> + <nl> + applierConfiguration = applierConfiguration | | { } ; <nl> + applierConfiguration . endpoint = masterEndpoint ; <nl> + applierConfiguration . username = " root " ; <nl> + applierConfiguration . password = " " ; <nl> + applierConfiguration . includeSystem = false ; <nl> <nl> var syncResult = replication . syncGlobal ( { <nl> endpoint : masterEndpoint , <nl> const compare = function ( masterFunc , masterFunc2 , slaveFuncOngoing , slaveFuncFin <nl> password : " " , <nl> verbose : true , <nl> includeSystem : false , <nl> - keepBarrier : false <nl> + keepBarrier : false , <nl> + restrictType : applierConfiguration . restrictType , <nl> + restrictCollections : applierConfiguration . restrictCollections <nl> } ) ; <nl> <nl> assertTrue ( syncResult . hasOwnProperty ( ' lastLogTick ' ) ) ; <nl> const compare = function ( masterFunc , masterFunc2 , slaveFuncOngoing , slaveFuncFin <nl> / / use lastLogTick as of now <nl> state . lastLogTick = replication . logger . state ( ) . state . lastLogTick ; <nl> <nl> - applierConfiguration = applierConfiguration | | { } ; <nl> - applierConfiguration . endpoint = masterEndpoint ; <nl> - applierConfiguration . username = " root " ; <nl> - applierConfiguration . password = " " ; <nl> - applierConfiguration . includeSystem = false ; <nl> - <nl> if ( ! applierConfiguration . hasOwnProperty ( ' chunkSize ' ) ) { <nl> applierConfiguration . chunkSize = 16384 ; <nl> } <nl> function BaseTestConfig ( ) { <nl> ' use strict ' ; <nl> <nl> return { <nl> + <nl> + testIncludeCollection : function ( ) { <nl> + connectToMaster ( ) ; <nl> + <nl> + compare ( <nl> + function ( state ) { <nl> + } , <nl> + <nl> + function ( state ) { <nl> + db . _create ( cn ) ; <nl> + db . _create ( cn + " 2 " ) ; <nl> + for ( var i = 0 ; i < 100 ; + + i ) { <nl> + db . _collection ( cn ) . save ( { value : i } ) ; <nl> + db . _collection ( cn + " 2 " ) . save ( { value : i } ) ; <nl> + } <nl> + internal . wal . flush ( true , true ) ; <nl> + } , <nl> + <nl> + function ( state ) { <nl> + return true ; <nl> + } , <nl> + <nl> + function ( state ) { <nl> + assertTrue ( db . _collection ( cn ) . count ( ) = = = 100 ) ; <nl> + assertNull ( db . 
_collection ( cn + " 2 " ) ) ; <nl> + } , <nl> + <nl> + { restrictType : " include " , restrictCollections : [ cn ] } <nl> + ) ; <nl> + } , <nl> + <nl> + testExcludeCollection : function ( ) { <nl> + connectToMaster ( ) ; <nl> + <nl> + compare ( <nl> + function ( state ) { <nl> + } , <nl> + <nl> + function ( state ) { <nl> + db . _create ( cn ) ; <nl> + db . _create ( cn + " 2 " ) ; <nl> + for ( var i = 0 ; i < 100 ; + + i ) { <nl> + db . _collection ( cn ) . save ( { value : i } ) ; <nl> + db . _collection ( cn + " 2 " ) . save ( { value : i } ) ; <nl> + } <nl> + internal . wal . flush ( true , true ) ; <nl> + } , <nl> + <nl> + function ( state ) { <nl> + return true ; <nl> + } , <nl> + <nl> + function ( state ) { <nl> + assertTrue ( db . _collection ( cn ) . count ( ) = = = 100 ) ; <nl> + assertNull ( db . _collection ( cn + " 2 " ) ) ; <nl> + } , <nl> + <nl> + { restrictType : " exclude " , restrictCollections : [ cn + " 2 " ] } <nl> + ) ; <nl> + } , <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / / @ brief test collection creation <nl> | honor " restrictType " and " restrictCollections " when syncing ( ) | arangodb/arangodb | 96cbe699b9dc9611d889a32ea332fc73692f69db | 2018-06-28T17:14:14Z |
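Editor's note: the ArangoDB change above lets the tailing (continuous) syncers honor restrictType / restrictCollections even when a WAL marker carries only a globally unique id ("cuid") instead of a collection name, by lazily querying the master inventory and translating cuid to name. A rough standalone sketch of that lookup is below; the VelocyPack accessors and the isExcludedCollection call mirror the diff, while the free function, its parameters, and the std::function are simplifications, not ArangoDB's actual internals.

// Simplified sketch of the cuid -> collection-name filter added in skipMarker().
// Assumes the translation map was already filled from the master inventory
// (DatabaseInitialSyncer::inventory / GlobalInitialSyncer::inventory in the diff).
#include <functional>
#include <string>
#include <unordered_map>

#include <velocypack/Slice.h>
#include <velocypack/velocypack-aliases.h>

bool shouldSkipByCuid(VPackSlice const& marker,
                      std::unordered_map<std::string, std::string> const& translations,
                      std::function<bool(std::string const&)> const& isExcludedCollection) {
  // markers without a "cuid" attribute cannot be filtered this way
  VPackSlice const cuid = marker.get("cuid");
  if (!cuid.isString()) {
    return false;
  }

  // translate the globally unique id back to a collection name
  auto it = translations.find(cuid.copyString());
  if (it == translations.end()) {
    return false;  // unknown collection: do not skip the marker
  }

  // defer to the existing restrictType / restrictCollections check
  return isExcludedCollection(it->second);
}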
mmm a / atom / browser / ui / views / menu_delegate . cc <nl> ppp b / atom / browser / ui / views / menu_delegate . cc <nl> void MenuDelegate : : RunMenu ( ui : : MenuModel * model , views : : MenuButton * button ) { <nl> <nl> views : : MenuItemView * MenuDelegate : : BuildMenu ( ui : : MenuModel * model ) { <nl> DCHECK_GE ( id_ , 0 ) ; <nl> - DCHECK_LT ( id_ , items_ . size ( ) ) ; <nl> <nl> if ( ! items_ [ id_ ] ) { <nl> views : : MenuModelAdapter * delegate = new views : : MenuModelAdapter ( model ) ; <nl> mmm a / atom / browser / ui / win / notify_icon . cc <nl> ppp b / atom / browser / ui / win / notify_icon . cc <nl> <nl> # include " atom / browser / ui / win / notify_icon . h " <nl> <nl> # include " atom / browser / ui / win / notify_icon_host . h " <nl> - # include " atom / browser / ui / win / menu_2 . h " <nl> # include " base / strings / string_number_conversions . h " <nl> # include " base / strings / utf_string_conversions . h " <nl> # include " base / win / windows_version . h " <nl> | win : Fix compilation error . | electron/electron | ffad6fe884acde5212e05e2d41c9043bf9329714 | 2014-07-21T12:45:31Z |
mmm a / src / clustering / immediate_consistency / branch / multistore . cc <nl> ppp b / src / clustering / immediate_consistency / branch / multistore . cc <nl> void multistore_ptr_t < protocol_t > : : single_shard_backfill ( int i , <nl> <nl> / / TODO : Blithely assing progress along might be broken . <nl> <nl> - store_view_t < protocol_t > * view = store_views [ i ] ; <nl> + store_view_t < protocol_t > * store = store_views [ i ] ; <nl> try { <nl> - view - > send_backfill ( start_point . mask ( view - > get_region ( ) ) , <nl> - boost : : bind ( & multistore_send_backfill_should_backfill_t < protocol_t > : : should_backfill , helper , _1 ) , <nl> - chunk_fun , / / TODO : Do we need to wrap this ? <nl> - progress , <nl> - read_tokens [ i ] , <nl> - interruptor ) ; <nl> + store - > send_backfill ( start_point . mask ( get_region ( i ) ) , <nl> + boost : : bind ( & multistore_send_backfill_should_backfill_t < protocol_t > : : should_backfill , helper , _1 ) , <nl> + chunk_fun , / / TODO : Do we need to wrap this ? <nl> + progress , <nl> + read_tokens [ i ] , <nl> + interruptor ) ; <nl> } catch ( interrupted_exc_t & exc ) { <nl> / / do nothing <nl> } <nl> bool multistore_ptr_t < protocol_t > : : send_multistore_backfill ( const region_map_t < p <nl> int num_stores_assertion , <nl> signal_t * interruptor ) THROWS_ONLY ( interrupted_exc_t ) { <nl> guarantee ( num_stores ( ) = = num_stores_assertion ) ; <nl> + guarantee ( region_is_superset ( get_multistore_joined_region ( ) , start_point . get_domain ( ) ) ) ; <nl> <nl> multistore_send_backfill_should_backfill_t < protocol_t > helper ( num_stores ( ) , get_multistore_joined_region ( ) , should_backfill ) ; <nl> <nl> - pmap ( num_stores ( ) , boost : : bind ( & multistore_ptr_t < protocol_t > : : single_shard_backfill , this , _1 , & helper , start_point , chunk_fun , progress , read_tokens , interruptor ) ) ; <nl> + pmap ( num_stores ( ) , boost : : bind ( & multistore_ptr_t < protocol_t > : : single_shard_backfill , this , _1 , & helper , boost : : ref ( start_point ) , boost : : ref ( chunk_fun ) , progress , read_tokens , interruptor ) ) ; <nl> <nl> if ( interruptor - > is_pulsed ( ) ) { <nl> throw interrupted_exc_t ( ) ; <nl> } <nl> <nl> return helper . get_result ( ) ; <nl> - <nl> - / / TODO : All the parameters must be not marked UNUSED . In particular , progress . <nl> } <nl> <nl> + template < class protocol_t > <nl> + void multistore_ptr_t < protocol_t > : : single_shard_read ( int i , <nl> + DEBUG_ONLY ( const typename protocol_t : : store_t : : metainfo_t & expected_metainfo , ) <nl> + const typename protocol_t : : read_t & read , <nl> + boost : : scoped_ptr < fifo_enforcer_sink_t : : exit_read_t > * read_tokens , <nl> + std : : vector < typename protocol_t : : read_response_t > * responses , <nl> + signal_t * interruptor ) THROWS_NOTHING { <nl> + if ( ! region_overlaps ( get_region ( i ) , read . get_region ( ) ) ) { <nl> + read_tokens [ i ] . reset ( ) ; <nl> + return ; <nl> + } <nl> <nl> + store_view_t < protocol_t > * store = store_views [ i ] ; <nl> + try { <nl> + responses - > push_back ( store - > read ( DEBUG_ONLY ( expected_metainfo . mask ( get_region ( i ) ) , ) <nl> + read . 
shard ( get_region ( i ) ) , <nl> + read_tokens [ i ] , <nl> + interruptor ) ) ; <nl> + } catch ( interrupted_exc_t & exc ) { <nl> + / / do nothing <nl> + } <nl> + } <nl> <nl> template < class protocol_t > <nl> typename protocol_t : : read_response_t <nl> - multistore_ptr_t < protocol_t > : : read ( DEBUG_ONLY ( UNUSED const typename protocol_t : : store_t : : metainfo_t & expected_metainfo , ) <nl> - UNUSED const typename protocol_t : : read_t & read , <nl> - UNUSED boost : : scoped_ptr < fifo_enforcer_sink_t : : exit_read_t > * read_tokens , <nl> - UNUSED int num_stores_assertion , <nl> - UNUSED signal_t * interruptor ) THROWS_ONLY ( interrupted_exc_t ) { <nl> - <nl> - / / TODO : uh , implement . <nl> - <nl> - / / TODO : no unused parameters . <nl> + multistore_ptr_t < protocol_t > : : read ( DEBUG_ONLY ( const typename protocol_t : : store_t : : metainfo_t & expected_metainfo , ) <nl> + const typename protocol_t : : read_t & read , <nl> + boost : : scoped_ptr < fifo_enforcer_sink_t : : exit_read_t > * read_tokens , <nl> + int num_stores_assertion , <nl> + signal_t * interruptor ) THROWS_ONLY ( interrupted_exc_t ) { <nl> + guarantee ( num_stores ( ) = = num_stores_assertion ) ; <nl> + std : : vector < typename protocol_t : : read_response_t > responses ; <nl> + pmap ( num_stores ( ) , boost : : bind ( & multistore_ptr_t < protocol_t > : : single_shard_read , <nl> + this , _1 , DEBUG_ONLY ( boost : : ref ( expected_metainfo ) , ) <nl> + boost : : ref ( read ) , <nl> + read_tokens , <nl> + & responses , <nl> + interruptor ) ) ; <nl> <nl> - return typename protocol_t : : read_response_t ( ) ; <nl> + if ( interruptor - > is_pulsed ( ) ) { <nl> + throw interrupted_exc_t ( ) ; <nl> + } <nl> <nl> + typename protocol_t : : temporary_cache_t fake_cache ; <nl> + return read . unshard ( responses , & fake_cache ) ; <nl> } <nl> <nl> template < class protocol_t > <nl> mmm a / src / clustering / immediate_consistency / branch / multistore . hpp <nl> ppp b / src / clustering / immediate_consistency / branch / multistore . hpp <nl> class multistore_ptr_t { <nl> boost : : scoped_ptr < fifo_enforcer_sink_t : : exit_read_t > * read_tokens , <nl> signal_t * interruptor ) THROWS_NOTHING ; <nl> <nl> + void single_shard_read ( int i , <nl> + DEBUG_ONLY ( const typename protocol_t : : store_t : : metainfo_t & expected_metainfo , ) <nl> + const typename protocol_t : : read_t & read , <nl> + boost : : scoped_ptr < fifo_enforcer_sink_t : : exit_read_t > * read_tokens , <nl> + std : : vector < typename protocol_t : : read_response_t > * responses , <nl> + signal_t * interruptor ) THROWS_NOTHING ; <nl> + <nl> / / Used by the constructors . <nl> void initialize ( store_view_t < protocol_t > * * _store_views , const typename protocol_t : : region_t & _region_mask ) THROWS_NOTHING ; <nl> <nl> | Allegedly implemented multistore_ptr_t : : read . | rethinkdb/rethinkdb | 307385cd66decb666fd1f54cfc346cee5fa2cd03 | 2012-05-10T23:07:33Z |
mmm a / tensorflow / contrib / layers / python / layers / layers . py <nl> ppp b / tensorflow / contrib / layers / python / layers / layers . py <nl> def legacy_convolution2d ( x , <nl> # Simple alias for fully_connected which removes the activation_fn parameter . <nl> legacy_linear = functools . partial ( legacy_fully_connected , activation_fn = None ) <nl> <nl> - linear = functools . partial ( fully_connected , activation_fn = nn . relu ) <nl> - relu = functools . partial ( fully_connected , activation_fn = nn . relu6 ) <nl> - relu6 = functools . partial ( fully_connected , activation_fn = None ) <nl> + linear = legacy_linear <nl> + relu = legacy_relu <nl> + relu6 = legacy_relu6 <nl> <nl> | Fix re - directions of linear , relu , relu6 to legacy_linear . | tensorflow/tensorflow | 35204bc1c89231b2effd00b3f8e6d78164640377 | 2016-05-24T03:23:21Z |
mmm a / Examples / ReinforcementLearning / DeepQNeuralNetwork . py <nl> ppp b / Examples / ReinforcementLearning / DeepQNeuralNetwork . py <nl> def __init__ ( self , input_shape , nb_actions , <nl> self . _explorer = explorer <nl> self . _minibatch_size = minibatch_size <nl> self . _history = History ( input_shape ) <nl> - self . _memory = ReplayMemory ( 500000 , input_shape , 4 ) <nl> + self . _memory = ReplayMemory ( 500000 , input_shape [ 1 : ] , 4 ) <nl> self . _action_taken = 0 <nl> <nl> # Metrics accumulator <nl> def act ( self , state ) : <nl> # Use the network to output the best action <nl> env_with_history = self . _history . value <nl> q_values = self . _action_value_net . eval ( <nl> - env_with_history . reshape ( ( 1 , ) + state . shape ) # Append batch axis with only one sample to evaluate <nl> + env_with_history . reshape ( ( 1 , ) + env_with_history . shape ) # Append batch axis with only one sample to evaluate <nl> ) <nl> <nl> self . _episode_q_means . append ( np . mean ( self . _episode_q_means ) ) <nl> | Fixing input shapes | microsoft/CNTK | 5a6146dd6e5df09c6caca8f5782e8a8b878962d8 | 2017-03-29T17:49:36Z |
mmm a / xbmc / cores / dvdplayer / DVDPlayerVideo . cpp <nl> ppp b / xbmc / cores / dvdplayer / DVDPlayerVideo . cpp <nl> void CDVDPlayerVideo : : Process ( ) <nl> <nl> m_stalled = true ; <nl> m_started = false ; <nl> + <nl> + g_renderManager . DiscardBuffer ( ) ; <nl> } <nl> else if ( pMsg - > IsType ( CDVDMsg : : VIDEO_NOSKIP ) ) <nl> { <nl> | Merge pull request from FernetMenta / flush | xbmc/xbmc | afda4947f1a08ec28003f0a80512d702ef9b5ced | 2015-07-21T15:17:38Z |
mmm a / tensorflow / contrib / seq2seq / python / ops / helper . py <nl> ppp b / tensorflow / contrib / seq2seq / python / ops / helper . py <nl> def batch_size ( self ) : <nl> " " " <nl> raise NotImplementedError ( " batch_size has not been implemented " ) <nl> <nl> + @ abc . abstractproperty <nl> + def input_shape ( self ) : <nl> + " " " Shape of each input element in batch . <nl> + <nl> + Returns a ` TensorShape ` . <nl> + " " " <nl> + raise NotImplementedError ( " input_shape has not been implemented " ) <nl> + <nl> @ abc . abstractproperty <nl> def sample_ids_shape ( self ) : <nl> " " " Shape of tensor returned by ` sample ` , excluding the batch dimension . <nl> def __init__ ( self , initialize_fn , sample_fn , next_inputs_fn , <nl> self . _sample_fn = sample_fn <nl> self . _next_inputs_fn = next_inputs_fn <nl> self . _batch_size = None <nl> + self . _input_shape = None <nl> self . _sample_ids_shape = tensor_shape . TensorShape ( sample_ids_shape or [ ] ) <nl> self . _sample_ids_dtype = sample_ids_dtype or dtypes . int32 <nl> <nl> def initialize ( self , name = None ) : <nl> ( finished , next_inputs ) = self . _initialize_fn ( ) <nl> if self . _batch_size is None : <nl> self . _batch_size = array_ops . size ( finished ) <nl> + if self . _input_shape is None : <nl> + self . _input_shape = next_inputs . shape [ 1 : ] <nl> return ( finished , next_inputs ) <nl> <nl> def sample ( self , time , outputs , state , name = None ) : <nl> def __init__ ( self , inputs , sequence_length , time_major = False , name = None ) : <nl> " " " <nl> with ops . name_scope ( name , " TrainingHelper " , [ inputs , sequence_length ] ) : <nl> inputs = ops . convert_to_tensor ( inputs , name = " inputs " ) <nl> + self . _inputs = inputs <nl> if not time_major : <nl> inputs = nest . map_structure ( _transpose_batch_time , inputs ) <nl> <nl> def __init__ ( self , inputs , sequence_length , time_major = False , name = None ) : <nl> lambda inp : array_ops . zeros_like ( inp [ 0 , : ] ) , inputs ) <nl> <nl> self . _batch_size = array_ops . size ( sequence_length ) <nl> + self . _input_shape = inputs . shape [ 2 : ] <nl> <nl> @ property <nl> def batch_size ( self ) : <nl> return self . _batch_size <nl> <nl> + @ property <nl> + def input_shape ( self ) : <nl> + return self . _input_shape <nl> + <nl> @ property <nl> def sample_ids_shape ( self ) : <nl> return tensor_shape . TensorShape ( [ ] ) <nl> def sample_ids_shape ( self ) : <nl> def sample_ids_dtype ( self ) : <nl> return dtypes . int32 <nl> <nl> + @ property <nl> + def inputs ( self ) : <nl> + return self . _inputs <nl> + <nl> + @ property <nl> + def sequence_length ( self ) : <nl> + return self . _sequence_length <nl> + <nl> def initialize ( self , name = None ) : <nl> with ops . name_scope ( name , " TrainingHelperInitialize " ) : <nl> finished = math_ops . equal ( 0 , self . _sequence_length ) <nl> def __init__ ( self , embedding , start_tokens , end_token ) : <nl> if self . _end_token . get_shape ( ) . ndims ! = 0 : <nl> raise ValueError ( " end_token must be a scalar " ) <nl> self . _start_inputs = self . _embedding_fn ( self . _start_tokens ) <nl> + self . _input_shape = self . _start_inputs . shape [ 1 : ] <nl> <nl> @ property <nl> def batch_size ( self ) : <nl> return self . _batch_size <nl> <nl> + @ property <nl> + def input_shape ( self ) : <nl> + return self . _input_shape <nl> + <nl> @ property <nl> def sample_ids_shape ( self ) : <nl> return tensor_shape . 
TensorShape ( [ ] ) <nl> def __init__ ( self , sample_fn , sample_shape , sample_dtype , <nl> self . _sample_dtype = sample_dtype <nl> self . _next_inputs_fn = next_inputs_fn <nl> self . _batch_size = array_ops . shape ( start_inputs ) [ 0 ] <nl> + self . _input_shape = start_inputs . shape [ 1 : ] <nl> + <nl> self . _start_inputs = ops . convert_to_tensor ( <nl> start_inputs , name = " start_inputs " ) <nl> <nl> def __init__ ( self , sample_fn , sample_shape , sample_dtype , <nl> def batch_size ( self ) : <nl> return self . _batch_size <nl> <nl> + @ property <nl> + def input_shape ( self ) : <nl> + return self . _input_shape <nl> + <nl> @ property <nl> def sample_ids_shape ( self ) : <nl> return self . _sample_shape <nl> | Add input_shape to seq2seq helpers . | tensorflow/tensorflow | 642454bd3296959f0025e1fb1730cdd95c36713f | 2018-01-26T02:02:44Z |
mmm a / Examples / Text / ATIS / README . md <nl> ppp b / Examples / Text / ATIS / README . md <nl> To run locally , <nl> ` ` ` <nl> <nl> By default , the maxEpochs is set to 1 to save training time . One can change it to larger value such as 20 in order to get a good model accuracy . <nl> - Depends on GPU , it normally takes about 20 minutes to run 20 epochs on single GPU , and slot F1 score is about 93 . <nl> + Depends on GPU , it normally takes about 20 minutes to run 20 epochs on single GPU . The slot F1 score should be around 94 with 20 epochs . <nl> <nl> * * For Microsoft users only * * , to run the job on Philly : <nl> - first upload data folder to philly cloud . e . g . ` \ \ storage . gcr . philly . selfhost . corp . microsoft . com \ pnrsy \ < your_alias > \ ATIS ` <nl> | Update the README file about F1 score . | microsoft/CNTK | eccd3d05718de8f700ff90687b7b8f471441fb91 | 2016-08-08T22:22:08Z |
mmm a / dbms / include / DB / Core / Block . h <nl> ppp b / dbms / include / DB / Core / Block . h <nl> class Block <nl> const ColumnWithTypeAndName & getByName ( const std : : string & name ) const ; <nl> <nl> bool has ( const std : : string & name ) const ; <nl> - bool hasNullColumns ( const ColumnNumbers & arguments ) const ; <nl> - bool hasNullableColumns ( const ColumnNumbers & arguments ) const ; <nl> <nl> size_t getPositionByName ( const std : : string & name ) const ; <nl> <nl> mmm a / dbms / src / Core / Block . cpp <nl> ppp b / dbms / src / Core / Block . cpp <nl> bool Block : : has ( const std : : string & name ) const <nl> } <nl> <nl> <nl> - bool Block : : hasNullColumns ( const ColumnNumbers & arguments ) const <nl> - { <nl> - for ( const auto & arg : arguments ) <nl> - { <nl> - const auto & elem = unsafeGetByPosition ( arg ) ; <nl> - if ( elem . column & & elem . column . get ( ) - > isNull ( ) ) <nl> - return true ; <nl> - } <nl> - return false ; <nl> - } <nl> - <nl> - <nl> - bool Block : : hasNullableColumns ( const ColumnNumbers & arguments ) const <nl> - { <nl> - for ( const auto & arg : arguments ) <nl> - { <nl> - const auto & elem = unsafeGetByPosition ( arg ) ; <nl> - if ( elem . column & & elem . column . get ( ) - > isNullable ( ) ) <nl> - return true ; <nl> - } <nl> - return false ; <nl> - } <nl> - <nl> - <nl> size_t Block : : getPositionByName ( const std : : string & name ) const <nl> { <nl> IndexByName_t : : const_iterator it = index_by_name . find ( name ) ; <nl> mmm a / dbms / src / Functions / IFunction . cpp <nl> ppp b / dbms / src / Functions / IFunction . cpp <nl> void createNullValuesByteMap ( Block & block , size_t result ) <nl> } <nl> } <nl> <nl> + bool hasNullColumns ( const Block & block , const ColumnNumbers & arguments ) const <nl> + { <nl> + for ( const auto & arg : arguments ) <nl> + { <nl> + const auto & elem = block . unsafeGetByPosition ( arg ) ; <nl> + if ( elem . column & & elem . column . get ( ) - > isNull ( ) ) <nl> + return true ; <nl> + } <nl> + return false ; <nl> + } <nl> + <nl> bool hasNullColumns ( const ColumnsWithTypeAndName & args ) <nl> { <nl> for ( const auto & arg : args ) <nl> bool hasNullColumns ( const DataTypes & args ) <nl> return false ; <nl> } <nl> <nl> + bool hasNullableColumns ( const Block & block , const ColumnNumbers & arguments ) const <nl> + { <nl> + for ( const auto & arg : arguments ) <nl> + { <nl> + const auto & elem = block . unsafeGetByPosition ( arg ) ; <nl> + if ( elem . column & & elem . column . get ( ) - > isNullable ( ) ) <nl> + return true ; <nl> + } <nl> + return false ; <nl> + } <nl> + <nl> bool hasNullableColumns ( const ColumnsWithTypeAndName & args ) <nl> { <nl> for ( const auto & arg : args ) <nl> void IFunction : : getLambdaArgumentTypes ( DataTypes & arguments ) const <nl> <nl> void IFunction : : execute ( Block & block , const ColumnNumbers & arguments , size_t result ) <nl> { <nl> - if ( ! hasSpecialSupportForNulls ( ) & & block . hasNullColumns ( arguments ) ) <nl> + if ( ! hasSpecialSupportForNulls ( ) & & hasNullColumns ( block , arguments ) ) <nl> { <nl> ColumnWithTypeAndName & dest_col = block . getByPosition ( result ) ; <nl> dest_col . column = std : : make_shared < ColumnNull > ( block . rowsInFirstColumn ( ) , Null ( ) ) ; <nl> return ; <nl> } <nl> <nl> - if ( ! hasSpecialSupportForNulls ( ) & & block . hasNullableColumns ( arguments ) ) <nl> + if ( ! hasSpecialSupportForNulls ( ) & & hasNullableColumns ( block , arguments ) ) <nl> { <nl> Block non_nullable_block = block . 
extractNonNullableBlock ( arguments ) ; <nl> executeImpl ( non_nullable_block , arguments , result ) ; <nl> void IFunction : : execute ( Block & block , const ColumnNumbers & arguments , size_t r <nl> <nl> void IFunction : : execute ( Block & block , const ColumnNumbers & arguments , const ColumnNumbers & prerequisites , size_t result ) <nl> { <nl> - if ( ! hasSpecialSupportForNulls ( ) & & block . hasNullColumns ( arguments ) ) <nl> + if ( ! hasSpecialSupportForNulls ( ) & & hasNullColumns ( block , arguments ) ) <nl> { <nl> ColumnWithTypeAndName & dest_col = block . getByPosition ( result ) ; <nl> dest_col . column = std : : make_shared < ColumnNull > ( block . rowsInFirstColumn ( ) , Null ( ) ) ; <nl> return ; <nl> } <nl> <nl> - if ( ! hasSpecialSupportForNulls ( ) & & block . hasNullableColumns ( arguments ) ) <nl> + if ( ! hasSpecialSupportForNulls ( ) & & hasNullableColumns ( block , arguments ) ) <nl> { <nl> Block non_nullable_block = block . extractNonNullableBlock ( arguments ) ; <nl> executeImpl ( non_nullable_block , arguments , prerequisites , result ) ; <nl> | dbms : Cleanup [ # METR - 19266 ] | ClickHouse/ClickHouse | e38d29346e139f5b2f0769c3ad4078933b713f1c | 2016-08-05T07:49:56Z |
new file mode 100644 <nl> index 000000000000 . . 6ef57442e734 <nl> mmm / dev / null <nl> ppp b / docs / ABIStabilityManifesto . md <nl> <nl> + # Swift ABI Stability Manifesto <nl> + <nl> + * Authors : [ Michael Ilseman ] ( https : / / github . com / milseman ) ( compiled through conversations with many others ) <nl> + <nl> + # # Introduction <nl> + <nl> + # # # The Big Picture <nl> + <nl> + One of the top priorities for Swift right now is compatibility across future Swift versions . Compatibility aims at accomplishing two goals : <nl> + <nl> + 1 . * Source compatibility * means that newer compilers can compile code written in an older version of Swift . This aims to reduce the migration pain that Swift developers face when migrating to a newer Swift version . Without source compatibility , projects face version - lock where all source code in a project and its packages must be written in the same version of Swift . With source compatibility , package authors will be able to maintain a single code base across multiple Swift versions while allowing their users to use a newer version of Swift . <nl> + 2 . * Binary framework & runtime compatibility * enables the distribution of frameworks in a binary form that works across multiple Swift versions . Binary frameworks include both a * Swift module file * , which communicates source - level information of the framework ' s API , and a * shared library * , which provides the compiled implementation that is loaded at runtime . Thus , there are two necessary goals for binary framework compatibility : <nl> + * * Module format stability * stabilizes the module file , which is the compiler ' s representation of the public interfaces of a framework . This includes API declarations and inlineable code . The module file is used by the compiler for necessary tasks such as type checking and code generation when compiling client code using a framework . <nl> + * * ABI stability * enables binary compatibility between applications and libraries compiled with different Swift versions . It is the focus of the rest of this document . <nl> + <nl> + This document is an exploration and explanation of Swift ' s ABI alongside the goals and investigations needed before declaring Swift ' s ABI stable . It is meant to be a resource to the community as well as a declaration of the direction of Swift ' s ABI . <nl> + <nl> + Throughout this document there will be references to issues in Swift ' s [ issue tracking system ] ( https : / / bugs . swift . org ) denoted by " SR - xxxx " . These references track open engineering and design tasks for Swift ' s ABI . <nl> + <nl> + # # # What Is ABI ? <nl> + <nl> + At runtime , Swift program binaries interact with other libraries and components through an ABI . ABI is Application Binary Interface , or the specification to which independently compiled binary entities must conform to be linked together and executed . These binary entities must agree on many low level details : how to call functions , how their data is represented in memory , and even where their metadata is and how to access it . <nl> + <nl> + ABI is per - platform , as it is a low level concern influenced by both the architecture and the OS . Most platform vendors define a " standard ABI " which is used for C code and built on by C - family languages . Swift , however , is a very different language from C and has its own per - platform ABI . 
While most of this document is platform - agnostic , platform - specific concerns have influenced details of the design and implementation of Swift ' s ABI . For details on each platform ' s standard ABI , refer to the [ Appendix ] ( # platform - abis ) . <nl> + <nl> + # # # What Is ABI Stability ? <nl> + <nl> + ABI stability means locking down the ABI to the point that future compiler versions can produce binaries conforming to the stable ABI . Once an ABI is stable , it tends to persist for the rest of the platform ' s lifetime due to ever - increasing mutual dependencies . <nl> + <nl> + ABI stability only affects invariants of externally visible public interfaces and symbols . Internal symbols , conventions , and layout can continue to change without breaking the ABI . For example , future compilers are free to change the calling conventions for internal function calls so long as the public interfaces are preserved . <nl> + <nl> + Decisions about the ABI will have long - term ramifications and may limit the ways in which the language can grow and evolve in the future . Future Swift versions can add new , orthogonal aspects to the ABI , but any inefficiencies or inflexibilities present when stability is declared will ( effectively ) persist forever for that platform . <nl> + <nl> + ABI changes that are new and orthogonal are called * ABI - additive * changes . ABI - additive changes may be taken advantage of when the minimum targeted Swift version supports them . This allows us to extend or progressively lock down more of the ABI . These may be ABI additions to support new features or that allow for more efficient data access . Examples appear throughout this document . <nl> + <nl> + # # # What Does ABI Stability Enable ? <nl> + <nl> + ABI stability enables OS vendors to embed a Swift standard library and runtime that is compatible with applications built with older or newer versions of Swift . This would remove the need for apps to distribute their own copy of these libraries on those platforms . It also allows for better decoupling of tools and better integration into the OS . <nl> + <nl> + As noted earlier , ABI stability is necessary , though not sufficient , for binary frameworks . Module format stability is also required and is beyond the scope of this document . <nl> + <nl> + # # # Library Evolution <nl> + <nl> + Expressive and performance - focused languages which have binary interfaces tend to exhibit the [ fragile binary interface problem ] ( https : / / en . wikipedia . org / wiki / Fragile_binary_interface_problem ) , which makes it difficult for any library or component to change over time without requiring every user to recompile with new versions of that library . A major push in Swift currently is the plan for [ Library Evolution ] ( https : / / github . com / apple / swift / blob / master / docs / LibraryEvolution . rst ) , which aims to grant flexibility for library authors to maintain backwards and forwards binary compatibility . Many implementation concerns therein could have an impact on ABI . <nl> + <nl> + One of the goals of rolling out ABI stability is to remain flexible enough to accommodate library evolution changes without limiting the design space . Library evolution concerns will be addressed in each individual section , though a common refrain will be that the details are still undecided . <nl> + <nl> + # # Components of the Swift ABI <nl> + <nl> + In practice , ABI concerns can be tightly coupled . 
But , as a conceptual model , I ' d like to break them out into 6 separate classifications : <nl> + <nl> + 1 . Types , such as structs and classes , must have a defined in - memory layout for instances of that type . For binary entities to interoperate , they must share the same layout conventions . This layout is discussed in the [ Data Layout ] ( # data - layout ) section . <nl> + <nl> + 2 . Type metadata is used extensively by Swift programs , the Swift runtime , reflection , and tools such as debuggers and visualizers . This metadata must either have a defined memory layout , or have a set of defined APIs for querying the metadata of a type . Type metadata is discussed in the [ Type Metadata ] ( # metadata ) section . <nl> + <nl> + 3 . Every exported or external symbol in a library needs a unique name upon which binary entities can agree . Swift provides function overloading and contextual name spaces ( such as modules and types ) , which means that any name in source code might not be globally unique . A unique name is produced through a technique called * name mangling * . Swift ' s name mangling scheme is discussed in the [ Mangling ] ( # mangling ) section . <nl> + <nl> + 4 . Functions must know how to call each other , which entails such things as the layout of the call stack , what registers are preserved , and ownership conventions . Calling conventions are discussed in the [ Calling Convention ] ( # calling - convention ) section . <nl> + <nl> + 5 . Swift ships with a runtime library which handles such things as dynamic casting , reference counting , reflection , etc . Compiled Swift programs make external calls out to this runtime . Thus , Swift runtime API is Swift ABI . Runtime API stability is discussed in the [ Runtime ] ( # runtime ) section . <nl> + <nl> + 6 . Swift ships with a standard library that defines many common types , structures , and operations on these . For a shipped standard library to work with applications written in different versions of Swift , it must expose a stable API . Thus , Swift Standard Library API is Swift ABI , as well as the layout of many of the types it defines . Swift standard library ABI stability concerns are discussed in the [ Standard Library ] ( # standard - library ) section . <nl> + <nl> + # # < a name = " type - layout " > < / a > Data Layout <nl> + <nl> + # # # Background <nl> + <nl> + First , let ' s define some common terminology . <nl> + <nl> + * An * object * is a stored entity of some type , meaning it has a location in memory or in registers . Objects can be values of struct / enum type , class instances , references to class instances , values of protocol type , or even closures . This is [ in contrast to ] ( https : / / en . wikipedia . org / wiki / Object_ ( computer_science ) ) the class - based - OO definition of object as being an instance of a class . <nl> + * A * data member * of an object is any value that requires layout within the object itself . Data members include an object ' s stored properties and associated values . <nl> + * A * spare bit * is a bit that is unused by objects of a given type . These often arise due to things such as alignment , padding , and address spaces , further described below . <nl> + * An * extra inhabitant * is a bit pattern that does not represent a valid value for objects of a given type . For example , a simple C - like enum with 3 cases can fit in 2 bits , where it will have one extra inhabitant : the fourth unused bit pattern . 
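A minimal way to observe spare bits and extra inhabitants from Swift itself is `MemoryLayout`, which reports the size, stride, and alignment discussed below. This is only an illustrative sketch; the concrete numbers in the comments assume a 64-bit platform.

```swift
// Bool needs only 1 bit of its byte, so the remaining bit patterns are extra
// inhabitants; Optional<Bool> can encode `nil` in one of them and stay 1 byte.
print(MemoryLayout<Bool>.size, MemoryLayout<Bool?>.size)   // 1 1
// Int uses every bit pattern of its 8 bytes, so Optional<Int> must grow to
// make room for the `nil` case.
print(MemoryLayout<Int>.size, MemoryLayout<Int?>.size)     // 8 9
```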
<nl> + <nl> + * Data layout * , also known as type layout , specifies the in - memory layout of an object ' s data . This includes the size of an object in memory , the alignment of an object ( defined later ) , and how to find each data member within an object . <nl> + <nl> + An object has a statically known layout if the compiler is able to determine its layout at compilation time . Objects whose layout is not determinable until runtime have * opaque layout * . Such objects are further discussed in the [ opaque layout section ] ( # opaque - layout ) <nl> + <nl> + # # # # Layout and Properties of Types <nl> + <nl> + For every type ` T ` in Swift with statically known layout , the ABI specifies a means to determine : <nl> + <nl> + * The * alignment * for that type : for ` x : T ` , the address of ` x ` modulo alignment is always zero . <nl> + * The * size * for that type : the byte size ( possibly 0 ) without padding at the end . <nl> + * The * offset * for each data member ( if applicable ) : the address at which every member resides , relative to the object ' s base address . <nl> + <nl> + Derived from alignment and size is the * stride * of the type , which is the size of objects of that type rounded up to alignment ( minimum 1 ) . The stride is mostly useful for objects laid out contiguously in memory , such as in arrays . <nl> + <nl> + < a name = " type - properties " > < / a > Some types have interesting properties : <nl> + <nl> + * A type is * trivial * , also known as POD ( " plain ol ' data " ) , if it merely stores data and has no extra copy , move , or destruction semantics . Trivial objects can be copied by replicating their bits , and are destroyed through deallocation . A type is trivial only if all data members are also trivial . <nl> + * A type is * bitwise movable * if there are no side table references dependent on its address . A [ move ] ( https : / / doc . rust - lang . org / book / ownership . html # move - semantics ) operation can occur when an object is copied from one location into another and the original location is no longer used . Bitwise movable objects are moved by performing a bitwise copy and then invalidating the original location . A type is bitwise movable only if all its data members are also bitwise movable . All trivial types are bitwise movable . <nl> + <nl> + An example of a trivial type is a Point struct that contains two Double fields : an x coordinate and a y coordinate . This struct is trivial , as it can be copied merely by copying its bits and its destruction performs no extra operations . <nl> + <nl> + An example of a bitwise movable , but non - trivial , type is a struct that contains a reference to a class instance . Objects of that type cannot be copied merely by copying their bits , because a retain operation must be performed on the reference . Upon destruction , such objects must perform a release . However , the object can be moved from one address to another by copying its bits provided the original location is invalidated , keeping the overall retain count unchanged . <nl> + <nl> + An example of a type that is neither trivial nor bitwise movable is a struct containing a weak reference . Weak references are tracked in a side table so that they can be nil - ed out when the referenced object is destroyed . When moving an object of such type from one address to another , the side table must be updated to refer to the weak reference ' s new address . 
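The three examples above can be written down directly. This is a sketch with invented type names; the comments restate the properties described in the text.

```swift
struct Point {                 // trivial: a bitwise copy is a complete copy,
    var x: Double              // and destruction is plain deallocation
    var y: Double
}

class Canvas {}

struct Brush {                 // non-trivial but bitwise movable: copying must
    var canvas: Canvas         // retain the reference, yet a move is just a bit
}                              // copy plus invalidating the old location

struct Observer {              // neither trivial nor bitwise movable: per the
    weak var canvas: Canvas?   // description above, the weak reference's side
}                              // table must be updated when the value moves
```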
<nl> + <nl> + # # # # < a name = " opaque - layout " > < / a > Opaque Layout <nl> + <nl> + Opaque layout occurs whenever the layout is not known until runtime . This can come up for unspecialized generics , which do not have a known layout at compilation time . It can also come up for resilient types , which are described in the [ next section ] ( # layout - library - evolution ) . <nl> + <nl> + The size and alignment of an object of opaque layout , as well as whether it is trivial or bitwise movable , is determined by querying its value witness table , which is described further in the [ value witness table section ] ( # value - witness - table ) . The offsets for data members are determined by querying the type ' s metadata , which is described further in the [ value metadata section ] ( # value - metadata ) . Objects of opaque layout must typically be passed indirectly , described further in the [ function signature lowering section ] ( # function - signature - lowering ) . The Swift runtime interacts with objects of opaque layout through pointers , and thus they must be addressable , described further in the [ abstraction levels section ] ( # abstraction - levels ) . <nl> + <nl> + In practice , layout might be partially - known at compilation time . An example is a generic struct over type ` T ` that stores an integer as well as an object of type ` T ` . In this case , the layout of the integer itself is known and its location within the generic struct might be as well , depending on the specifics of the layout algorithm . However , the generic stored property has opaque layout , and thus the struct overall has an unknown size and alignment . We are investigating how to most efficiently lay out partially - opaque aggregates [ [ SR - 3722 ] ( https : / / bugs . swift . org / browse / SR - 3722 ) ] . This will likely entail placing the opaque members at the end in order to guarantee known offsets of non - opaque data members . <nl> + <nl> + # # # # < a name = " layout - library - evolution " > < / a > Library Evolution <nl> + <nl> + Library evolution introduces * resilient * layouts of public types by default and provides new annotations that freeze the layout for performance . A resilient layout avoids many of the pitfalls of the fragile binary problem by making the layout opaque . Resilient types have far more freedom to change and evolve without breaking binary compatibility : public data members can be rearranged , added , and even removed ( by providing a computed getter / setter instead ) . The new annotations provide the ability to relinquish these freedoms by making stricter guarantees about their layout in order to be more efficiently compiled and accessed . <nl> + <nl> + In order to allow for cross - module optimizations for modules that are distributed together , there is the concept of a * resilience domain * . A resilience domain is a grouping of modules which are version - locked with each other and thus do not have binary compatibility across multiple version requirements with each other . See [ Resilience Domains ] ( https : / / github . com / apple / swift / blob / master / docs / LibraryEvolution . rst # resilience - domains ) for more details . <nl> + <nl> + Resilient types are required to have opaque layout when exposed outside their resilience domain . Inside a resilience domain , this requirement is lifted and their layout may be statically known or opaque as determined by their type ( see [ previous section ] ( # opaque - layout ) ) . 
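As a sketch of partially-opaque layout (the type and function names here are hypothetical), a generic struct can mix a statically known field with a generic one; the aggregate's size and alignment are only available once `T` is known, via the type's metadata and value witness information.

```swift
struct Tagged<T> {
    var tag: Int    // layout known at compile time
    var value: T    // opaque layout until T is fixed
}

func describe<T>(_: T.Type) {
    // In unspecialized generic code these queries are answered at run time
    // from T's layout information.
    print(MemoryLayout<Tagged<T>>.size, MemoryLayout<Tagged<T>>.alignment)
}

describe(Bool.self)         // e.g. 9 8 on a 64-bit platform
describe((Int, Int).self)   // e.g. 24 8
```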
<nl> + <nl> + Annotations may be applied to a library ' s types in future versions of that library , in which case the annotations are versioned , yet the library remains binary compatible . How this will impact the ABI is still under investigation [ [ SR - xxxx ] ( ) ] . <nl> + <nl> + <nl> + # # # # < a name = " abstraction - levels " > < / a > Abstraction Levels <nl> + <nl> + All types in Swift conceptually exist at multiple levels of abstraction . For example , an ` Int ` value is of a concrete type and can be passed to functions in registers . But , that same value might be passed to a function expecting a generic type ` T ` , which has opaque layout . Since the function is expecting its argument to be passed indirectly , the integer value must be promoted to the stack . When that value has type ` T ` , it is said to be at a higher abstraction level than when it was an integer . Moving between abstraction levels is done through a process called * reabstraction * . <nl> + <nl> + For many types in Swift , reabstraction involves directly copying the value to memory so that it is addressable . Reabstraction may be more complicated for tuples and higher - order functions , explained later in the [ tuples layout section ] ( # tuples ) and the [ function signature lowering section ] ( # lowering - higher - order - functions ) . <nl> + <nl> + <nl> + # # # A Tour of Types <nl> + <nl> + What follows is a breakdown of the different kinds of types in Swift and what needs to be specified . <nl> + <nl> + # # # # Structs <nl> + <nl> + The layout algorithm for structs should result in an efficient use of space , possibly by laying out fields in a different order than declared [ [ SR - 3723 ] ( https : / / bugs . swift . org / browse / SR - 3723 ) ] . We may want a fully declaration - order - agnostic algorithm to allow data members to be reordered in source without breaking binary compatibility [ [ SR - 3724 ] ( https : / / bugs . swift . org / browse / SR - 3724 ) ] . We also need to consider whether , by default , we want to ensure struct data members are addressable ( i . e . byte - aligned ) or if we ' d rather do bit - packing to save space [ [ SR - 3725 ] ( https : / / bugs . swift . org / browse / SR - 3725 ) ] . <nl> + <nl> + Zero sized structs do not take up any space as data members and struct members may be laid out in the padding of sub - structs . We may want to explore whether there are implementation benefits to capping alignment at some number , e . g . 16 on many platforms [ [ SR - xxxx ] ( ) ] . <nl> + <nl> + # # # # < a name = " tuples " > < / a > Tuples <nl> + <nl> + Tuples are similar to anonymous structs , but they differ in that they exhibit structural subtyping : a tuple of type e . g . ` ( Bool , Bool ) ` can be passed anywhere expecting generic types ` ( T , U ) ` . But , the type ` ( T , U ) ` exists at a higher abstraction level than ` ( Bool , Bool ) ` . Due to this , tuples may face more expensive reabstraction costs if their layout is aggressively packed . Reabstracting such a tuple would involve splitting and promoting each element into their own addresses . <nl> + <nl> + This may be an argument for a simple , declaration - order , non bit - packed layout algorithm for tuples . Tuples are often used for small local values and rarely persisted across ABI boundaries in a way that aggressive packing is performance - critical . This would also be more consistent with how fixed - size C arrays are presented in Swift , which are imported as tuples .
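As a small illustration of the structural subtyping just described (the function and variable names are invented), a concrete `(Bool, Bool)` tuple can be passed where a generic `(T, U)` is expected; this is exactly the point at which reabstraction costs would show up under an aggressively packed layout.

```swift
func firstElement<T, U>(_ pair: (T, U)) -> T {
    return pair.0
}

let flags = (true, false)        // a concrete (Bool, Bool) tuple
let first = firstElement(flags)  // passed at the higher (T, U) abstraction level
print(first)                     // true
```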
<nl> + <nl> + We should investigate whether to aggressively bit - pack tuple elements similarly to structs , paying the reabstraction costs , or if the benefits are not worth the costs [ [ SR - 3726 ] ( https : / / bugs . swift . org / browse / SR - 3726 ) ] . <nl> + <nl> + Tuples should be binary compatible between labeled and unlabeled tuples of the same type and structure . <nl> + <nl> + # # # # Enums <nl> + <nl> + A value of enum type exists as one of many variants or cases . Determining which is the job of the * discriminator * , also known as a tag , which is an integer value denoting which case is presently stored . To save space , discriminators can be put in spare bits or be represented by extra inhabitants . <nl> + <nl> + ` @ closed ` enums , that is enums that can ' t have cases added to them later , can be classified into the following : <nl> + <nl> + * Degenerate - zero cased , or single cased without an associated value <nl> + * Trivial - no associated values <nl> + * Single payload - an enum where only one case has associated values <nl> + * Multi - payload - an enum that has multiple cases with associated values <nl> + <nl> + Degenerate enums take zero space . Trivial enums are just their discriminator . <nl> + <nl> + Single payload enums try to fit their discriminator in the payload ' s extra inhabitants for the non - payload cases , otherwise they will store the discriminator after the payload . When the discriminator is stored after the payload , the bits are not set for the payload case . The payload is guaranteed to be layout compatible with the enum as the payload case does not use any extra inhabitants . Storing the discriminator after the payload may also result in more efficient layout of aggregates containing the enum , due to alignment . <nl> + <nl> + The layout algorithm for multi - payload enums is more complicated and still needs to be developed [ [ SR - 3727 ] ( https : / / bugs . swift . org / browse / SR - 3727 ) ] . The algorithm should try to rearrange payloads so as to coalesce cases and save space . This rearrangement can also improve performance and code size . For example , if ARC - ed payload components reside in the same location , operations like copy can be done directly on the values without extensive switching . <nl> + <nl> + Enum raw values are not ABI , as they are implemented as code present in the computed property getter and setter . ` @ objc ` enums are C - compatible , which means they must be trivial . <nl> + <nl> + Library evolution adds the notion of ` @ open ` enums ( which will also be resilient ) , which allow library owners to add new cases and reorder existing cases without breaking binary compatibility . How this is accomplished is still to be determined . <nl> + <nl> + # # # # Classes <nl> + <nl> + There are two constructs present when discussing about class layout : * class instances * , which reside on the heap , and * references * to class instances , which are reference - counted pointers . <nl> + <nl> + # # # # # Class Instances <nl> + <nl> + The layout of class instances is mostly opaque . This is to avoid the vexing problem of [ fragile binary interfaces ] ( https : / / en . wikipedia . org / wiki / Fragile_binary_interface_problem ) , also known as the " fragile base class problem " , in which seemingly minor changes to a base class break binary compatibility with subclasses . <nl> + <nl> + The run - time type of a non - final class instance or a class existential is not known statically . 
To facilitate dynamic casts , the object must store a pointer to its type , called the * isa * pointer . The * isa * pointer is always stored at offset 0 within the object . How that type is represented and what information it provides is part of the class ' s metadata and is covered in the [ class metadata section ] ( # class - metadata ) . Similarly , the function for a non - final method call is also not known statically and is dispatched based on the run - time type . Method dispatch is covered in the [ method dispatch section ] ( # method - dispatch ) . <nl> + <nl> + Class instances will , as part of ABI - stability , guarantee a word - sized field of opaque data following the isa field that may be used for reference counting by the runtime . But , the format and conventions of this opaque data will not be ABI at first in order to have more flexibility for language or implementation changes . Instead , runtime functions provide the means to interact with reference counts . This opaque data and its conventions may be locked down for more efficient access in the future , which will be an ABI - additive change . <nl> + <nl> + # # # # # References <nl> + <nl> + Classes are reference types . This means that Swift code dealing with class instances does so through references , which are pointers at the binary level . These references participate in [ automatic reference counting ] ( https : / / en . wikipedia . org / wiki / Automatic_Reference_Counting ) ( ARC ) . <nl> + <nl> + References to Objective - C - compatible class instances ( i . e . those that inherit from an Objective - C class or are imported from Objective - C ) must provide the same bit - level guarantees to the Objective - C runtime as Objective - C references . Thus , such references are opaque : they have no guarantees other than that nil is 0 and provide no extra inhabitants . <nl> + <nl> + References to native , non - Objective - C - compatible Swift class instances do not have this constraint . The alignment of native Swift class instances is part of ABI , providing spare bits in the lower bits of references . Platforms may also provide spare bits ( typically upper bits ) and extra inhabitants ( typically lower addresses ) for references due to limited address spaces . <nl> + <nl> + We may want to explore using spare bits in references to store local reference counts in order to perform some ARC operations more efficiently [ [ SR - 3728 ] ( https : / / bugs . swift . org / browse / SR - 3728 ) ] . These would need to be flushed to the object whenever a reference may escape or the local reference count reaches zero . If these local reference counts can cross ABI boundaries , then such a change will have to be implemented in an ABI - additive way with deployment target checking . <nl> + <nl> + # # # # < a name = " existential - containers " > < / a > Existential Containers <nl> + <nl> + Any discussion of existentials quickly becomes bogged down in obscure terminology , so let ' s first establish some background surrounding the terms * existential values * , * existential containers * , and * witness tables * . <nl> + <nl> + In type theory , an [ existential type ] ( https : / / en . wikipedia . org / wiki / Type_system # Existential_types ) describes an interface of an abstract type . Values of an existential type are * existential values * . 
These arise in Swift when an object ' s type is a protocol : storing or passing an object of protocol type means that the actual run - time type is opaque ( not known at compile time , and thus neither is its layout ) . But , that opaque type has known interfaces because that type conforms to the protocol . <nl> + <nl> + A type ' s conformance to a protocol consists of functions ( whether methods or getters and setters ) , but the specific addresses of those functions are not known at compilation time for existential values as their actual type is not known until run time . This is a similar situation as with references to non - final class instances , and is solved using a [ similar technique ] ( # method - dispatch ) . * Witness tables * are tables of function pointers implementing a protocol conformance and are further discussed in the [ witness table section ] ( # witness - tables ) . <nl> + <nl> + * Existential containers * store values of protocol or protocol composition type alongside corresponding witness tables for each protocol conformance . For existentials that are not class - constrained ( may be value types or classes ) , the container needs to store : <nl> + <nl> + * the value itself : either in an inline buffer or as a pointer to out - of - line storage <nl> + * a pointer to the type metadata <nl> + * a witness table pointer for every conformance . <nl> + <nl> + Class - constrained existentials omit the metadata pointer ( as the object itself contains a pointer to its type ) , as well as any excess inline buffer space . ` Any ` , which is an existential value without any conformances , has no witness table pointer . <nl> + <nl> + We are re - evaluating the inline buffer size for existential containers prior to ABI stability [ [ SR - 3729 ] ( https : / / bugs . swift . org / browse / SR - 3729 ) ] . We are also considering making the out - of - line allocation be copy - on - write ( COW ) [ [ SR - xxxx ] ( ) ] . We should also explore " exploding " existential parameters , i . e . converting an existential parameter into a protocol - constrained generic parameter [ [ SR - xxxx ] ( ) ] . <nl> + <nl> + # # # Declaring Stability <nl> + <nl> + ABI stability means nailing down type layout and making decisions about how to handle the concerns of Library Evolution . The end result will be a technical specification of the layout algorithms that future compilers must adhere to in order to ensure binary compatibility [ [ SR - 3730 ] ( https : / / bugs . swift . org / browse / SR - 3730 ) ] . <nl> + <nl> + For all of the areas discussed above , more aggressive layout improvements may be invented in the post - ABI stability future . For example , we may want to explore rearranging and packing nested type data members with outer type data members . Such improvements would have to be done in an ABI - additive fashion through deployment target and / or min - version checking . This may mean that the module file will need to track per - type ABI versioning information . <nl> + <nl> + A potentially out of date description of Swift ' s current type layout can be found in the [ Type Layout docs ] ( https : / / github . com / apple / swift / blob / master / docs / ABI . rst # type - layout ) . <nl> + <nl> + <nl> + # # < a name = " metadata " > < / a > Type Metadata <nl> + <nl> + While data layout specifies the layout of objects of a given type , * type metadata * holds information about the types themselves . The information available and how to access this information is part of Swift ABI . 
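At the source level, metatype values are how Swift code gets a handle on these metadata records. A minimal sketch, using a hypothetical `Pair` type:

```swift
struct Pair {
    var a: Int
    var b: Int
}

let staticMeta: Pair.Type = Pair.self        // metadata reference known at compile time
let dynamicMeta = type(of: Pair(a: 1, b: 2)) // metadata recovered at run time
print(staticMeta == dynamicMeta)             // true: one canonical record per concrete type
```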
<nl> + <nl> + Swift keeps metadata records for every * concrete type * . Concrete types include all non - generic types as well as generic types with concrete type parameters . These records are created by the compiler as well as lazily created at run time ( e . g . for generic type instantiations ) . This metadata stores information about its type , discussed in each section below . <nl> + <nl> + A potential approach to stability mechanism is to provide metadata read / write functions alongside the runtime to interact with metadata , giving some freedom to the underlying structures to grow and change . This effectively makes large portions of metadata opaque . But , certain fields require access to be as efficient as possible ( e . g . dynamic casts , calling into witness tables ) and the performance hit from going through an intermediary function would be unacceptable . Thus , we will probably freeze the performance - critical parts and use accessor functions for the rest [ [ SR - xxxx ] ( ) ] . <nl> + <nl> + Metadata has many historical artifacts in its representation that we want to clean up [ [ SR - xxxx ] ( ) ] . We also want to make small tweaks to present more semantic information in the metadata , to enable better future tooling and features such as reflection [ [ SR - xxxx ] ( ) ] . Some of these need to be done before declaring ABI stability and some may be additive . <nl> + <nl> + # # # # Declaring Stability <nl> + <nl> + Stabilizing the ABI means producing a precise technical specification for the fixed part of the metadata layout of all language constructs so that future compilers and tools can continue to read and write them . A prose description is not necessarily needed , though explanations are useful . We will also want to carve out extra space for areas where it is likely to be needed for future functionality [ [ SR - 3731 ] ( https : / / bugs . swift . org / browse / SR - 3731 ) ] . <nl> + <nl> + For more , but potentially out of date , details see the [ Type Metadata docs ] ( https : / / github . com / apple / swift / blob / master / docs / ABI . rst # type - metadata ) . <nl> + <nl> + # # # Generic Parameters <nl> + <nl> + Swift has a powerful generics system , which shows up both at compilation time ( through specialization optimizations ) and at run time when the type is unknown . Swift types may be parameterized over generic types , and thus every type ' s metadata describes whether generic type parameters are present and if so provides information about them . <nl> + <nl> + At run time , objects only have concrete types . If the type in source code is generic , the concrete type is an instantiation of that generic type . Generic instantiation metadata provide type metadata for each generic type parameter . If the generic type is constrained , corresponding [ witness tables ] ( # witness - tables ) for each protocol conformance are also provided in the metadata . <nl> + <nl> + # # # < a name = " value - metadata " > < / a > Value Metadata <nl> + <nl> + Named value types store the type name ( currently mangled but we are investigating un - mangled [ [ SR - xxxx ] ( ) ] ) and a pointer to the type ' s parent for nested types . <nl> + <nl> + Value type metadata also has kind - specific entries . Struct metadata stores information about its fields , field offsets , field names , and field metadata . Enum metadata stores information about its cases , payload sizes , and payload metadata . Tuple metadata stores information about its elements and labels . 
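The field information stored for structs is what reflection reads back at run time. A brief sketch using the standard library's `Mirror`; the `Point` type is just for illustration.

```swift
struct Point {
    var x: Double
    var y: Double
}

let mirror = Mirror(reflecting: Point(x: 1, y: 2))
for child in mirror.children {
    // Field names and values come from the records the compiler emitted for Point.
    print(child.label ?? "_", child.value)   // x 1.0, then y 2.0
}
```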
<nl> + <nl> + # # # # # < a name = " value - witness - table " > < / a > Value Witness Tables <nl> + <nl> + Every concrete type has a * value witness table * that provides information about how to lay out and manipulate values of that type . When a value type has [ opaque layout ] ( # opaque - layout ) , the actual layout and properties of that value type are not known at compilation time , so the value witness table is consulted . <nl> + <nl> + The value witness table stores whether a type is trivial and / or bitwise movable , whether there are extra inhabitants and if so how to store and retrieve them , etc . For enums , the value witness table will also provide functionality for interacting with the discriminator . There may be more efficient ways of representing enums that simplify this functionality ( or provide a fast path ) , and that ' s under investigation [ [ SR - xxxx ] ( ) ] . <nl> + <nl> + These value witness tables may be constructed statically for known values or dynamically for some generic values . While every unique type in Swift has a unique metadata pointer , value witness tables can be shared by types so long as the information provided is identical ( i . e . same layout ) . Value witness tables always represent a type at its highest [ abstraction level ] ( # abstraction - levels ) . The value witness table entries and structure need to be locked down for ABI stability [ [ SR - xxxx ] ( ) ] . <nl> + <nl> + # # # < a name = " class - metadata " > < / a > Class Metadata <nl> + <nl> + Swift class metadata is layout - compatible with Objective - C class objects on Apple ' s platforms , which places requirements on the contents of the first section of class metadata . In this first section , entries such as super class pointers , instance size , instance alignment , flags , and opaque data for the Objective - C runtime are stored . <nl> + <nl> + Following that are superclass members , parent type metadata , generic parameter metadata , class members , and * vtables * , described below . Library evolution may present many changes to what exactly is present and will likely make many of the contents opaque to accommodate changes . <nl> + <nl> + # # # # # < a name = " method - dispatch " > < / a > Method Dispatch <nl> + <nl> + Invoking a non - final instance method involves calling a function that is not known at compile time : it must be resolved at run time . This is solved through the use of a * vtable * , or virtual method table ( so called because overridable methods are also known as " virtual " methods ) . A * vtable * is a table of function pointers to a class or subclass ' s implementation of overridable methods . If the vtable is determined to be part of ABI , it needs a layout algorithm that also provides flexibility for library evolution . <nl> + <nl> + Alternatively , we may decide to perform inter - module calls through opaque * thunks * , or compiler - created intermediary functions , which then perform either direct or vtable dispatch as needed [ [ SR - xxxx ] ( ) ] . This enables greater library evolution without breaking binary compatibility by allowing internal class hierarchies to change . This would also unify non - final method dispatch between open and non - open classes while still allowing for aggressive compiler optimizations like de - virtualization for non - open classes . This approach would make vtables not be ABI , as that part of the type metadata would effectively be opaque to another module . 
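A short sketch of the dispatch distinction discussed above (the class names are invented): a call to a non-final method must be resolved through the class's method table at run time, while `final` gives the compiler license to devirtualize calls on a known concrete type.

```swift
class Shape {
    func area() -> Double { return 0 }   // overridable: dispatched via the vtable
}

final class Circle: Shape {
    var radius = 1.0
    override func area() -> Double { return .pi * radius * radius }
}

func totalArea(_ shapes: [Shape]) -> Double {
    // The implementation invoked for each element depends on its run-time type.
    return shapes.reduce(0) { $0 + $1.area() }
}

print(totalArea([Shape(), Circle()]))    // 3.141592653589793
```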
<nl> + <nl> + # # # Protocol and Existential Metadata <nl> + <nl> + # # # # # < a name = " witness - tables " > < / a > Protocol Witness Tables <nl> + <nl> + The protocol witness table is a function table of a type ' s conformance to the protocol ' s interfaces . If the protocol also has an associated type requirement , then the witness table will store the metadata for the associated type . Protocol witness tables are used with [ existential containers ] ( # existential - containers ) where the run time type is not known . <nl> + <nl> + Protocol witness tables may be created dynamically by the runtime or statically by the compiler . The layout of a protocol witness table is ABI and we need to determine a layout algorithm that also accommodates library evolution concerns , where additional protocol requirements may be added with default fall - backs [ [ SR - 3732 ] ( https : / / bugs . swift . org / browse / SR - 3732 ) ] . <nl> + <nl> + # # # # # Existential Metadata <nl> + <nl> + Existential type metadata contains the number of witness tables present , whether the type is class - constrained , and a * protocol descriptor * for each protocol constraint . A protocol descriptor describes an individual protocol constraint , such as whether it is class - constrained , the size of conforming witness tables , and protocol descriptors for any protocols it refines . Protocol descriptors are layout compatible with the Objective - C runtime ' s protocol records on Apple platforms . <nl> + <nl> + <nl> + # # # Function Metadata <nl> + <nl> + In addition to common metadata entries , function type metadata stores information about the function signature : parameter and result type metadata , calling convention , per - parameter ownership conventions , and whether the function throws . Function type metadata always represents the function at its highest abstraction level , which is explained later in the [ function signature lowering section ] ( # lowering - higher - order - functions ) . Function parameters are currently modeled with a tuple - based design , but this should be updated to match modern Swift [ [ SR - xxxx ] ( ) ] . As more ownership semantics are modeled , more information may be stored about each parameter . <nl> + <nl> + # # < a name = " mangling " > < / a > Mangling <nl> + <nl> + Mangling is used to produce unique symbols . It applies to both external ( public ) symbols as well as internal or hidden symbols . Only the mangling scheme for external symbols is part of ABI . <nl> + <nl> + ABI stability means a stable mangling scheme , fully specified so that future compilers and tools can honor it . For a potentially out - of - date specification of what the mangling currently looks like , see the [ Name Mangling docs ] ( https : / / github . com / apple / swift / blob / master / docs / ABI . rst # mangling ) . <nl> + <nl> + There are some corner cases currently in the mangling scheme that should be fixed before declaring ABI stability . We need to come up with a canonicalization of generic and protocol requirements to allow for order - agnostic mangling [ [ SR - 3733 ] ( https : / / bugs . swift . org / browse / SR - 3733 ) ] . We also may decide to more carefully mangle variadicity of function parameters , etc [ [ SR - 3734 ] ( https : / / bugs . swift . org / browse / SR - 3734 ) ] . Most often , though , mangling improvements focus on reducing symbol size . 
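For a concrete sense of why unique symbols are needed, consider two public overloads that share a source name (the names below are hypothetical); their exported symbols must differ, so the mangling encodes the module, parameter types, and result type. The `swift demangle` tool shipped with the toolchain can turn such symbols back into readable declarations.

```swift
// Same source-level name, two distinct exported symbols after mangling.
public func largest(_ a: Int, _ b: Int) -> Int {
    return a > b ? a : b
}

public func largest(_ a: Double, _ b: Double) -> Double {
    return a > b ? a : b
}
```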
<nl> + <nl> + Mangling design centers around coming up with short and efficient manglings that still retain important properties such as uniqueness and integration with existing tools and formats . Given the prevalence of public symbols in libraries and frameworks , and debugging symbols in applications , the symbol names themselves can make up a significant portion of binary size . Reducing this impact is a major focus of stabilizing the mangling . Post - ABI - stability , any new manglings or techniques must be additive and must support the existing manglings . <nl> + <nl> + There are many ways to improve the existing mangling without major impact on existing tools . Throughout these endeavors , we will be empirically measuring and tracking symbol size and its impact on binary size [ [ SR - xxxx ] ( ) ] . ABI work on mangling focuses on producing * compact manglings * and using * suffix differentiation * . <nl> + <nl> + # # # Compact Manglings <nl> + <nl> + Minor tweaks to shorten the mangling can have a beneficial impact on all Swift program binary sizes . These tweaks should compact existing manglings while preserving a simple unique mapping . One example is not distinguishing between struct / enum in mangling structures , which would also provide more library evolution freedom [ [ SR - xxxx ] ( ) ] . We are considering dropping some internal witness table symbols when they don ' t provide any meaningful information conducive to debugging [ [ SR - xxxx ] ( ) ] . We are currently overhauling word substitutions in mangling , with the goal of reducing as much redundancy in names as possible [ [ SR - xxxx ] ( ) ] . <nl> + <nl> + There are other aggressive directions to investigate as well , such as mangling based on a known overload set for non - resilient functions . This does have the downside of making manglings unstable when new overloads are added , so its benefits would have to be carefully weighed [ [ SR - xxxx ] ( ) ] . <nl> + <nl> + Any more ambitious reimagining of how to store symbols such as aggressive whole - library symbol name compression would have to be done in tight coupling with existing low level tools . Unfortunately , this might make some of the more ambitious options infeasible in time for ABI stability . They could be rolled out as ABI - additive using deployment target checking in the future . <nl> + <nl> + # # # Suffix Differentiation <nl> + <nl> + There are many existing low level tools and formats that store and consume the symbol information , and some of them use efficient storage techniques such as tries . Suffix differentiation is about adjusting the mangling in ways that take advantage of them : by distinguishing manglings through suffixes , i . e . having common shared prefixes . This is currently underway and is resulting in binary size reductions for platforms that use these techniques [ [ SR - xxxx ] ( ) ] . <nl> + <nl> + # # < a name = " calling - convention " > < / a > Calling Convention <nl> + <nl> + For the purposes of this document , " standard calling convention " refers to the C calling convention for a given platform ( see [ appendix ] ( # platform - abis ) ) , and " Swift calling convention " refers to the calling convention used by Swift code when calling other Swift code . The Swift runtime uses the standard calling convention , though it may make alterations ( see section [ Runtime calling convention ] ( # runtime - calling - convention ) ) . <nl> + <nl> + Calling convention stability pertains to public interfaces . 
The Swift compiler is free to choose any convention for internal ( intra - module ) functions and calls . <nl> + <nl> + For rationale and potentially - out - of - date details , see the [ Swift Calling Convention Whitepaper ] ( https : / / github . com / apple / swift / blob / master / docs / CallingConvention . rst ) . As part of nailing down the calling conventions , that document will either be updated with the final specifications of the calling conventions or else moved to a rationale document and a more succinct and rigorous specification put in its place . <nl> + <nl> + # # # Register convention <nl> + <nl> + This section will be using the terms * callee - saved * and * scratch * to classify registers as part of a register convention . <nl> + <nl> + * A * callee - saved register * must be preserved over the duration of a function call . If a called function ( the * callee * ) wishes to change the value stored in the register , it must restore it before returning . <nl> + * A * scratch * register , also known as caller - saved or callee - clobbered , is not preserved over the duration of a function call . If the register ' s value must be preserved , code surrounding a function call must save and restore the value . <nl> + <nl> + Swift uses roughly the same categorization of registers as the standard calling convention . But , for some platforms , the Swift calling convention adds additional situational uses of some callee - saved registers : the * call context * register and the * error * register . <nl> + <nl> + # # # # Call Context Register <nl> + <nl> + The value held in the * call context * register depends on the kind of function called : <nl> + <nl> + * Instance methods on class types : pointer to self <nl> + * Class methods : pointer to type metadata ( which may be subclass metadata ) <nl> + * Mutating method on value types : pointer to the value ( i . e . value is passed indirectly ) <nl> + * Non - mutating methods on value types : self may fit in one or more registers , else passed indirectly <nl> + * * Thick closures * , i . e . closures requiring a context : the closure context <nl> + <nl> + Having the call context register be callee - saved is advantageous . It keeps the register stable across calls , where the context is very likely to be used and reused in subsequent or nested calls . Additionally , this makes partial application free as well as converting thin closures to thick . <nl> + <nl> + # # # # Error Register <nl> + <nl> + Throwing functions communicate error values to their callers through the * error * register on some platforms . The error register holds a pointer to the error value if an error occurred , otherwise 0 . The caller of a throwing function is expected to quickly check for 0 before continuing on with non - error code , otherwise branching to code to handle or propagate the error . Using a callee - saved register for the error register enables free conversion from non - throwing to throwing functions , which is required to honor the subtyping relationship . <nl> + <nl> + # # # < a name = " function - signature - lowering " > < / a > Function Signature Lowering <nl> + <nl> + Function signature lowering is the mapping of a function ' s source - language type , which includes formal parameters and results , all the way down to a physical convention , which dictates what values are stored in what registers and what values to pass on the stack . 
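<nl> + <nl> + For a rough feel of what the lowering algorithm has to handle ( the functions below are invented for illustration ) , compare a small fixed - layout result with a generic one : <nl> + <nl> + ` ` ` swift <nl>
+ struct Point { <nl>
+   var x: Double <nl>
+   var y: Double <nl>
+ } <nl>
+ <nl>
+ // A small fixed-layout result like Point can plausibly travel directly in registers. <nl>
+ func midpoint(_ a: Point, _ b: Point) -> Point { <nl>
+   return Point(x: (a.x + b.x) / 2, y: (a.y + b.y) / 2) <nl>
+ } <nl>
+ <nl>
+ // T has opaque layout here, so the value is passed and returned <nl>
+ // indirectly (through memory) rather than directly in registers. <nl>
+ func identity<T>(_ value: T) -> T { <nl>
+   return value <nl>
+ } <nl>
+ ` ` `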
<nl> + <nl> + ABI stability requires nailing down and fully specifying this algorithm so that future Swift versions can lower Swift types to the same physical call signature as prior Swift versions . More in - depth descriptions and rationale of function signature lowering can be found in the [ function signature lowering docs ] ( https : / / github . com / apple / swift / blob / master / docs / CallingConvention . rst # function - signature - lowering ) . <nl> + <nl> + Lowering the result value is usually done first , with a certain number of registers designated to hold the result value if it fits , otherwise the result value is passed on the stack . A good heuristic is needed for the limit and is architecture specific ( e . g . 4 registers on modern 64 - bit architectures ) [ [ SR - xxxx ] ( ) ] . <nl> + <nl> + Next comes lowering parameters , which proceeds greedily by trying to fit values into registers from left - to - right , though some parameters may be re - ordered . For example , closures are best placed at the end to take advantage of ABI compatibility between thick closures and thin ones without a context . <nl> + <nl> + Some values must be passed and returned indirectly as they are * address only * . Address only values include [ non - bitwise - copyable ] ( # type - properties ) values , values with [ opaque layout ] ( # opaque - layout ) , and non - class - constrained [ existential values ] ( # existential - containers ) . Even if the runtime type would normally be passed in a register , or even if the type is statically known at the call - site , if the callee receives or returns values with opaque layout , they must be passed or returned indirectly . <nl> + <nl> + We should investigate whether it makes sense to split values with partially opaque layout by passing the non - opaque parts in registers [ [ SR - xxxx ] ( ) ] . <nl> + <nl> + Parameter ownership is not reflected in the physical calling convention , though it will be noted in the mangling of the function name . Default argument expressions will not be ABI , as they will be emitted into the caller . This means that a library can add , modify , or remove default argument expressions without breaking binary compatibility ( though modifying / removing may break source compatibility ) . <nl> + <nl> + # # # # < a name = " lowering - higher - order - functions " > < / a > Lowering Higher - Order Functions <nl> + <nl> + Passing or returning higher - order functions may involve undergoing [ reabstraction ] ( # abstraction - levels ) , which requires that the compiler creates a thunk mapping between the actual calling convention and the expected calling convention . <nl> + <nl> + For example , let ' s say there are two functions : <nl> + <nl> + ` ` ` swift <nl> + func add1 ( _ i : Int ) - > Int { return i + 1 } <nl> + func apply < T , U > ( _ f : ( T ) - > U , _ x : T ) - > U { return f ( x ) } <nl> + ` ` ` <nl> + <nl> + ` apply ` ' s function parameter ` f ` must take and return its values indirectly , as ` T ` and ` U ` have opaque layout . If ` add1 ` is passed to ` apply ` , the compiler will create a thunk for ` apply ` to call that takes a parameter indirectly and calls ` add1 ` by passing it in register . The thunk will then receive the result in register and return it indirectly back to ` apply ` . <nl> + <nl> + # # # Stack Invariants <nl> + <nl> + Calling conventions include invariants about the call stack , such as stack alignment . 
Unless there is a very compelling reason to deviate , Swift should just honor the stack invariants of the standard calling convention . This is because Swift functions may share their call stack with non - Swift code . For example , a Swift function that calls an Objective - C function , which in turn calls another Swift function , would want to maintain the proper stack alignment ( and any other stack invariants ) for all calls . This is far simpler if they both honor the same invariants . <nl> + <nl> + # # # < a name = " runtime - calling - convention " > < / a > Runtime Calling Convention <nl> + <nl> + The Swift runtime uses the standard calling convention , though it may evolve to preserve more invariants . It ' s likely beneficial to introduce one or a number of tweaks to the scratch register sets of some runtime functions . Swift code that makes a call into the runtime assumes some registers are scratch , i . e . clobbered by the runtime function . But , some runtime functions may not need as many scratch registers and can guarantee more registers as callee - saved . Every formerly - scratch register that is denoted callee - saved ( i . e . runtime function saved ) relieves the register pressure of the surrounding Swift code making the runtime call . <nl> + <nl> + Such changes to runtime functions can be rolled out incrementally in the future , and they are backwards compatible so long as no version of that function ever clobbers the now - saved registers . But , such a change is ratcheting , that is , every register that is changed to be runtime - saved can no longer go back to being scratch without breaking binary compatibility . If the reduced scratch register set causes the runtime function to spill , then the whole exercise was pointless and actively harmful . Great care should be taken and testing applied for any change to ensure that the runtime function never spills in the future . <nl> + <nl> + <nl> + # # < a name = " runtime " > < / a > Runtime <nl> + <nl> + Swift exposes a runtime that provides APIs for compiled code . Calls into the Swift runtime are produced by the compiler for concerns such as memory management and run - time type information . Additionally , the runtime exposes low - level reflection APIs that are useful to the standard library and some users . <nl> + <nl> + Every existing runtime function will need to be audited for its desirability and behavior [ [ SR - 3735 ] ( https : / / bugs . swift . org / browse / SR - 3735 ) ] . For every function , we need to evaluate whether we want the API as is : <nl> + <nl> + * If yes , then we need to precisely specify the semantics and guarantees of the API . <nl> + * If not , we need to either change , remove , or replace the API , and precisely specify the new semantics . <nl> + <nl> + The runtime is also responsible for lazily creating new type metadata entries at run time , either for generic type instantiations or for resilient constructs . Library evolution in general introduces a whole new category of needs from the runtime by making data and metadata more opaque , requiring interaction to be done through runtime APIs . Additionally , ownership semantics may require new runtime APIs or modifications to existing APIs . These new runtime needs are still under investigation [ [ SR - xxxx ] ( ) ] . <nl> + <nl> + There are many potential future directions to open up the ABI and operate on less - opaque data directly , as well as techniques such as call - site caching .
These are ABI - additive , and will be interesting to explore in the future . <nl> + <nl> + For a potentially - out - of - date listing of runtime symbols and some details , see the [ Runtime docs ] ( https : / / github . com / apple / swift / blob / master / docs / Runtime . md ) . <nl> + <nl> + # # < a name = " standard - library " > < / a > Standard Library <nl> + <nl> + Any standard library API shipped post - ABI - stability must be supported into the future to ensure binary compatibility . The standard library will also be utilizing resilience annotations and * inlineable * code . Inlineable code is code that is bundled with the client ' s code , and is available for inlining to the optimizer if it decides to do so . The standard library faces the following ( non - exhaustive ) list of challenges for ensuring binary compatibility : <nl> + <nl> + * Published public functions and types cannot be removed or changed in ways that break binary compatibility . <nl> + * Choosing what code to make inlineable will affect performance and flexibility . <nl> + * Internal functions called by inlineable code become ABI , and are subject to the same binary compatibility concerns as public functions . <nl> + * Non - resilient types cannot change their layout . <nl> + * Protocols cannot add new requirements . <nl> + <nl> + # # # Inlineability <nl> + <nl> + Inlineable code that calls internal functions makes those internal functions ABI , as the client code will be making external calls to them . Thus , many internal interfaces in the standard library will need to be locked down if called from inlineable code . Whether to mark code inlineable will have to carefully weigh performance requirements against keeping flexibility for future changes . <nl> + <nl> + This tradeoff between performance and flexibility also affects the ability to deploy bug fixes and performance improvements to users . Users that have inlined code from the standard library will not be able to get bug fixes and performance improvements in an OS update without performing a recompilation with the new library . For more information on this topic , see [ Inlineable Functions ] ( https : / / github . com / apple / swift / blob / master / docs / LibraryEvolution . rst # inlineable - functions ) . <nl> + <nl> + # # # Upcoming Changes <nl> + <nl> + While the standard library is already ensuring source stability , it will be changing many of its fundamental underlying representations this year . When ABI stability lands , the standard library will be severely limited in the kinds of changes it can make to existing APIs and non - resilient types . Getting the standard library in the right place is of critical importance . <nl> + <nl> + The programming model for String is still being redesigned [ [ SR - xxxx ] ( ) ] , and many types such as Int are undergoing implementation changes [ [ SR - xxxx ] ( ) ] . At the same time , the standard library is simultaneously switching to new compiler features such as conditional conformances to clean up and deliver the best APIs [ [ SR - xxxx ] ( ) ] . <nl> + <nl> + Another goal of Swift is to improve the applicability of Swift to systems programming . Ownership semantics may make a large impact , including things such as improved ` inout ` semantics that allow for efficient and safe array slicing . Providing the right abstractions for efficient use of contiguous memory is still under investigation [ [ SR - xxxx ] ( ) ] . 
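<nl> + <nl> + To make the inlineability tradeoff above concrete , here is a hypothetical sketch ( the function names are invented and no particular attribute spelling is implied ) : if the body of ` normalized ` were published for inlining into clients , the internal ` scaled ` helper it calls would become ABI . <nl> + <nl> + ` ` ` swift <nl>
+ // If this public function's body is made available for inlining, clients <nl>
+ // may compile it into their own binaries and call scaled(_:by:) directly. <nl>
+ public func normalized(_ values: [Double]) -> [Double] { <nl>
+   return scaled(values, by: 1.0 / Double(max(values.count, 1))) <nl>
+ } <nl>
+ <nl>
+ // This helper is internal, but once it is referenced from inlineable code <nl>
+ // it must be kept binary compatible like a public entry point. <nl>
+ internal func scaled(_ values: [Double], by factor: Double) -> [Double] { <nl>
+   return values.map { $0 * factor } <nl>
+ } <nl>
+ ` ` `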
<nl> + <nl> + # # Next Steps <nl> + <nl> + All progress and issue tracking will be done through JIRA on [ bugs . swift . org ] ( https : / / bugs . swift . org ) , using the " AffectsABI " label . We will make an ABI stability dashboard to more easily monitor specifics and progress . The next step is to start making issues for everything that needs fixing and issues for the directions we want to explore . <nl> + <nl> + This document will be a living document until ABI stability is reached , updated with new findings and JIRA issues as they come up . After ABI stability is achieved , this document should be succeeded by technical specifications of Swift ' s ABI . <nl> + <nl> + Issue tracking alone doesn ' t effectively communicate the overall progress and when ABI stability can be expected to land . Some issues take longer than others and there isn ' t a good indication of how long the known tasks will take , nor of how many unknown issues are yet to be filed . For that , a higher level view of the overall status will be provided , possibly on [ swift . org ] ( https : / / swift . org ) . <nl> + <nl> + # # Appendix <nl> + <nl> + # # # < a name = " platform - abis " > < / a > Standard ABIs <nl> + <nl> + [ Apple ARM64 iOS platform ABI ] ( https : / / developer . apple . com / library / content / documentation / Xcode / Conceptual / iPhoneOSABIReference / Articles / ARM64FunctionCallingConventions . html ) is an vendor - specific variant of [ ARM ' s AAPCS64 ] ( http : / / infocenter . arm . com / help / topic / com . arm . doc . ihi0055b / IHI0055B_aapcs64 . pdf ) . <nl> + <nl> + [ Apple ARM32 iOS platform ABI ] ( https : / / developer . apple . com / library / content / documentation / Xcode / Conceptual / iPhoneOSABIReference / Articles / ARMv7FunctionCallingConventions . html ) is similarly a variant of [ ARM ' s AAPCS ] ( http : / / infocenter . arm . com / help / topic / com . arm . doc . ihi0042f / IHI0042F_aapcs . pdf ) . <nl> + <nl> + [ Apple x86 - 64 MacOS platform ABI ] ( https : / / developer . apple . com / library / content / documentation / DeveloperTools / Conceptual / LowLevelABI / 140 - x86 - 64_Function_Calling_Conventions / x86_64 . html ) is based off of the generic [ System V ABI ] ( http : / / people . freebsd . org / ~ obrien / amd64 - elf - abi . pdf ) also used by BSD and Linux . <nl> + <nl> + [ Apple i386 MacOS platform ABI ] ( https : / / developer . apple . com / library / content / documentation / DeveloperTools / Conceptual / LowLevelABI / 130 - IA - 32_Function_Calling_Conventions / IA32 . html # / / apple_ref / doc / uid / TP40002492 - SW4 ) is similarly based off of the generic [ i386 System V ABI ] ( http : / / www . sco . com / developers / devspecs / abi386 - 4 . pdf ) . <nl> | Merge pull request from milseman / abi - stability - manifesto | apple/swift | 3e671d2fe027a536d49300c88c42caafcc8962cc | 2017-01-25T21:14:51Z |
mmm a / lib / AST / ProtocolConformance . cpp <nl> ppp b / lib / AST / ProtocolConformance . cpp <nl> void NormalProtocolConformance : : differenceAndStoreConditionalRequirements ( ) { <nl> <nl> void NormalProtocolConformance : : setSignatureConformances ( <nl> ArrayRef < ProtocolConformanceRef > conformances ) { <nl> + if ( conformances . empty ( ) ) { <nl> + SignatureConformances = { } ; <nl> + return ; <nl> + } <nl> + <nl> auto & ctx = getProtocol ( ) - > getASTContext ( ) ; <nl> SignatureConformances = ctx . AllocateCopy ( conformances ) ; <nl> <nl> NormalProtocolConformance : : populateSignatureConformances ( ) { <nl> other . owning = false ; <nl> } <nl> <nl> - ~ Writer ( ) { <nl> - assert ( ( ! owning | | self - > isInvalid ( ) | | requirementSignature . empty ( ) ) & & <nl> - " signature conformances were not fully populated " ) ; <nl> - } <nl> - <nl> void operator ( ) ( ProtocolConformanceRef conformance ) { <nl> / / Make sure we have the right conformance . <nl> assert ( ! requirementSignature . empty ( ) & & " Too many conformances ? " ) ; <nl> mmm a / lib / Sema / TypeCheckProtocol . cpp <nl> ppp b / lib / Sema / TypeCheckProtocol . cpp <nl> void ConformanceChecker : : recordTypeWitness ( AssociatedTypeDecl * assocType , <nl> <nl> / / Record the type witness . <nl> Conformance - > setTypeWitness ( assocType , type , typeDecl ) ; <nl> + <nl> + / / Record type witnesses for any " overridden " associated types . <nl> + llvm : : SetVector < AssociatedTypeDecl * > overriddenAssocTypes ; <nl> + overriddenAssocTypes . insert ( assocType - > getOverriddenDecls ( ) . begin ( ) , <nl> + assocType - > getOverriddenDecls ( ) . end ( ) ) ; <nl> + for ( unsigned idx = 0 ; idx < overriddenAssocTypes . size ( ) ; + + idx ) { <nl> + auto overridden = overriddenAssocTypes [ idx ] ; <nl> + <nl> + / / Note all of the newly - discovered overridden associated types . <nl> + overriddenAssocTypes . insert ( overridden - > getOverriddenDecls ( ) . begin ( ) , <nl> + overridden - > getOverriddenDecls ( ) . end ( ) ) ; <nl> + <nl> + / / Find the conformance for this overridden protocol . <nl> + auto overriddenConformance = <nl> + DC - > getParentModule ( ) - > lookupConformance ( Adoptee , <nl> + overridden - > getProtocol ( ) ) ; <nl> + if ( ! overriddenConformance | | <nl> + ! overriddenConformance - > isConcrete ( ) ) <nl> + continue ; <nl> + <nl> + auto overriddenRootConformance = <nl> + overriddenConformance - > getConcrete ( ) - > getRootNormalConformance ( ) ; <nl> + ConformanceChecker ( TC , overriddenRootConformance , GlobalMissingWitnesses ) <nl> + . recordTypeWitness ( overridden , type , typeDecl , <nl> + / * performRedeclarationCheck = * / true ) ; <nl> + } <nl> } <nl> <nl> bool swift : : <nl> void ConformanceChecker : : addUsedConformances ( ProtocolConformance * conformance ) { <nl> addUsedConformances ( conformance , visited ) ; <nl> } <nl> <nl> - void ConformanceChecker : : ensureRequirementsAreSatisfied ( ) { <nl> + void ConformanceChecker : : ensureRequirementsAreSatisfied ( <nl> + bool failUnsubstituted ) { <nl> auto proto = Conformance - > getProtocol ( ) ; <nl> / / Some other problem stopped the signature being computed . <nl> if ( ! 
proto - > isRequirementSignatureComputed ( ) ) { <nl> void ConformanceChecker : : ensureRequirementsAreSatisfied ( ) { <nl> <nl> class GatherConformancesListener : public GenericRequirementsCheckListener { <nl> TypeChecker & tc ; <nl> - DeclContext * dc ; <nl> + NormalProtocolConformance * conformance ; <nl> std : : function < void ( ProtocolConformanceRef ) > & writer ; <nl> public : <nl> GatherConformancesListener ( <nl> - TypeChecker & tc , DeclContext * dc , <nl> + TypeChecker & tc , <nl> + NormalProtocolConformance * conformance , <nl> std : : function < void ( ProtocolConformanceRef ) > & writer ) <nl> - : tc ( tc ) , dc ( dc ) , writer ( writer ) { } <nl> + : tc ( tc ) , conformance ( conformance ) , writer ( writer ) { } <nl> <nl> void satisfiedConformance ( Type depTy , Type replacementTy , <nl> ProtocolConformanceRef conformance ) override { <nl> void ConformanceChecker : : ensureRequirementsAreSatisfied ( ) { <nl> conformance = * tc . conformsToProtocol ( <nl> interfaceType , <nl> conformance . getRequirement ( ) , <nl> - dc , <nl> + this - > conformance - > getDeclContext ( ) , <nl> ( ConformanceCheckFlags : : SuppressDependencyTracking | <nl> ConformanceCheckFlags : : SkipConditionalRequirements ) ) ; <nl> <nl> void ConformanceChecker : : ensureRequirementsAreSatisfied ( ) { <nl> <nl> writer ( conformance ) ; <nl> } <nl> - } listener ( TC , DC , writer ) ; <nl> + <nl> + bool diagnoseUnsatisfiedRequirement ( <nl> + const Requirement & req , Type first , Type second , <nl> + ArrayRef < ParentConditionalConformance > parents ) override { <nl> + / / Invalidate the conformance to suppress further diagnostics . <nl> + if ( conformance - > getLoc ( ) . isValid ( ) ) { <nl> + conformance - > setInvalid ( ) ; <nl> + } <nl> + <nl> + return false ; <nl> + } <nl> + } listener ( TC , Conformance , writer ) ; <nl> <nl> auto result = TC . checkGenericArguments ( <nl> DC , Loc , Loc , <nl> void ConformanceChecker : : ensureRequirementsAreSatisfied ( ) { <nl> nullptr , <nl> ConformanceCheckFlags : : Used , & listener ) ; <nl> <nl> - / / If there were errors , mark the conformance as invalid . <nl> - if ( result ! = RequirementCheckResult : : Success ) { <nl> + switch ( result ) { <nl> + case RequirementCheckResult : : Success : <nl> + return ; <nl> + <nl> + case RequirementCheckResult : : Failure : <nl> Conformance - > setInvalid ( ) ; <nl> + return ; <nl> + <nl> + case RequirementCheckResult : : UnsatisfiedDependency : <nl> + llvm_unreachable ( " Cannot handle unsatisfied dependencies here " ) ; <nl> + <nl> + case RequirementCheckResult : : SubstitutionFailure : <nl> + / / If we ' re not allowed to fail , record this as a partially - checked <nl> + / / conformance . <nl> + if ( ! failUnsubstituted ) { <nl> + TC . PartiallyCheckedConformances . insert ( Conformance ) ; <nl> + return ; <nl> + } <nl> + <nl> + / / Diagnose the failure generically . <nl> + / / FIXME : Would be nice to give some more context here ! <nl> + if ( ! Conformance - > isInvalid ( ) ) { <nl> + TC . diagnose ( Loc , diag : : type_does_not_conform , <nl> + Adoptee , Proto - > getDeclaredType ( ) ) ; <nl> + Conformance - > setInvalid ( ) ; <nl> + } <nl> + return ; <nl> } <nl> } <nl> <nl> void TypeChecker : : checkConformance ( NormalProtocolConformance * conformance ) { <nl> checker . checkAllConformances ( ) ; <nl> } <nl> <nl> + void TypeChecker : : checkConformanceRequirements ( <nl> + NormalProtocolConformance * conformance ) { <nl> + / / If the conformance is already invalid , there ' s nothing to do here . 
<nl> + if ( conformance - > isInvalid ( ) ) <nl> + return ; <nl> + <nl> + conformance - > setSignatureConformances ( { } ) ; <nl> + <nl> + llvm : : SetVector < ValueDecl * > globalMissingWitnesses ; <nl> + ConformanceChecker checker ( * this , conformance , globalMissingWitnesses ) ; <nl> + checker . ensureRequirementsAreSatisfied ( / * failUnsubstituted = * / true ) ; <nl> + } <nl> + <nl> / / / Determine the score when trying to match two identifiers together . <nl> static unsigned scoreIdentifiers ( Identifier lhs , Identifier rhs , <nl> unsigned limit ) { <nl> mmm a / lib / Sema / TypeCheckProtocol . h <nl> ppp b / lib / Sema / TypeCheckProtocol . h <nl> class ConformanceChecker : public WitnessChecker { <nl> <nl> / / / Check all of the protocols requirements are actually satisfied by a <nl> / / / the chosen type witnesses . <nl> - void ensureRequirementsAreSatisfied ( ) ; <nl> + / / / <nl> + / / / \ param failUnsubstituted Whether to fail when the requirements of the <nl> + / / / protocol could not be substituted ( e . g . , due to missing information ) . <nl> + / / / When true , emits a diagnostic in such cases ; when false , enqueues the <nl> + / / / conformance for later checking . <nl> + void ensureRequirementsAreSatisfied ( bool failUnsubstituted ) ; <nl> <nl> / / / Check the entire protocol conformance , ensuring that all <nl> / / / witnesses are resolved and emitting any diagnostics . <nl> void checkConformance ( MissingWitnessDiagnosisKind Kind ) ; <nl> } ; <nl> + <nl> / / / Captures the state needed to infer associated types . <nl> class AssociatedTypeInference { <nl> / / / The type checker we ' ll need to validate declarations etc . <nl> mmm a / lib / Sema / TypeCheckProtocolInference . cpp <nl> ppp b / lib / Sema / TypeCheckProtocolInference . cpp <nl> void ConformanceChecker : : resolveTypeWitnesses ( ) { <nl> SWIFT_DEFER { <nl> / / Resolution attempts to have the witnesses be correct by construction , but <nl> / / this isn ' t guaranteed , so let ' s double check . <nl> - ensureRequirementsAreSatisfied ( ) ; <nl> + ensureRequirementsAreSatisfied ( / * failUnsubstituted = * / false ) ; <nl> } ; <nl> <nl> / / Attempt to infer associated type witnesses . <nl> void ConformanceChecker : : resolveTypeWitnesses ( ) { <nl> for ( const auto & inferredWitness : * inferred ) { <nl> recordTypeWitness ( inferredWitness . first , inferredWitness . second , <nl> / * typeDecl = * / nullptr , <nl> - / * performRedeclarationCheck = * / true ) ; <nl> + / * performRedeclarationCheck = * / true ) ; <nl> } <nl> <nl> - ensureRequirementsAreSatisfied ( ) ; <nl> + ensureRequirementsAreSatisfied ( / * failUnsubstituted = * / false ) ; <nl> return ; <nl> } <nl> <nl> void ConformanceChecker : : resolveTypeWitnesses ( ) { <nl> <nl> recordTypeWitness ( assocType , ErrorType : : get ( TC . Context ) , nullptr , true ) ; <nl> } <nl> - <nl> - return ; <nl> - <nl> - / / Multiple solutions . Diagnose the ambiguity . <nl> - <nl> } <nl> <nl> void ConformanceChecker : : resolveSingleTypeWitness ( <nl> mmm a / lib / Sema / TypeChecker . cpp <nl> ppp b / lib / Sema / TypeChecker . cpp <nl> static void typeCheckFunctionsAndExternalDecls ( TypeChecker & TC ) { <nl> TC . finalizeDecl ( decl ) ; <nl> } <nl> <nl> + / / Ensure that the requirements of the given conformance are <nl> + / / fully checked . <nl> + for ( unsigned i = 0 ; i ! = TC . PartiallyCheckedConformances . size ( ) ; + + i ) { <nl> + auto conformance = TC . PartiallyCheckedConformances [ i ] ; <nl> + TC . 
checkConformanceRequirements ( conformance ) ; <nl> + } <nl> + TC . PartiallyCheckedConformances . clear ( ) ; <nl> + <nl> / / Complete any conformances that we used . <nl> for ( unsigned i = 0 ; i ! = TC . UsedConformances . size ( ) ; + + i ) { <nl> auto conformance = TC . UsedConformances [ i ] ; <nl> static void typeCheckFunctionsAndExternalDecls ( TypeChecker & TC ) { <nl> currentExternalDef < TC . Context . ExternalDefinitions . size ( ) | | <nl> ! TC . DeclsToFinalize . empty ( ) | | <nl> ! TC . DelayedRequirementSignatures . empty ( ) | | <nl> - ! TC . UsedConformances . empty ( ) ) ; <nl> + ! TC . UsedConformances . empty ( ) | | <nl> + ! TC . PartiallyCheckedConformances . empty ( ) ) ; <nl> <nl> / / FIXME : Horrible hack . Store this somewhere more appropriate . <nl> TC . Context . LastCheckedExternalDefinition = currentExternalDef ; <nl> mmm a / lib / Sema / TypeChecker . h <nl> ppp b / lib / Sema / TypeChecker . h <nl> class TypeChecker final : public LazyResolver { <nl> / / / completed before type checking is considered complete . <nl> llvm : : SetVector < NormalProtocolConformance * > UsedConformances ; <nl> <nl> + / / / The list of protocol conformances whose requirements could not be <nl> + / / / fully checked and , therefore , should be checked again at the top <nl> + / / / level . <nl> + llvm : : SetVector < NormalProtocolConformance * > PartiallyCheckedConformances ; <nl> + <nl> / / / The list of declarations that we ' ve done at least partial validation <nl> / / / of during type - checking , but which will need to be finalized before <nl> / / / we can hand them off to SILGen etc . <nl> class TypeChecker final : public LazyResolver { <nl> / / / Completely check the given conformance . <nl> void checkConformance ( NormalProtocolConformance * conformance ) ; <nl> <nl> + / / / Check the requirement signature of the given conformance . <nl> + void checkConformanceRequirements ( NormalProtocolConformance * conformance ) ; <nl> + <nl> / / / Check all of the conformances in the given context . <nl> void checkConformancesInContext ( DeclContext * dc , <nl> IterableDeclContext * idc ) ; <nl> mmm a / test / Generics / deduction . swift <nl> ppp b / test / Generics / deduction . swift <nl> func foo ( ) { <nl> <nl> infix operator + & <nl> func + & < R , S > ( lhs : inout R , rhs : S ) where R : RangeReplaceableCollection , S : Sequence , R . Element = = S . Element { } <nl> - / / expected - note @ - 1 { { candidate requires that the types ' String ' and ' String . Element ' ( aka ' Character ' ) be equivalent ( requirement specified as ' R . Element ' = = ' S . Element ' [ with R = [ String ] , S = String ] ) } } <nl> + / / expected - note @ - 1 { { candidate requires that the types ' String ' and ' Character ' be equivalent ( requirement specified as ' R . Element ' = = ' S . Element ' [ with R = [ String ] , S = String ] ) } } <nl> <nl> func rdar33477726_1 ( ) { <nl> var arr : [ String ] = [ ] <nl> func rdar33477726_1 ( ) { <nl> } <nl> <nl> func rdar33477726_2 < R , S > ( _ : R , _ : S ) where R : Sequence , S = = R . Element { } <nl> - / / expected - note @ - 1 { { candidate requires that the types ' Int ' and ' String . Element ' ( aka ' Character ' ) be equivalent ( requirement specified as ' S ' = = ' R . Element ' [ with R = String , S = Int ] ) } } <nl> + / / expected - note @ - 1 { { candidate requires that the types ' Int ' and ' Character ' be equivalent ( requirement specified as ' S ' = = ' R . 
Element ' [ with R = String , S = Int ] ) } } <nl> rdar33477726_2 ( " answer " , 42 ) <nl> / / expected - error @ - 1 { { cannot invoke ' rdar33477726_2 ( _ : _ : ) ' with an argument list of type ' ( String , Int ) ' } } <nl> <nl> prefix operator + - <nl> prefix func + - < T > ( _ : T ) where T : Sequence , T . Element = = Int { } <nl> - / / expected - note @ - 1 { { candidate requires that the types ' String . Element ' ( aka ' Character ' ) and ' Int ' be equivalent ( requirement specified as ' T . Element ' = = ' Int ' [ with T = String ] ) } } <nl> + / / expected - note @ - 1 { { candidate requires that the types ' Character ' and ' Int ' be equivalent ( requirement specified as ' T . Element ' = = ' Int ' [ with T = String ] ) } } <nl> <nl> + - " hello " <nl> / / expected - error @ - 1 { { unary operator ' + - ( _ : ) ' cannot be applied to an operand of type ' String ' } } <nl> new file mode 100644 <nl> index 000000000000 . . f3886fc76972 <nl> mmm / dev / null <nl> ppp b / validation - test / compiler_crashers_2_fixed / 0135 - rdar26140749 . swift <nl> <nl> + / / RUN : not % target - swift - frontend % s - typecheck <nl> + <nl> + protocol ProtocolWithCount : Collection { <nl> + var count : UInt64 { get } <nl> + } <nl> + <nl> + class ClassWithoutCount : ProtocolWithCount { <nl> + / / var count : UInt64 = 0 <nl> + var startIndex : UInt64 { get { return 0 } } <nl> + var endIndex : UInt64 { get { return 0 } } <nl> + subscript ( i : UInt64 ) - > Int64 { get { return 0 } } <nl> + } <nl> + <nl> new file mode 100644 <nl> index 000000000000 . . 5df35be46903 <nl> mmm / dev / null <nl> ppp b / validation - test / compiler_crashers_2_fixed / 0136 - rdar35082483 . swift <nl> <nl> + / / RUN : not % target - swift - frontend % s - typecheck <nl> + <nl> + struct S : Sequence { <nl> + struct Iterator : IteratorProtocol { <nl> + mutating func next ( ) - > Int ? { <nl> + fatalError ( ) <nl> + } <nl> + } <nl> + <nl> + func makeIterator ( ) - > Iterator { <nl> + fatalError ( ) <nl> + } <nl> + } <nl> + <nl> + extension S : Collection { <nl> + typealias Index = Int <nl> + <nl> + var startIndex : Int { return 0 } <nl> + var endIndex : Int { return 1 } <nl> + } <nl> mmm a / validation - test / stdlib / CollectionDiagnostics . swift <nl> ppp b / validation - test / stdlib / CollectionDiagnostics . swift <nl> import StdlibCollectionUnittest <nl> / / Check that Collection . SubSequence is constrained to Collection . <nl> / / <nl> <nl> - / / expected - error @ + 2 { { type ' CollectionWithBadSubSequence ' does not conform to protocol ' Collection ' } } <nl> - / / expected - error @ + 1 { { type ' CollectionWithBadSubSequence ' does not conform to protocol ' Sequence ' } } <nl> + / / expected - error @ + 1 { { type ' CollectionWithBadSubSequence ' does not conform to protocol ' Collection ' } } <nl> struct CollectionWithBadSubSequence : Collection { <nl> var startIndex : MinimalIndex { <nl> fatalError ( " unreachable " ) <nl> struct CollectionWithBadSubSequence : Collection { <nl> fatalError ( " unreachable " ) <nl> } <nl> <nl> - / / expected - note @ + 2 { { possibly intended match } } <nl> / / expected - note @ + 1 { { possibly intended match } } <nl> typealias SubSequence = OpaqueValue < Int8 > <nl> } <nl> | Merge pull request from DougGregor / conformance - checking - unsatisfied - reqs | apple/swift | 4a7386e0bbc8939eb5b3fbaa7cb8f145941bb402 | 2017-12-19T05:52:57Z |
mmm a / include / swift / AST / AutoDiff . h <nl> ppp b / include / swift / AST / AutoDiff . h <nl> bool getBuiltinDifferentiableOrLinearFunctionConfig ( <nl> bool getBuiltinDifferentiableOrLinearFunctionConfig ( <nl> StringRef operationName , unsigned & arity , bool & throws ) ; <nl> <nl> + / / / Returns the SIL differentiability witness generic signature given the <nl> + / / / original declaration ' s generic signature and the derivative generic <nl> + / / / signature . <nl> + / / / <nl> + / / / In general , the differentiability witness generic signature is equal to the <nl> + / / / derivative generic signature . <nl> + / / / <nl> + / / / Edge case , if two conditions are satisfied : <nl> + / / / 1 . The derivative generic signature is equal to the original generic <nl> + / / / signature . <nl> + / / / 2 . The derivative generic signature has * all concrete * generic parameters <nl> + / / / ( i . e . all generic parameters are bound to concrete types via same - type <nl> + / / / requirements ) . <nl> + / / / <nl> + / / / Then the differentiability witness generic signature is ` nullptr ` . <nl> + / / / <nl> + / / / Both the original and derivative declarations are lowered to SIL functions <nl> + / / / with a fully concrete type and no generic signature , so the <nl> + / / / differentiability witness should similarly have no generic signature . <nl> + GenericSignature <nl> + getDifferentiabilityWitnessGenericSignature ( GenericSignature origGenSig , <nl> + GenericSignature derivativeGenSig ) ; <nl> + <nl> } / / end namespace autodiff <nl> <nl> } / / end namespace swift <nl> mmm a / lib / AST / AutoDiff . cpp <nl> ppp b / lib / AST / AutoDiff . cpp <nl> bool autodiff : : getBuiltinDifferentiableOrLinearFunctionConfig ( <nl> return operationName . empty ( ) ; <nl> } <nl> <nl> + GenericSignature autodiff : : getDifferentiabilityWitnessGenericSignature ( <nl> + GenericSignature origGenSig , GenericSignature derivativeGenSig ) { <nl> + / / If there is no derivative generic signature , return the original generic <nl> + / / signature . <nl> + if ( ! derivativeGenSig ) <nl> + return origGenSig ; <nl> + / / If derivative generic signature has all concrete generic parameters and is <nl> + / / equal to the original generic signature , return ` nullptr ` . <nl> + auto derivativeCanGenSig = derivativeGenSig . getCanonicalSignature ( ) ; <nl> + auto origCanGenSig = origGenSig . getCanonicalSignature ( ) ; <nl> + if ( origCanGenSig = = derivativeCanGenSig & & <nl> + derivativeCanGenSig - > areAllParamsConcrete ( ) ) <nl> + return GenericSignature ( ) ; <nl> + / / Otherwise , return the derivative generic signature . <nl> + return derivativeGenSig ; <nl> + } <nl> + <nl> Type TangentSpace : : getType ( ) const { <nl> switch ( kind ) { <nl> case Kind : : TangentVector : <nl> mmm a / lib / SILGen / SILGen . cpp <nl> ppp b / lib / SILGen / SILGen . cpp <nl> void SILGenModule : : postEmitFunction ( SILDeclRef constant , <nl> emitDifferentiabilityWitnessesForFunction ( constant , F ) ; <nl> } <nl> <nl> - / / / Returns the SIL differentiability witness generic signature given the <nl> - / / / original declaration ' s generic signature and the derivative generic <nl> - / / / signature . <nl> - / / / <nl> - / / / In general , the differentiability witness generic signature is equal to the <nl> - / / / derivative generic signature . <nl> - / / / <nl> - / / / Edge case , if two conditions are satisfied : <nl> - / / / 1 . The derivative generic signature is equal to the original generic <nl> - / / / signature . 
<nl> - / / / 2 . The derivative generic signature has * all concrete * generic parameters <nl> - / / / ( i . e . all generic parameters are bound to concrete types via same - type <nl> - / / / requirements ) . <nl> - / / / <nl> - / / / Then the differentiability witness generic signature is ` nullptr ` . <nl> - / / / <nl> - / / / Both the original and derivative declarations are lowered to SIL functions <nl> - / / / with a fully concrete type and no generic signature , so the <nl> - / / / differentiability witness should similarly have no generic signature . <nl> - static GenericSignature <nl> - getDifferentiabilityWitnessGenericSignature ( GenericSignature origGenSig , <nl> - GenericSignature derivativeGenSig ) { <nl> - / / If there is no derivative generic signature , return the original generic <nl> - / / signature . <nl> - if ( ! derivativeGenSig ) <nl> - return origGenSig ; <nl> - / / If derivative generic signature has all concrete generic parameters and is <nl> - / / equal to the original generic signature , return ` nullptr ` . <nl> - auto derivativeCanGenSig = derivativeGenSig . getCanonicalSignature ( ) ; <nl> - auto origCanGenSig = origGenSig . getCanonicalSignature ( ) ; <nl> - if ( origCanGenSig = = derivativeCanGenSig & & <nl> - derivativeCanGenSig - > areAllParamsConcrete ( ) ) <nl> - return GenericSignature ( ) ; <nl> - / / Otherwise , return the derivative generic signature . <nl> - return derivativeGenSig ; <nl> - } <nl> - <nl> void SILGenModule : : emitDifferentiabilityWitnessesForFunction ( <nl> SILDeclRef constant , SILFunction * F ) { <nl> / / Visit ` @ derivative ` attributes and generate SIL differentiability <nl> void SILGenModule : : emitDifferentiabilityWitnessesForFunction ( <nl> diffAttr - > getDerivativeGenericSignature ( ) ) & & <nl> " Type - checking should resolve derivative generic signatures for " <nl> " all original SIL functions with generic signatures " ) ; <nl> - auto witnessGenSig = getDifferentiabilityWitnessGenericSignature ( <nl> - AFD - > getGenericSignature ( ) , <nl> - diffAttr - > getDerivativeGenericSignature ( ) ) ; <nl> + auto witnessGenSig = <nl> + autodiff : : getDifferentiabilityWitnessGenericSignature ( <nl> + AFD - > getGenericSignature ( ) , <nl> + diffAttr - > getDerivativeGenericSignature ( ) ) ; <nl> AutoDiffConfig config ( diffAttr - > getParameterIndices ( ) , resultIndices , <nl> witnessGenSig ) ; <nl> emitDifferentiabilityWitness ( AFD , F , config , / * jvp * / nullptr , <nl> void SILGenModule : : emitDifferentiabilityWitnessesForFunction ( <nl> auto origDeclRef = <nl> SILDeclRef ( origAFD ) . asForeign ( requiresForeignEntryPoint ( origAFD ) ) ; <nl> auto * origFn = getFunction ( origDeclRef , NotForDefinition ) ; <nl> - auto witnessGenSig = getDifferentiabilityWitnessGenericSignature ( <nl> - origAFD - > getGenericSignature ( ) , AFD - > getGenericSignature ( ) ) ; <nl> + auto witnessGenSig = <nl> + autodiff : : getDifferentiabilityWitnessGenericSignature ( <nl> + origAFD - > getGenericSignature ( ) , AFD - > getGenericSignature ( ) ) ; <nl> auto * resultIndices = IndexSubset : : get ( getASTContext ( ) , 1 , { 0 } ) ; <nl> AutoDiffConfig config ( derivAttr - > getParameterIndices ( ) , resultIndices , <nl> witnessGenSig ) ; <nl> mmm a / lib / SILOptimizer / Differentiation / Common . cpp <nl> ppp b / lib / SILOptimizer / Differentiation / Common . 
cpp <nl> findMinimalDerivativeConfiguration ( AbstractFunctionDecl * original , <nl> silParameterIndices - > getNumIndices ( ) < <nl> minimalConfig - > parameterIndices - > getNumIndices ( ) ) ) { <nl> minimalASTParameterIndices = config . parameterIndices ; <nl> - minimalConfig = AutoDiffConfig ( silParameterIndices , config . resultIndices , <nl> - config . derivativeGenericSignature ) ; <nl> + minimalConfig = <nl> + AutoDiffConfig ( silParameterIndices , config . resultIndices , <nl> + autodiff : : getDifferentiabilityWitnessGenericSignature ( <nl> + original - > getGenericSignature ( ) , <nl> + config . derivativeGenericSignature ) ) ; <nl> } <nl> } <nl> return minimalConfig ; <nl> mmm a / lib / TBDGen / TBDGen . cpp <nl> ppp b / lib / TBDGen / TBDGen . cpp <nl> void TBDGenVisitor : : addAutoDiffLinearMapFunction ( AbstractFunctionDecl * original , <nl> config . parameterIndices , <nl> original - > getInterfaceType ( ) - > castTo < AnyFunctionType > ( ) ) ; <nl> Mangle : : ASTMangler mangler ; <nl> - AutoDiffConfig silConfig { loweredParamIndices , config . resultIndices , <nl> - config . derivativeGenericSignature } ; <nl> + AutoDiffConfig silConfig { <nl> + loweredParamIndices , config . resultIndices , <nl> + autodiff : : getDifferentiabilityWitnessGenericSignature ( <nl> + original - > getGenericSignature ( ) , config . derivativeGenericSignature ) } ; <nl> std : : string linearMapName = <nl> mangler . mangleAutoDiffLinearMapHelper ( declRef . mangle ( ) , kind , silConfig ) ; <nl> addSymbol ( linearMapName ) ; <nl> void TBDGenVisitor : : addAutoDiffDerivativeFunction ( <nl> GenericSignature derivativeGenericSignature , <nl> AutoDiffDerivativeFunctionKind kind ) { <nl> auto * assocFnId = AutoDiffDerivativeFunctionIdentifier : : get ( <nl> - kind , parameterIndices , derivativeGenericSignature , <nl> + kind , parameterIndices , <nl> + autodiff : : getDifferentiabilityWitnessGenericSignature ( <nl> + original - > getGenericSignature ( ) , derivativeGenericSignature ) , <nl> original - > getASTContext ( ) ) ; <nl> auto declRef = <nl> SILDeclRef ( original ) . asForeign ( requiresForeignEntryPoint ( original ) ) ; <nl> void TBDGenVisitor : : addDifferentiabilityWitness ( <nl> original - > getInterfaceType ( ) - > castTo < AnyFunctionType > ( ) ) ; <nl> <nl> auto originalMangledName = declRef . mangle ( ) ; <nl> - AutoDiffConfig config { silParamIndices , resultIndices , <nl> - derivativeGenericSignature } ; <nl> + AutoDiffConfig config { <nl> + silParamIndices , resultIndices , <nl> + autodiff : : getDifferentiabilityWitnessGenericSignature ( <nl> + original - > getGenericSignature ( ) , derivativeGenericSignature ) } ; <nl> SILDifferentiabilityWitnessKey key ( originalMangledName , config ) ; <nl> <nl> Mangle : : ASTMangler mangler ; <nl> mmm a / test / AutoDiff / SILOptimizer / Inputs / differentiation_diagnostics_other_file . swift <nl> ppp b / test / AutoDiff / SILOptimizer / Inputs / differentiation_diagnostics_other_file . swift <nl> class Class : Differentiable { <nl> set { } <nl> } <nl> } <nl> + <nl> + struct S : Differentiable { <nl> + var value : Float <nl> + } <nl> + <nl> + extension Array where Element = = S { <nl> + @ differentiable <nl> + func sum ( ) - > Float { <nl> + return 0 <nl> + } <nl> + } <nl> mmm a / test / AutoDiff / SILOptimizer / differentiation_diagnostics_cross_file . swift <nl> ppp b / test / AutoDiff / SILOptimizer / differentiation_diagnostics_cross_file . swift <nl> func classRequirementSetters ( _ x : inout Class , _ newValue : Float ) { <nl> x . 
property = newValue <nl> x [ ] = newValue <nl> } <nl> + <nl> + / / Test cross - file lookup of a derivative function with all - concrete derivative generic signature . <nl> + @ differentiable <nl> + func allConcreteDerivativeGenericSignature ( _ a : [ S ] ) - > Float { <nl> + / / No error expected . <nl> + return a . sum ( ) <nl> + } <nl> mmm a / test / AutoDiff / TBD / derivative_symbols . swift <nl> ppp b / test / AutoDiff / TBD / derivative_symbols . swift <nl> public func topLevelDerivative < T : Differentiable > ( _ x : T ) - > ( <nl> fatalError ( ) <nl> } <nl> <nl> - struct Struct : Differentiable { <nl> + public struct Struct : Differentiable { <nl> var stored : Float <nl> <nl> / / Test property . <nl> struct Struct : Differentiable { <nl> fatalError ( ) <nl> } <nl> } <nl> + <nl> + extension Array where Element = = Struct { <nl> + @ differentiable <nl> + public func sum ( ) - > Float { <nl> + return 0 <nl> + } <nl> + } <nl> | [ AutoDiff ] remove all - concrete gen sig from more places | apple/swift | 7191c9c21d11c9a1837dc29d77a13d456de7d251 | 2020-07-16T00:28:14Z |
mmm a / tensorflow / core / BUILD <nl> ppp b / tensorflow / core / BUILD <nl> cc_library ( <nl> ] , <nl> ) <nl> <nl> - alias ( <nl> - name = " png_internal " , <nl> - actual = " / / tensorflow / core / lib / png : png_io " , <nl> - ) <nl> - <nl> - alias ( <nl> - name = " portable_png_internal " , <nl> - actual = " / / tensorflow / core / lib / png : png_io " , <nl> - ) <nl> - <nl> - alias ( <nl> - name = " android_png_internal " , <nl> - actual = " / / tensorflow / core / lib / png : png_io " , <nl> - ) <nl> - <nl> cc_library ( <nl> name = " tflite_portable_logging " , <nl> hdrs = [ <nl> mmm a / tensorflow / core / kernels / BUILD <nl> ppp b / tensorflow / core / kernels / BUILD <nl> tf_kernel_library ( <nl> tf_kernel_library ( <nl> name = " summary_image_op " , <nl> prefix = " summary_image_op " , <nl> - deps = LOGGING_DEPS + [ " / / tensorflow / core : png_internal " ] , <nl> + deps = LOGGING_DEPS + [ " / / tensorflow / core / lib / png : png_io " ] , <nl> ) <nl> <nl> # TODO ( b / 162630222 ) : remove this target <nl> cc_library ( <nl> deps = [ <nl> " / / tensorflow / core : portable_gif_internal " , <nl> " / / tensorflow / core : portable_jpeg_internal " , <nl> - " / / tensorflow / core : portable_png_internal " , <nl> " / / tensorflow / core : portable_tensorflow_lib_lite " , <nl> " / / tensorflow / core : protos_all_cc_impl " , <nl> + " / / tensorflow / core / lib / png : png_io " , <nl> " / / tensorflow / core / platform : strong_hash " , <nl> " / / third_party / eigen3 " , <nl> " / / third_party / fft2d : fft2d_headers " , <nl> mmm a / tensorflow / core / kernels / image / BUILD <nl> ppp b / tensorflow / core / kernels / image / BUILD <nl> IMAGE_DEPS = [ <nl> " / / tensorflow / core : jpeg_internal " , <nl> " / / tensorflow / core : lib " , <nl> " / / tensorflow / core : lib_internal " , <nl> - " / / tensorflow / core : png_internal " , <nl> + " / / tensorflow / core / lib / png : png_io " , <nl> " / / tensorflow / core : protos_all_cc " , <nl> " / / tensorflow / core / kernels : bounds_check " , <nl> " / / tensorflow / core / kernels : eigen_helpers " , <nl> cc_library ( <nl> deps = [ <nl> " / / tensorflow / core : android_gif_internal " , <nl> " / / tensorflow / core : android_jpeg_internal " , <nl> - " / / tensorflow / core : android_png_internal " , <nl> " / / tensorflow / core : portable_tensorflow_lib_lite " , <nl> + " / / tensorflow / core / lib / png : png_io " , <nl> ] , <nl> alwayslink = 1 , <nl> ) <nl> mmm a / tensorflow / core / lib / png / BUILD <nl> ppp b / tensorflow / core / lib / png / BUILD <nl> load ( <nl> <nl> package ( <nl> default_visibility = [ <nl> - # tensorflow / core : lib effectively exposes all targets under tensorflow / core / lib / * * <nl> - " / / tensorflow / core : __pkg__ " , <nl> + " / / tensorflow : __subpackages__ " , <nl> ] , <nl> licenses = [ " notice " ] , # Apache 2 . 
0 <nl> ) <nl> mmm a / tensorflow / core / summary / BUILD <nl> ppp b / tensorflow / core / summary / BUILD <nl> cc_library ( <nl> " / / tensorflow / core : framework " , <nl> " / / tensorflow / core : lib " , <nl> " / / tensorflow / core : lib_internal " , <nl> - " / / tensorflow / core : png_internal " , <nl> " / / tensorflow / core : protos_all_cc " , <nl> + " / / tensorflow / core / lib / png : png_io " , <nl> ] , <nl> ) <nl> <nl> mmm a / tensorflow / lite / delegates / flex / BUILD <nl> ppp b / tensorflow / lite / delegates / flex / BUILD <nl> cc_library ( <nl> deps = [ <nl> " / / tensorflow / core : portable_gif_internal " , <nl> " / / tensorflow / core : portable_jpeg_internal " , <nl> - " / / tensorflow / core : portable_png_internal " , <nl> + " / / tensorflow / core / lib / png : png_io " , <nl> ] , <nl> ) <nl> | [ CLEANUP ] Remove unnecessary aliases to core / lib / png : png_io | tensorflow/tensorflow | 92a328a4735183d4b203e1bf2abeb73dffb601b1 | 2020-09-30T01:07:04Z |
mmm a / build - aux / compile <nl> ppp b / build - aux / compile <nl> <nl> # ! / bin / sh <nl> # Wrapper for compilers which do not understand ' - c - o ' . <nl> <nl> - scriptversion = 2012 - 10 - 14 . 11 ; # UTC <nl> + scriptversion = 2018 - 03 - 07 . 03 ; # UTC <nl> <nl> - # Copyright ( C ) 1999 - 2014 Free Software Foundation , Inc . <nl> + # Copyright ( C ) 1999 - 2018 Free Software Foundation , Inc . <nl> # Written by Tom Tromey < tromey @ cygnus . com > . <nl> # <nl> # This program is free software ; you can redistribute it and / or modify <nl> scriptversion = 2012 - 10 - 14 . 11 ; # UTC <nl> # GNU General Public License for more details . <nl> # <nl> # You should have received a copy of the GNU General Public License <nl> - # along with this program . If not , see < http : / / www . gnu . org / licenses / > . <nl> + # along with this program . If not , see < https : / / www . gnu . org / licenses / > . <nl> <nl> # As a special exception to the GNU General Public License , if you <nl> # distribute this file as part of a program that contains a <nl> EOF <nl> echo " compile $ scriptversion " <nl> exit $ ? <nl> ; ; <nl> - cl | * [ / \ \ ] cl | cl . exe | * [ / \ \ ] cl . exe ) <nl> + cl | * [ / \ \ ] cl | cl . exe | * [ / \ \ ] cl . exe | \ <nl> + icl | * [ / \ \ ] icl | icl . exe | * [ / \ \ ] icl . exe ) <nl> func_cl_wrapper " $ @ " # Doesn ' t return . . . <nl> ; ; <nl> esac <nl> exit $ ret <nl> # Local Variables : <nl> # mode : shell - script <nl> # sh - indentation : 2 <nl> - # eval : ( add - hook ' write - file - hooks ' time - stamp ) <nl> + # eval : ( add - hook ' before - save - hook ' time - stamp ) <nl> # time - stamp - start : " scriptversion = " <nl> # time - stamp - format : " % : y - % 02m - % 02d . % 02H " <nl> - # time - stamp - time - zone : " UTC " <nl> + # time - stamp - time - zone : " UTC0 " <nl> # time - stamp - end : " ; # UTC " <nl> # End : <nl> mmm a / build - aux / config . guess <nl> ppp b / build - aux / config . guess <nl> <nl> # ! / bin / sh <nl> # Attempt to guess a canonical system name . <nl> - # Copyright 1992 - 2014 Free Software Foundation , Inc . <nl> + # Copyright 1992 - 2019 Free Software Foundation , Inc . <nl> <nl> - timestamp = ' 2014 - 11 - 04 ' <nl> + timestamp = ' 2019 - 09 - 10 ' <nl> <nl> # This file is free software ; you can redistribute it and / or modify it <nl> # under the terms of the GNU General Public License as published by <nl> timestamp = ' 2014 - 11 - 04 ' <nl> # General Public License for more details . <nl> # <nl> # You should have received a copy of the GNU General Public License <nl> - # along with this program ; if not , see < http : / / www . gnu . org / licenses / > . <nl> + # along with this program ; if not , see < https : / / www . gnu . org / licenses / > . <nl> # <nl> # As a special exception to the GNU General Public License , if you <nl> # distribute this file as part of a program that contains a <nl> timestamp = ' 2014 - 11 - 04 ' <nl> # Originally written by Per Bothner ; maintained since 2000 by Ben Elliston . <nl> # <nl> # You can get the latest version of this script from : <nl> - # http : / / git . savannah . gnu . org / gitweb / ? p = config . git ; a = blob_plain ; f = config . guess ; hb = HEAD <nl> + # https : / / git . savannah . gnu . org / gitweb / ? p = config . git ; a = blob_plain ; f = config . guess <nl> # <nl> # Please send patches to < config - patches @ gnu . org > . <nl> <nl> Usage : $ 0 [ OPTION ] <nl> <nl> Output the configuration name of the system \ ` $ me ' is run on . 
<nl> <nl> - Operation modes : <nl> + Options : <nl> - h , - - help print this help , then exit <nl> - t , - - time - stamp print date of last modification , then exit <nl> - v , - - version print version number , then exit <nl> version = " \ <nl> GNU config . guess ( $ timestamp ) <nl> <nl> Originally written by Per Bothner . <nl> - Copyright 1992 - 2014 Free Software Foundation , Inc . <nl> + Copyright 1992 - 2019 Free Software Foundation , Inc . <nl> <nl> This is free software ; see the source for copying conditions . There is NO <nl> warranty ; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . " <nl> if test $ # ! = 0 ; then <nl> exit 1 <nl> fi <nl> <nl> - trap ' exit 1 ' 1 2 15 <nl> - <nl> # CC_FOR_BUILD - - compiler used by this script . Note that the use of a <nl> # compiler to aid in system detection is discouraged as it requires <nl> # temporary files to be created and , as you can see below , it is a <nl> trap ' exit 1 ' 1 2 15 <nl> <nl> # Portable tmp directory creation inspired by the Autoconf team . <nl> <nl> - set_cc_for_build = ' <nl> - trap " exitcode = \ $ ? ; ( rm - f \ $ tmpfiles 2 > / dev / null ; rmdir \ $ tmp 2 > / dev / null ) & & exit \ $ exitcode " 0 ; <nl> - trap " rm - f \ $ tmpfiles 2 > / dev / null ; rmdir \ $ tmp 2 > / dev / null ; exit 1 " 1 2 13 15 ; <nl> - : $ { TMPDIR = / tmp } ; <nl> - { tmp = ` ( umask 077 & & mktemp - d " $ TMPDIR / cgXXXXXX " ) 2 > / dev / null ` & & test - n " $ tmp " & & test - d " $ tmp " ; } | | <nl> - { test - n " $ RANDOM " & & tmp = $ TMPDIR / cg $ $ - $ RANDOM & & ( umask 077 & & mkdir $ tmp ) ; } | | <nl> - { tmp = $ TMPDIR / cg - $ $ & & ( umask 077 & & mkdir $ tmp ) & & echo " Warning : creating insecure temp directory " > & 2 ; } | | <nl> - { echo " $ me : cannot create a temporary directory in $ TMPDIR " > & 2 ; exit 1 ; } ; <nl> - dummy = $ tmp / dummy ; <nl> - tmpfiles = " $ dummy . c $ dummy . o $ dummy . rel $ dummy " ; <nl> - case $ CC_FOR_BUILD , $ HOST_CC , $ CC in <nl> - , , ) echo " int x ; " > $ dummy . c ; <nl> - for c in cc gcc c89 c99 ; do <nl> - if ( $ c - c - o $ dummy . o $ dummy . c ) > / dev / null 2 > & 1 ; then <nl> - CC_FOR_BUILD = " $ c " ; break ; <nl> - fi ; <nl> - done ; <nl> - if test x " $ CC_FOR_BUILD " = x ; then <nl> - CC_FOR_BUILD = no_compiler_found ; <nl> - fi <nl> - ; ; <nl> - , , * ) CC_FOR_BUILD = $ CC ; ; <nl> - , * , * ) CC_FOR_BUILD = $ HOST_CC ; ; <nl> - esac ; set_cc_for_build = ; ' <nl> + tmp = <nl> + # shellcheck disable = SC2172 <nl> + trap ' test - z " $ tmp " | | rm - fr " $ tmp " ' 0 1 2 13 15 <nl> + <nl> + set_cc_for_build ( ) { <nl> + : " $ { TMPDIR = / tmp } " <nl> + # shellcheck disable = SC2039 <nl> + { tmp = ` ( umask 077 & & mktemp - d " $ TMPDIR / cgXXXXXX " ) 2 > / dev / null ` & & test - n " $ tmp " & & test - d " $ tmp " ; } | | <nl> + { test - n " $ RANDOM " & & tmp = $ TMPDIR / cg $ $ - $ RANDOM & & ( umask 077 & & mkdir " $ tmp " 2 > / dev / null ) ; } | | <nl> + { tmp = $ TMPDIR / cg - $ $ & & ( umask 077 & & mkdir " $ tmp " 2 > / dev / null ) & & echo " Warning : creating insecure temp directory " > & 2 ; } | | <nl> + { echo " $ me : cannot create a temporary directory in $ TMPDIR " > & 2 ; exit 1 ; } <nl> + dummy = $ tmp / dummy <nl> + case $ { CC_FOR_BUILD - } , $ { HOST_CC - } , $ { CC - } in <nl> + , , ) echo " int x ; " > " $ dummy . c " <nl> + for driver in cc gcc c89 c99 ; do <nl> + if ( $ driver - c - o " $ dummy . o " " $ dummy . 
c " ) > / dev / null 2 > & 1 ; then <nl> + CC_FOR_BUILD = " $ driver " <nl> + break <nl> + fi <nl> + done <nl> + if test x " $ CC_FOR_BUILD " = x ; then <nl> + CC_FOR_BUILD = no_compiler_found <nl> + fi <nl> + ; ; <nl> + , , * ) CC_FOR_BUILD = $ CC ; ; <nl> + , * , * ) CC_FOR_BUILD = $ HOST_CC ; ; <nl> + esac <nl> + } <nl> <nl> # This is needed to find uname on a Pyramid OSx when run in the BSD universe . <nl> # ( ghazi @ noc . rutgers . edu 1994 - 08 - 24 ) <nl> - if ( test - f / . attbin / uname ) > / dev / null 2 > & 1 ; then <nl> + if test - f / . attbin / uname ; then <nl> PATH = $ PATH : / . attbin ; export PATH <nl> fi <nl> <nl> UNAME_RELEASE = ` ( uname - r ) 2 > / dev / null ` | | UNAME_RELEASE = unknown <nl> UNAME_SYSTEM = ` ( uname - s ) 2 > / dev / null ` | | UNAME_SYSTEM = unknown <nl> UNAME_VERSION = ` ( uname - v ) 2 > / dev / null ` | | UNAME_VERSION = unknown <nl> <nl> - case " $ { UNAME_SYSTEM } " in <nl> + case " $ UNAME_SYSTEM " in <nl> Linux | GNU | GNU / * ) <nl> # If the system lacks a compiler , then just pick glibc . <nl> # We could probably try harder . <nl> LIBC = gnu <nl> <nl> - eval $ set_cc_for_build <nl> - cat < < - EOF > $ dummy . c <nl> + set_cc_for_build <nl> + cat < < - EOF > " $ dummy . c " <nl> # include < features . h > <nl> # if defined ( __UCLIBC__ ) <nl> LIBC = uclibc <nl> Linux | GNU | GNU / * ) <nl> LIBC = gnu <nl> # endif <nl> EOF <nl> - eval ` $ CC_FOR_BUILD - E $ dummy . c 2 > / dev / null | grep ' ^ LIBC ' | sed ' s , , , g ' ` <nl> + eval " ` $ CC_FOR_BUILD - E " $ dummy . c " 2 > / dev / null | grep ' ^ LIBC ' | sed ' s , , , g ' ` " <nl> + <nl> + # If ldd exists , use it to detect musl libc . <nl> + if command - v ldd > / dev / null & & \ <nl> + ldd - - version 2 > & 1 | grep - q ^ musl <nl> + then <nl> + LIBC = musl <nl> + fi <nl> ; ; <nl> esac <nl> <nl> # Note : order is significant - the case branches are not exclusive . <nl> <nl> - case " $ { UNAME_MACHINE } : $ { UNAME_SYSTEM } : $ { UNAME_RELEASE } : $ { UNAME_VERSION } " in <nl> + case " $ UNAME_MACHINE : $ UNAME_SYSTEM : $ UNAME_RELEASE : $ UNAME_VERSION " in <nl> * : NetBSD : * : * ) <nl> # NetBSD ( nbsd ) targets should ( where applicable ) match one or <nl> # more of the tuples : * - * - netbsdelf * , * - * - netbsdaout * , <nl> case " $ { UNAME_MACHINE } : $ { UNAME_SYSTEM } : $ { UNAME_RELEASE } : $ { UNAME_VERSION } " in <nl> # Note : NetBSD doesn ' t particularly care about the vendor <nl> # portion of the name . We always set it to " unknown " . <nl> sysctl = " sysctl - n hw . machine_arch " <nl> - UNAME_MACHINE_ARCH = ` ( / sbin / $ sysctl 2 > / dev / null | | \ <nl> - / usr / sbin / $ sysctl 2 > / dev / null | | echo unknown ) ` <nl> - case " $ { UNAME_MACHINE_ARCH } " in <nl> + UNAME_MACHINE_ARCH = ` ( uname - p 2 > / dev / null | | \ <nl> + " / sbin / $ sysctl " 2 > / dev / null | | \ <nl> + " / usr / sbin / $ sysctl " 2 > / dev / null | | \ <nl> + echo unknown ) ` <nl> + case " $ UNAME_MACHINE_ARCH " in <nl> armeb ) machine = armeb - unknown ; ; <nl> arm * ) machine = arm - unknown ; ; <nl> sh3el ) machine = shl - unknown ; ; <nl> sh3eb ) machine = sh - unknown ; ; <nl> sh5el ) machine = sh5le - unknown ; ; <nl> - * ) machine = $ { UNAME_MACHINE_ARCH } - unknown ; ; <nl> + earmv * ) <nl> + arch = ` echo " $ UNAME_MACHINE_ARCH " | sed - e ' s , ^ e \ ( armv [ 0 - 9 ] \ ) . * $ , \ 1 , ' ` <nl> + endian = ` echo " $ UNAME_MACHINE_ARCH " | sed - ne ' s , ^ . 
* \ ( eb \ ) $ , \ 1 , p ' ` <nl> + machine = " $ { arch } $ { endian } " - unknown <nl> + ; ; <nl> + * ) machine = " $ UNAME_MACHINE_ARCH " - unknown ; ; <nl> esac <nl> # The Operating System including object format , if it has switched <nl> - # to ELF recently , or will in the future . <nl> - case " $ { UNAME_MACHINE_ARCH } " in <nl> + # to ELF recently ( or will in the future ) and ABI . <nl> + case " $ UNAME_MACHINE_ARCH " in <nl> + earm * ) <nl> + os = netbsdelf <nl> + ; ; <nl> arm * | i386 | m68k | ns32k | sh3 * | sparc | vax ) <nl> - eval $ set_cc_for_build <nl> + set_cc_for_build <nl> if echo __ELF__ | $ CC_FOR_BUILD - E - 2 > / dev / null \ <nl> | grep - q __ELF__ <nl> then <nl> case " $ { UNAME_MACHINE } : $ { UNAME_SYSTEM } : $ { UNAME_RELEASE } : $ { UNAME_VERSION } " in <nl> os = netbsd <nl> ; ; <nl> esac <nl> + # Determine ABI tags . <nl> + case " $ UNAME_MACHINE_ARCH " in <nl> + earm * ) <nl> + expr = ' s / ^ earmv [ 0 - 9 ] / - eabi / ; s / eb $ / / ' <nl> + abi = ` echo " $ UNAME_MACHINE_ARCH " | sed - e " $ expr " ` <nl> + ; ; <nl> + esac <nl> # The OS release <nl> # Debian GNU / NetBSD machines have a different userland , and <nl> # thus , need a distinct triplet . However , they do not need <nl> # kernel version information , so it can be replaced with a <nl> # suitable tag , in the style of linux - gnu . <nl> - case " $ { UNAME_VERSION } " in <nl> + case " $ UNAME_VERSION " in <nl> Debian * ) <nl> release = ' - gnu ' <nl> ; ; <nl> * ) <nl> - release = ` echo $ { UNAME_RELEASE } | sed - e ' s / [ - _ ] . * / \ . / ' ` <nl> + release = ` echo " $ UNAME_RELEASE " | sed - e ' s / [ - _ ] . * / / ' | cut - d . - f1 , 2 ` <nl> ; ; <nl> esac <nl> # Since CPU_TYPE - MANUFACTURER - KERNEL - OPERATING_SYSTEM : <nl> # contains redundant information , the shorter form : <nl> # CPU_TYPE - MANUFACTURER - OPERATING_SYSTEM is used . <nl> - echo " $ { machine } - $ { os } $ { release } " <nl> + echo " $ machine - $ { os } $ { release } $ { abi - } " <nl> exit ; ; <nl> * : Bitrig : * : * ) <nl> UNAME_MACHINE_ARCH = ` arch | sed ' s / Bitrig . / / ' ` <nl> - echo $ { UNAME_MACHINE_ARCH } - unknown - bitrig $ { UNAME_RELEASE } <nl> + echo " $ UNAME_MACHINE_ARCH " - unknown - bitrig " $ UNAME_RELEASE " <nl> exit ; ; <nl> * : OpenBSD : * : * ) <nl> UNAME_MACHINE_ARCH = ` arch | sed ' s / OpenBSD . / / ' ` <nl> - echo $ { UNAME_MACHINE_ARCH } - unknown - openbsd $ { UNAME_RELEASE } <nl> + echo " $ UNAME_MACHINE_ARCH " - unknown - openbsd " $ UNAME_RELEASE " <nl> + exit ; ; <nl> + * : LibertyBSD : * : * ) <nl> + UNAME_MACHINE_ARCH = ` arch | sed ' s / ^ . * BSD \ . 
/ / ' ` <nl> + echo " $ UNAME_MACHINE_ARCH " - unknown - libertybsd " $ UNAME_RELEASE " <nl> + exit ; ; <nl> + * : MidnightBSD : * : * ) <nl> + echo " $ UNAME_MACHINE " - unknown - midnightbsd " $ UNAME_RELEASE " <nl> exit ; ; <nl> * : ekkoBSD : * : * ) <nl> - echo $ { UNAME_MACHINE } - unknown - ekkobsd $ { UNAME_RELEASE } <nl> + echo " $ UNAME_MACHINE " - unknown - ekkobsd " $ UNAME_RELEASE " <nl> exit ; ; <nl> * : SolidBSD : * : * ) <nl> - echo $ { UNAME_MACHINE } - unknown - solidbsd $ { UNAME_RELEASE } <nl> + echo " $ UNAME_MACHINE " - unknown - solidbsd " $ UNAME_RELEASE " <nl> + exit ; ; <nl> + * : OS108 : * : * ) <nl> + echo " $ UNAME_MACHINE " - unknown - os108_ " $ UNAME_RELEASE " <nl> exit ; ; <nl> macppc : MirBSD : * : * ) <nl> - echo powerpc - unknown - mirbsd $ { UNAME_RELEASE } <nl> + echo powerpc - unknown - mirbsd " $ UNAME_RELEASE " <nl> exit ; ; <nl> * : MirBSD : * : * ) <nl> - echo $ { UNAME_MACHINE } - unknown - mirbsd $ { UNAME_RELEASE } <nl> + echo " $ UNAME_MACHINE " - unknown - mirbsd " $ UNAME_RELEASE " <nl> + exit ; ; <nl> + * : Sortix : * : * ) <nl> + echo " $ UNAME_MACHINE " - unknown - sortix <nl> + exit ; ; <nl> + * : Twizzler : * : * ) <nl> + echo " $ UNAME_MACHINE " - unknown - twizzler <nl> + exit ; ; <nl> + * : Redox : * : * ) <nl> + echo " $ UNAME_MACHINE " - unknown - redox <nl> + exit ; ; <nl> + mips : OSF1 : * . * ) <nl> + echo mips - dec - osf1 <nl> exit ; ; <nl> alpha : OSF1 : * : * ) <nl> case $ UNAME_RELEASE in <nl> case " $ { UNAME_MACHINE } : $ { UNAME_SYSTEM } : $ { UNAME_RELEASE } : $ { UNAME_VERSION } " in <nl> ALPHA_CPU_TYPE = ` / usr / sbin / psrinfo - v | sed - n - e ' s / ^ The alpha \ ( . * \ ) processor . * $ / \ 1 / p ' | head - n 1 ` <nl> case " $ ALPHA_CPU_TYPE " in <nl> " EV4 ( 21064 ) " ) <nl> - UNAME_MACHINE = " alpha " ; ; <nl> + UNAME_MACHINE = alpha ; ; <nl> " EV4 . 5 ( 21064 ) " ) <nl> - UNAME_MACHINE = " alpha " ; ; <nl> + UNAME_MACHINE = alpha ; ; <nl> " LCA4 ( 21066 / 21068 ) " ) <nl> - UNAME_MACHINE = " alpha " ; ; <nl> + UNAME_MACHINE = alpha ; ; <nl> " EV5 ( 21164 ) " ) <nl> - UNAME_MACHINE = " alphaev5 " ; ; <nl> + UNAME_MACHINE = alphaev5 ; ; <nl> " EV5 . 6 ( 21164A ) " ) <nl> - UNAME_MACHINE = " alphaev56 " ; ; <nl> + UNAME_MACHINE = alphaev56 ; ; <nl> " EV5 . 6 ( 21164PC ) " ) <nl> - UNAME_MACHINE = " alphapca56 " ; ; <nl> + UNAME_MACHINE = alphapca56 ; ; <nl> " EV5 . 7 ( 21164PC ) " ) <nl> - UNAME_MACHINE = " alphapca57 " ; ; <nl> + UNAME_MACHINE = alphapca57 ; ; <nl> " EV6 ( 21264 ) " ) <nl> - UNAME_MACHINE = " alphaev6 " ; ; <nl> + UNAME_MACHINE = alphaev6 ; ; <nl> " EV6 . 7 ( 21264A ) " ) <nl> - UNAME_MACHINE = " alphaev67 " ; ; <nl> + UNAME_MACHINE = alphaev67 ; ; <nl> " EV6 . 8CB ( 21264C ) " ) <nl> - UNAME_MACHINE = " alphaev68 " ; ; <nl> + UNAME_MACHINE = alphaev68 ; ; <nl> " EV6 . 8AL ( 21264B ) " ) <nl> - UNAME_MACHINE = " alphaev68 " ; ; <nl> + UNAME_MACHINE = alphaev68 ; ; <nl> " EV6 . 8CX ( 21264D ) " ) <nl> - UNAME_MACHINE = " alphaev68 " ; ; <nl> + UNAME_MACHINE = alphaev68 ; ; <nl> " EV6 . 9A ( 21264 / EV69A ) " ) <nl> - UNAME_MACHINE = " alphaev69 " ; ; <nl> + UNAME_MACHINE = alphaev69 ; ; <nl> " EV7 ( 21364 ) " ) <nl> - UNAME_MACHINE = " alphaev7 " ; ; <nl> + UNAME_MACHINE = alphaev7 ; ; <nl> " EV7 . 9 ( 21364A ) " ) <nl> - UNAME_MACHINE = " alphaev79 " ; ; <nl> + UNAME_MACHINE = alphaev79 ; ; <nl> esac <nl> # A Pn . n version is a patched version . <nl> # A Vn . n version is a released version . <nl> # A Tn . n version is a released field test version . <nl> # A Xn . 
n version is an unreleased experimental baselevel . <nl> # 1 . 2 uses " 1 . 2 " for uname - r . <nl> - echo $ { UNAME_MACHINE } - dec - osf ` echo $ { UNAME_RELEASE } | sed - e ' s / ^ [ PVTX ] / / ' | tr ' ABCDEFGHIJKLMNOPQRSTUVWXYZ ' ' abcdefghijklmnopqrstuvwxyz ' ` <nl> + echo " $ UNAME_MACHINE " - dec - osf " ` echo " $ UNAME_RELEASE " | sed - e ' s / ^ [ PVTX ] / / ' | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz ` " <nl> # Reset EXIT trap before exiting to avoid spurious non - zero exit code . <nl> exitcode = $ ? <nl> trap ' ' 0 <nl> exit $ exitcode ; ; <nl> - Alpha \ * : Windows_NT * : * ) <nl> - # How do we know it ' s Interix rather than the generic POSIX subsystem ? <nl> - # Should we change UNAME_MACHINE based on the output of uname instead <nl> - # of the specific Alpha model ? <nl> - echo alpha - pc - interix <nl> - exit ; ; <nl> - 21064 : Windows_NT : 50 : 3 ) <nl> - echo alpha - dec - winnt3 . 5 <nl> - exit ; ; <nl> Amiga * : UNIX_System_V : 4 . 0 : * ) <nl> echo m68k - unknown - sysv4 <nl> exit ; ; <nl> * : [ Aa ] miga [ Oo ] [ Ss ] : * : * ) <nl> - echo $ { UNAME_MACHINE } - unknown - amigaos <nl> + echo " $ UNAME_MACHINE " - unknown - amigaos <nl> exit ; ; <nl> * : [ Mm ] orph [ Oo ] [ Ss ] : * : * ) <nl> - echo $ { UNAME_MACHINE } - unknown - morphos <nl> + echo " $ UNAME_MACHINE " - unknown - morphos <nl> exit ; ; <nl> * : OS / 390 : * : * ) <nl> echo i370 - ibm - openedition <nl> case " $ { UNAME_MACHINE } : $ { UNAME_SYSTEM } : $ { UNAME_RELEASE } : $ { UNAME_VERSION } " in <nl> echo powerpc - ibm - os400 <nl> exit ; ; <nl> arm : RISC * : 1 . [ 012 ] * : * | arm : riscix : 1 . [ 012 ] * : * ) <nl> - echo arm - acorn - riscix $ { UNAME_RELEASE } <nl> + echo arm - acorn - riscix " $ UNAME_RELEASE " <nl> exit ; ; <nl> arm * : riscos : * : * | arm * : RISCOS : * : * ) <nl> echo arm - unknown - riscos <nl> case " $ { UNAME_MACHINE } : $ { UNAME_SYSTEM } : $ { UNAME_RELEASE } : $ { UNAME_VERSION } " in <nl> sparc ) echo sparc - icl - nx7 ; exit ; ; <nl> esac ; ; <nl> s390x : SunOS : * : * ) <nl> - echo $ { UNAME_MACHINE } - ibm - solaris2 ` echo $ { UNAME_RELEASE } | sed - e ' s / [ ^ . ] * / / ' ` <nl> + echo " $ UNAME_MACHINE " - ibm - solaris2 " ` echo " $ UNAME_RELEASE " | sed - e ' s / [ ^ . ] * / / ' ` " <nl> exit ; ; <nl> sun4H : SunOS : 5 . * : * ) <nl> - echo sparc - hal - solaris2 ` echo $ { UNAME_RELEASE } | sed - e ' s / [ ^ . ] * / / ' ` <nl> + echo sparc - hal - solaris2 " ` echo " $ UNAME_RELEASE " | sed - e ' s / [ ^ . ] * / / ' ` " <nl> exit ; ; <nl> sun4 * : SunOS : 5 . * : * | tadpole * : SunOS : 5 . * : * ) <nl> - echo sparc - sun - solaris2 ` echo $ { UNAME_RELEASE } | sed - e ' s / [ ^ . ] * / / ' ` <nl> + echo sparc - sun - solaris2 " ` echo " $ UNAME_RELEASE " | sed - e ' s / [ ^ . ] * / / ' ` " <nl> exit ; ; <nl> i86pc : AuroraUX : 5 . * : * | i86xen : AuroraUX : 5 . * : * ) <nl> - echo i386 - pc - auroraux $ { UNAME_RELEASE } <nl> + echo i386 - pc - auroraux " $ UNAME_RELEASE " <nl> exit ; ; <nl> i86pc : SunOS : 5 . * : * | i86xen : SunOS : 5 . * : * ) <nl> - eval $ set_cc_for_build <nl> - SUN_ARCH = " i386 " <nl> + set_cc_for_build <nl> + SUN_ARCH = i386 <nl> # If there is a compiler , see if it is configured for 64 - bit objects . <nl> # Note that the Sun cc does not turn __LP64__ into 1 like gcc does . <nl> # This test works for both compilers . <nl> - if [ " $ CC_FOR_BUILD " ! = ' no_compiler_found ' ] ; then <nl> + if [ " $ CC_FOR_BUILD " ! 
= no_compiler_found ] ; then <nl> if ( echo ' # ifdef __amd64 ' ; echo IS_64BIT_ARCH ; echo ' # endif ' ) | \ <nl> - ( CCOPTS = $ CC_FOR_BUILD - E - 2 > / dev / null ) | \ <nl> + ( CCOPTS = " " $ CC_FOR_BUILD - E - 2 > / dev / null ) | \ <nl> grep IS_64BIT_ARCH > / dev / null <nl> then <nl> - SUN_ARCH = " x86_64 " <nl> + SUN_ARCH = x86_64 <nl> fi <nl> fi <nl> - echo $ { SUN_ARCH } - pc - solaris2 ` echo $ { UNAME_RELEASE } | sed - e ' s / [ ^ . ] * / / ' ` <nl> + echo " $ SUN_ARCH " - pc - solaris2 " ` echo " $ UNAME_RELEASE " | sed - e ' s / [ ^ . ] * / / ' ` " <nl> exit ; ; <nl> sun4 * : SunOS : 6 * : * ) <nl> # According to config . sub , this is the proper way to canonicalize <nl> # SunOS6 . Hard to guess exactly what SunOS6 will be like , but <nl> # it ' s likely to be more like Solaris than SunOS4 . <nl> - echo sparc - sun - solaris3 ` echo $ { UNAME_RELEASE } | sed - e ' s / [ ^ . ] * / / ' ` <nl> + echo sparc - sun - solaris3 " ` echo " $ UNAME_RELEASE " | sed - e ' s / [ ^ . ] * / / ' ` " <nl> exit ; ; <nl> sun4 * : SunOS : * : * ) <nl> case " ` / usr / bin / arch - k ` " in <nl> case " $ { UNAME_MACHINE } : $ { UNAME_SYSTEM } : $ { UNAME_RELEASE } : $ { UNAME_VERSION } " in <nl> ; ; <nl> esac <nl> # Japanese Language versions have a version number like ` 4 . 1 . 3 - JL ' . <nl> - echo sparc - sun - sunos ` echo $ { UNAME_RELEASE } | sed - e ' s / - / _ / ' ` <nl> + echo sparc - sun - sunos " ` echo " $ UNAME_RELEASE " | sed - e ' s / - / _ / ' ` " <nl> exit ; ; <nl> sun3 * : SunOS : * : * ) <nl> - echo m68k - sun - sunos $ { UNAME_RELEASE } <nl> + echo m68k - sun - sunos " $ UNAME_RELEASE " <nl> exit ; ; <nl> sun * : * : 4 . 2BSD : * ) <nl> UNAME_RELEASE = ` ( sed 1q / etc / motd | awk ' { print substr ( $ 5 , 1 , 3 ) } ' ) 2 > / dev / null ` <nl> - test " x $ { UNAME_RELEASE } " = " x " & & UNAME_RELEASE = 3 <nl> + test " x $ UNAME_RELEASE " = x & & UNAME_RELEASE = 3 <nl> case " ` / bin / arch ` " in <nl> sun3 ) <nl> - echo m68k - sun - sunos $ { UNAME_RELEASE } <nl> + echo m68k - sun - sunos " $ UNAME_RELEASE " <nl> ; ; <nl> sun4 ) <nl> - echo sparc - sun - sunos $ { UNAME_RELEASE } <nl> + echo sparc - sun - sunos " $ UNAME_RELEASE " <nl> ; ; <nl> esac <nl> exit ; ; <nl> aushp : SunOS : * : * ) <nl> - echo sparc - auspex - sunos $ { UNAME_RELEASE } <nl> + echo sparc - auspex - sunos " $ UNAME_RELEASE " <nl> exit ; ; <nl> # The situation for MiNT is a little confusing . The machine name <nl> # can be virtually everything ( everything which is not <nl> case " $ { UNAME_MACHINE } : $ { UNAME_SYSTEM } : $ { UNAME_RELEASE } : $ { UNAME_VERSION } " in <nl> # MiNT . But MiNT is downward compatible to TOS , so this should <nl> # be no problem . 
<nl> atarist [ e ] : * MiNT : * : * | atarist [ e ] : * mint : * : * | atarist [ e ] : * TOS : * : * ) <nl> - echo m68k - atari - mint $ { UNAME_RELEASE } <nl> + echo m68k - atari - mint " $ UNAME_RELEASE " <nl> exit ; ; <nl> atari * : * MiNT : * : * | atari * : * mint : * : * | atarist [ e ] : * TOS : * : * ) <nl> - echo m68k - atari - mint $ { UNAME_RELEASE } <nl> + echo m68k - atari - mint " $ UNAME_RELEASE " <nl> exit ; ; <nl> * falcon * : * MiNT : * : * | * falcon * : * mint : * : * | * falcon * : * TOS : * : * ) <nl> - echo m68k - atari - mint $ { UNAME_RELEASE } <nl> + echo m68k - atari - mint " $ UNAME_RELEASE " <nl> exit ; ; <nl> milan * : * MiNT : * : * | milan * : * mint : * : * | * milan * : * TOS : * : * ) <nl> - echo m68k - milan - mint $ { UNAME_RELEASE } <nl> + echo m68k - milan - mint " $ UNAME_RELEASE " <nl> exit ; ; <nl> hades * : * MiNT : * : * | hades * : * mint : * : * | * hades * : * TOS : * : * ) <nl> - echo m68k - hades - mint $ { UNAME_RELEASE } <nl> + echo m68k - hades - mint " $ UNAME_RELEASE " <nl> exit ; ; <nl> * : * MiNT : * : * | * : * mint : * : * | * : * TOS : * : * ) <nl> - echo m68k - unknown - mint $ { UNAME_RELEASE } <nl> + echo m68k - unknown - mint " $ UNAME_RELEASE " <nl> exit ; ; <nl> m68k : machten : * : * ) <nl> - echo m68k - apple - machten $ { UNAME_RELEASE } <nl> + echo m68k - apple - machten " $ UNAME_RELEASE " <nl> exit ; ; <nl> powerpc : machten : * : * ) <nl> - echo powerpc - apple - machten $ { UNAME_RELEASE } <nl> + echo powerpc - apple - machten " $ UNAME_RELEASE " <nl> exit ; ; <nl> RISC * : Mach : * : * ) <nl> echo mips - dec - mach_bsd4 . 3 <nl> exit ; ; <nl> RISC * : ULTRIX : * : * ) <nl> - echo mips - dec - ultrix $ { UNAME_RELEASE } <nl> + echo mips - dec - ultrix " $ UNAME_RELEASE " <nl> exit ; ; <nl> VAX * : ULTRIX * : * : * ) <nl> - echo vax - dec - ultrix $ { UNAME_RELEASE } <nl> + echo vax - dec - ultrix " $ UNAME_RELEASE " <nl> exit ; ; <nl> 2020 : CLIX : * : * | 2430 : CLIX : * : * ) <nl> - echo clipper - intergraph - clix $ { UNAME_RELEASE } <nl> + echo clipper - intergraph - clix " $ UNAME_RELEASE " <nl> exit ; ; <nl> mips : * : * : UMIPS | mips : * : * : RISCos ) <nl> - eval $ set_cc_for_build <nl> - sed ' s / ^ / / ' < < EOF > $ dummy . c <nl> + set_cc_for_build <nl> + sed ' s / ^ / / ' < < EOF > " $ dummy . c " <nl> # ifdef __cplusplus <nl> # include < stdio . h > / * for printf ( ) prototype * / <nl> int main ( int argc , char * argv [ ] ) { <nl> case " $ { UNAME_MACHINE } : $ { UNAME_SYSTEM } : $ { UNAME_RELEASE } : $ { UNAME_VERSION } " in <nl> # endif <nl> # if defined ( host_mips ) & & defined ( MIPSEB ) <nl> # if defined ( SYSTYPE_SYSV ) <nl> - printf ( " mips - mips - riscos % ssysv \ n " , argv [ 1 ] ) ; exit ( 0 ) ; <nl> + printf ( " mips - mips - riscos % ssysv \ \ n " , argv [ 1 ] ) ; exit ( 0 ) ; <nl> # endif <nl> # if defined ( SYSTYPE_SVR4 ) <nl> - printf ( " mips - mips - riscos % ssvr4 \ n " , argv [ 1 ] ) ; exit ( 0 ) ; <nl> + printf ( " mips - mips - riscos % ssvr4 \ \ n " , argv [ 1 ] ) ; exit ( 0 ) ; <nl> # endif <nl> # if defined ( SYSTYPE_BSD43 ) | | defined ( SYSTYPE_BSD ) <nl> - printf ( " mips - mips - riscos % sbsd \ n " , argv [ 1 ] ) ; exit ( 0 ) ; <nl> + printf ( " mips - mips - riscos % sbsd \ \ n " , argv [ 1 ] ) ; exit ( 0 ) ; <nl> # endif <nl> # endif <nl> exit ( - 1 ) ; <nl> } <nl> EOF <nl> - $ CC_FOR_BUILD - o $ dummy $ dummy . c & & <nl> - dummyarg = ` echo " $ { UNAME_RELEASE } " | sed - n ' s / \ ( [ 0 - 9 ] * \ ) . 
* / \ 1 / p ' ` & & <nl> - SYSTEM_NAME = ` $ dummy $ dummyarg ` & & <nl> + $ CC_FOR_BUILD - o " $ dummy " " $ dummy . c " & & <nl> + dummyarg = ` echo " $ UNAME_RELEASE " | sed - n ' s / \ ( [ 0 - 9 ] * \ ) . * / \ 1 / p ' ` & & <nl> + SYSTEM_NAME = ` " $ dummy " " $ dummyarg " ` & & <nl> { echo " $ SYSTEM_NAME " ; exit ; } <nl> - echo mips - mips - riscos $ { UNAME_RELEASE } <nl> + echo mips - mips - riscos " $ UNAME_RELEASE " <nl> exit ; ; <nl> Motorola : PowerMAX_OS : * : * ) <nl> echo powerpc - motorola - powermax <nl> EOF <nl> AViiON : dgux : * : * ) <nl> # DG / UX returns AViiON for all architectures <nl> UNAME_PROCESSOR = ` / usr / bin / uname - p ` <nl> - if [ $ UNAME_PROCESSOR = mc88100 ] | | [ $ UNAME_PROCESSOR = mc88110 ] <nl> + if [ " $ UNAME_PROCESSOR " = mc88100 ] | | [ " $ UNAME_PROCESSOR " = mc88110 ] <nl> then <nl> - if [ $ { TARGET_BINARY_INTERFACE } x = m88kdguxelfx ] | | \ <nl> - [ $ { TARGET_BINARY_INTERFACE } x = x ] <nl> + if [ " $ TARGET_BINARY_INTERFACE " x = m88kdguxelfx ] | | \ <nl> + [ " $ TARGET_BINARY_INTERFACE " x = x ] <nl> then <nl> - echo m88k - dg - dgux $ { UNAME_RELEASE } <nl> + echo m88k - dg - dgux " $ UNAME_RELEASE " <nl> else <nl> - echo m88k - dg - dguxbcs $ { UNAME_RELEASE } <nl> + echo m88k - dg - dguxbcs " $ UNAME_RELEASE " <nl> fi <nl> else <nl> - echo i586 - dg - dgux $ { UNAME_RELEASE } <nl> + echo i586 - dg - dgux " $ UNAME_RELEASE " <nl> fi <nl> exit ; ; <nl> M88 * : DolphinOS : * : * ) # DolphinOS ( SVR3 ) <nl> EOF <nl> echo m68k - tektronix - bsd <nl> exit ; ; <nl> * : IRIX * : * : * ) <nl> - echo mips - sgi - irix ` echo $ { UNAME_RELEASE } | sed - e ' s / - / _ / g ' ` <nl> + echo mips - sgi - irix " ` echo " $ UNAME_RELEASE " | sed - e ' s / - / _ / g ' ` " <nl> exit ; ; <nl> ? ? ? ? ? ? ? ? : AIX ? : [ 12 ] . 1 : 2 ) # AIX 2 . 2 . 1 or AIX 2 . 1 . 1 is RT / PC AIX . <nl> echo romp - ibm - aix # uname - m gives an 8 hex - code CPU id <nl> EOF <nl> if [ - x / usr / bin / oslevel ] ; then <nl> IBM_REV = ` / usr / bin / oslevel ` <nl> else <nl> - IBM_REV = $ { UNAME_VERSION } . $ { UNAME_RELEASE } <nl> + IBM_REV = " $ UNAME_VERSION . $ UNAME_RELEASE " <nl> fi <nl> - echo $ { UNAME_MACHINE } - ibm - aix $ { IBM_REV } <nl> + echo " $ UNAME_MACHINE " - ibm - aix " $ IBM_REV " <nl> exit ; ; <nl> * : AIX : 2 : 3 ) <nl> if grep bos325 / usr / include / stdio . h > / dev / null 2 > & 1 ; then <nl> - eval $ set_cc_for_build <nl> - sed ' s / ^ / / ' < < EOF > $ dummy . c <nl> + set_cc_for_build <nl> + sed ' s / ^ / / ' < < EOF > " $ dummy . c " <nl> # include < sys / systemcfg . h > <nl> <nl> main ( ) <nl> EOF <nl> exit ( 0 ) ; <nl> } <nl> EOF <nl> - if $ CC_FOR_BUILD - o $ dummy $ dummy . c & & SYSTEM_NAME = ` $ dummy ` <nl> + if $ CC_FOR_BUILD - o " $ dummy " " $ dummy . c " & & SYSTEM_NAME = ` " $ dummy " ` <nl> then <nl> echo " $ SYSTEM_NAME " <nl> else <nl> EOF <nl> exit ; ; <nl> * : AIX : * : [ 4567 ] ) <nl> IBM_CPU_ID = ` / usr / sbin / lsdev - C - c processor - S available | sed 1q | awk ' { print $ 1 } ' ` <nl> - if / usr / sbin / lsattr - El $ { IBM_CPU_ID } | grep ' POWER ' > / dev / null 2 > & 1 ; then <nl> + if / usr / sbin / lsattr - El " $ IBM_CPU_ID " | grep ' POWER ' > / dev / null 2 > & 1 ; then <nl> IBM_ARCH = rs6000 <nl> else <nl> IBM_ARCH = powerpc <nl> EOF <nl> IBM_REV = ` / usr / bin / lslpp - Lqc bos . rte . libc | <nl> awk - F : ' { print $ 3 } ' | sed s / [ 0 - 9 ] * $ / 0 / ` <nl> else <nl> - IBM_REV = $ { UNAME_VERSION } . $ { UNAME_RELEASE } <nl> + IBM_REV = " $ UNAME_VERSION . 
$ UNAME_RELEASE " <nl> fi <nl> - echo $ { IBM_ARCH } - ibm - aix $ { IBM_REV } <nl> + echo " $ IBM_ARCH " - ibm - aix " $ IBM_REV " <nl> exit ; ; <nl> * : AIX : * : * ) <nl> echo rs6000 - ibm - aix <nl> exit ; ; <nl> - ibmrt : 4 . 4BSD : * | romp - ibm : BSD : * ) <nl> + ibmrt : 4 . 4BSD : * | romp - ibm : 4 . 4BSD : * ) <nl> echo romp - ibm - bsd4 . 4 <nl> exit ; ; <nl> ibmrt : * BSD : * | romp - ibm : BSD : * ) # covers RT / PC BSD and <nl> - echo romp - ibm - bsd $ { UNAME_RELEASE } # 4 . 3 with uname added to <nl> + echo romp - ibm - bsd " $ UNAME_RELEASE " # 4 . 3 with uname added to <nl> exit ; ; # report : romp - ibm BSD 4 . 3 <nl> * : BOSX : * : * ) <nl> echo rs6000 - bull - bosx <nl> EOF <nl> echo m68k - hp - bsd4 . 4 <nl> exit ; ; <nl> 9000 / [ 34678 ] ? ? : HP - UX : * : * ) <nl> - HPUX_REV = ` echo $ { UNAME_RELEASE } | sed - e ' s / [ ^ . ] * . [ 0B ] * / / ' ` <nl> - case " $ { UNAME_MACHINE } " in <nl> - 9000 / 31 ? ) HP_ARCH = m68000 ; ; <nl> - 9000 / [ 34 ] ? ? ) HP_ARCH = m68k ; ; <nl> + HPUX_REV = ` echo " $ UNAME_RELEASE " | sed - e ' s / [ ^ . ] * . [ 0B ] * / / ' ` <nl> + case " $ UNAME_MACHINE " in <nl> + 9000 / 31 ? ) HP_ARCH = m68000 ; ; <nl> + 9000 / [ 34 ] ? ? ) HP_ARCH = m68k ; ; <nl> 9000 / [ 678 ] [ 0 - 9 ] [ 0 - 9 ] ) <nl> if [ - x / usr / bin / getconf ] ; then <nl> sc_cpu_version = ` / usr / bin / getconf SC_CPU_VERSION 2 > / dev / null ` <nl> sc_kernel_bits = ` / usr / bin / getconf SC_KERNEL_BITS 2 > / dev / null ` <nl> - case " $ { sc_cpu_version } " in <nl> - 523 ) HP_ARCH = " hppa1 . 0 " ; ; # CPU_PA_RISC1_0 <nl> - 528 ) HP_ARCH = " hppa1 . 1 " ; ; # CPU_PA_RISC1_1 <nl> + case " $ sc_cpu_version " in <nl> + 523 ) HP_ARCH = hppa1 . 0 ; ; # CPU_PA_RISC1_0 <nl> + 528 ) HP_ARCH = hppa1 . 1 ; ; # CPU_PA_RISC1_1 <nl> 532 ) # CPU_PA_RISC2_0 <nl> - case " $ { sc_kernel_bits } " in <nl> - 32 ) HP_ARCH = " hppa2 . 0n " ; ; <nl> - 64 ) HP_ARCH = " hppa2 . 0w " ; ; <nl> - ' ' ) HP_ARCH = " hppa2 . 0 " ; ; # HP - UX 10 . 20 <nl> + case " $ sc_kernel_bits " in <nl> + 32 ) HP_ARCH = hppa2 . 0n ; ; <nl> + 64 ) HP_ARCH = hppa2 . 0w ; ; <nl> + ' ' ) HP_ARCH = hppa2 . 0 ; ; # HP - UX 10 . 20 <nl> esac ; ; <nl> esac <nl> fi <nl> - if [ " $ { HP_ARCH } " = " " ] ; then <nl> - eval $ set_cc_for_build <nl> - sed ' s / ^ / / ' < < EOF > $ dummy . c <nl> + if [ " $ HP_ARCH " = " " ] ; then <nl> + set_cc_for_build <nl> + sed ' s / ^ / / ' < < EOF > " $ dummy . c " <nl> <nl> # define _HPUX_SOURCE <nl> # include < stdlib . h > <nl> EOF <nl> exit ( 0 ) ; <nl> } <nl> EOF <nl> - ( CCOPTS = $ CC_FOR_BUILD - o $ dummy $ dummy . c 2 > / dev / null ) & & HP_ARCH = ` $ dummy ` <nl> + ( CCOPTS = " " $ CC_FOR_BUILD - o " $ dummy " " $ dummy . c " 2 > / dev / null ) & & HP_ARCH = ` " $ dummy " ` <nl> test - z " $ HP_ARCH " & & HP_ARCH = hppa <nl> fi ; ; <nl> esac <nl> - if [ $ { HP_ARCH } = " hppa2 . 0w " ] <nl> + if [ " $ HP_ARCH " = hppa2 . 0w ] <nl> then <nl> - eval $ set_cc_for_build <nl> + set_cc_for_build <nl> <nl> # hppa2 . 0w - hp - hpux * has a 64 - bit kernel and a compiler generating <nl> # 32 - bit code . hppa64 - hp - hpux * has the same kernel and a compiler <nl> EOF <nl> # $ CC_FOR_BUILD = " cc + DA2 . 0w " . / config . guess <nl> # = > hppa64 - hp - hpux11 . 23 <nl> <nl> - if echo __LP64__ | ( CCOPTS = $ CC_FOR_BUILD - E - 2 > / dev / null ) | <nl> + if echo __LP64__ | ( CCOPTS = " " $ CC_FOR_BUILD - E - 2 > / dev / null ) | <nl> grep - q __LP64__ <nl> then <nl> - HP_ARCH = " hppa2 . 0w " <nl> + HP_ARCH = hppa2 . 
0w <nl> else <nl> - HP_ARCH = " hppa64 " <nl> + HP_ARCH = hppa64 <nl> fi <nl> fi <nl> - echo $ { HP_ARCH } - hp - hpux $ { HPUX_REV } <nl> + echo " $ HP_ARCH " - hp - hpux " $ HPUX_REV " <nl> exit ; ; <nl> ia64 : HP - UX : * : * ) <nl> - HPUX_REV = ` echo $ { UNAME_RELEASE } | sed - e ' s / [ ^ . ] * . [ 0B ] * / / ' ` <nl> - echo ia64 - hp - hpux $ { HPUX_REV } <nl> + HPUX_REV = ` echo " $ UNAME_RELEASE " | sed - e ' s / [ ^ . ] * . [ 0B ] * / / ' ` <nl> + echo ia64 - hp - hpux " $ HPUX_REV " <nl> exit ; ; <nl> 3050 * : HI - UX : * : * ) <nl> - eval $ set_cc_for_build <nl> - sed ' s / ^ / / ' < < EOF > $ dummy . c <nl> + set_cc_for_build <nl> + sed ' s / ^ / / ' < < EOF > " $ dummy . c " <nl> # include < unistd . h > <nl> int <nl> main ( ) <nl> EOF <nl> exit ( 0 ) ; <nl> } <nl> EOF <nl> - $ CC_FOR_BUILD - o $ dummy $ dummy . c & & SYSTEM_NAME = ` $ dummy ` & & <nl> + $ CC_FOR_BUILD - o " $ dummy " " $ dummy . c " & & SYSTEM_NAME = ` " $ dummy " ` & & <nl> { echo " $ SYSTEM_NAME " ; exit ; } <nl> echo unknown - hitachi - hiuxwe2 <nl> exit ; ; <nl> - 9000 / 7 ? ? : 4 . 3bsd : * : * | 9000 / 8 ? [ 79 ] : 4 . 3bsd : * : * ) <nl> + 9000 / 7 ? ? : 4 . 3bsd : * : * | 9000 / 8 ? [ 79 ] : 4 . 3bsd : * : * ) <nl> echo hppa1 . 1 - hp - bsd <nl> exit ; ; <nl> 9000 / 8 ? ? : 4 . 3bsd : * : * ) <nl> EOF <nl> * 9 ? ? * : MPE / iX : * : * | * 3000 * : MPE / iX : * : * ) <nl> echo hppa1 . 0 - hp - mpeix <nl> exit ; ; <nl> - hp7 ? ? : OSF1 : * : * | hp8 ? [ 79 ] : OSF1 : * : * ) <nl> + hp7 ? ? : OSF1 : * : * | hp8 ? [ 79 ] : OSF1 : * : * ) <nl> echo hppa1 . 1 - hp - osf <nl> exit ; ; <nl> hp8 ? ? : OSF1 : * : * ) <nl> EOF <nl> exit ; ; <nl> i * 86 : OSF1 : * : * ) <nl> if [ - x / usr / sbin / sysversion ] ; then <nl> - echo $ { UNAME_MACHINE } - unknown - osf1mk <nl> + echo " $ UNAME_MACHINE " - unknown - osf1mk <nl> else <nl> - echo $ { UNAME_MACHINE } - unknown - osf1 <nl> + echo " $ UNAME_MACHINE " - unknown - osf1 <nl> fi <nl> exit ; ; <nl> parisc * : Lites * : * : * ) <nl> EOF <nl> echo c4 - convex - bsd <nl> exit ; ; <nl> CRAY * Y - MP : * : * : * ) <nl> - echo ymp - cray - unicos $ { UNAME_RELEASE } | sed - e ' s / \ . [ ^ . ] * $ / . X / ' <nl> + echo ymp - cray - unicos " $ UNAME_RELEASE " | sed - e ' s / \ . [ ^ . ] * $ / . X / ' <nl> exit ; ; <nl> CRAY * [ A - Z ] 90 : * : * : * ) <nl> - echo $ { UNAME_MACHINE } - cray - unicos $ { UNAME_RELEASE } \ <nl> + echo " $ UNAME_MACHINE " - cray - unicos " $ UNAME_RELEASE " \ <nl> | sed - e ' s / CRAY . * \ ( [ A - Z ] 90 \ ) / \ 1 / ' \ <nl> - e y / ABCDEFGHIJKLMNOPQRSTUVWXYZ / abcdefghijklmnopqrstuvwxyz / \ <nl> - e ' s / \ . [ ^ . ] * $ / . X / ' <nl> exit ; ; <nl> CRAY * TS : * : * : * ) <nl> - echo t90 - cray - unicos $ { UNAME_RELEASE } | sed - e ' s / \ . [ ^ . ] * $ / . X / ' <nl> + echo t90 - cray - unicos " $ UNAME_RELEASE " | sed - e ' s / \ . [ ^ . ] * $ / . X / ' <nl> exit ; ; <nl> CRAY * T3E : * : * : * ) <nl> - echo alphaev5 - cray - unicosmk $ { UNAME_RELEASE } | sed - e ' s / \ . [ ^ . ] * $ / . X / ' <nl> + echo alphaev5 - cray - unicosmk " $ UNAME_RELEASE " | sed - e ' s / \ . [ ^ . ] * $ / . X / ' <nl> exit ; ; <nl> CRAY * SV1 : * : * : * ) <nl> - echo sv1 - cray - unicos $ { UNAME_RELEASE } | sed - e ' s / \ . [ ^ . ] * $ / . X / ' <nl> + echo sv1 - cray - unicos " $ UNAME_RELEASE " | sed - e ' s / \ . [ ^ . ] * $ / . X / ' <nl> exit ; ; <nl> * : UNICOS / mp : * : * ) <nl> - echo craynv - cray - unicosmp $ { UNAME_RELEASE } | sed - e ' s / \ . [ ^ . ] * $ / . 
X / ' <nl> + echo craynv - cray - unicosmp " $ UNAME_RELEASE " | sed - e ' s / \ . [ ^ . ] * $ / . X / ' <nl> exit ; ; <nl> F30 [ 01 ] : UNIX_System_V : * : * | F700 : UNIX_System_V : * : * ) <nl> - FUJITSU_PROC = ` uname - m | tr ' ABCDEFGHIJKLMNOPQRSTUVWXYZ ' ' abcdefghijklmnopqrstuvwxyz ' ` <nl> - FUJITSU_SYS = ` uname - p | tr ' ABCDEFGHIJKLMNOPQRSTUVWXYZ ' ' abcdefghijklmnopqrstuvwxyz ' | sed - e ' s / \ / / / ' ` <nl> - FUJITSU_REL = ` echo $ { UNAME_RELEASE } | sed - e ' s / / _ / ' ` <nl> + FUJITSU_PROC = ` uname - m | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz ` <nl> + FUJITSU_SYS = ` uname - p | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed - e ' s / \ / / / ' ` <nl> + FUJITSU_REL = ` echo " $ UNAME_RELEASE " | sed - e ' s / / _ / ' ` <nl> echo " $ { FUJITSU_PROC } - fujitsu - $ { FUJITSU_SYS } $ { FUJITSU_REL } " <nl> exit ; ; <nl> 5000 : UNIX_System_V : 4 . * : * ) <nl> - FUJITSU_SYS = ` uname - p | tr ' ABCDEFGHIJKLMNOPQRSTUVWXYZ ' ' abcdefghijklmnopqrstuvwxyz ' | sed - e ' s / \ / / / ' ` <nl> - FUJITSU_REL = ` echo $ { UNAME_RELEASE } | tr ' ABCDEFGHIJKLMNOPQRSTUVWXYZ ' ' abcdefghijklmnopqrstuvwxyz ' | sed - e ' s / / _ / ' ` <nl> + FUJITSU_SYS = ` uname - p | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed - e ' s / \ / / / ' ` <nl> + FUJITSU_REL = ` echo " $ UNAME_RELEASE " | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed - e ' s / / _ / ' ` <nl> echo " sparc - fujitsu - $ { FUJITSU_SYS } $ { FUJITSU_REL } " <nl> exit ; ; <nl> i * 86 : BSD / 386 : * : * | i * 86 : BSD / OS : * : * | * : Ascend \ Embedded / OS : * : * ) <nl> - echo $ { UNAME_MACHINE } - pc - bsdi $ { UNAME_RELEASE } <nl> + echo " $ UNAME_MACHINE " - pc - bsdi " $ UNAME_RELEASE " <nl> exit ; ; <nl> sparc * : BSD / OS : * : * ) <nl> - echo sparc - unknown - bsdi $ { UNAME_RELEASE } <nl> + echo sparc - unknown - bsdi " $ UNAME_RELEASE " <nl> exit ; ; <nl> * : BSD / OS : * : * ) <nl> - echo $ { UNAME_MACHINE } - unknown - bsdi $ { UNAME_RELEASE } <nl> + echo " $ UNAME_MACHINE " - unknown - bsdi " $ UNAME_RELEASE " <nl> + exit ; ; <nl> + arm : FreeBSD : * : * ) <nl> + UNAME_PROCESSOR = ` uname - p ` <nl> + set_cc_for_build <nl> + if echo __ARM_PCS_VFP | $ CC_FOR_BUILD - E - 2 > / dev / null \ <nl> + | grep - q __ARM_PCS_VFP <nl> + then <nl> + echo " $ { UNAME_PROCESSOR } " - unknown - freebsd " ` echo $ { UNAME_RELEASE } | sed - e ' s / [ - ( ] . * / / ' ` " - gnueabi <nl> + else <nl> + echo " $ { UNAME_PROCESSOR } " - unknown - freebsd " ` echo $ { UNAME_RELEASE } | sed - e ' s / [ - ( ] . * / / ' ` " - gnueabihf <nl> + fi <nl> exit ; ; <nl> * : FreeBSD : * : * ) <nl> UNAME_PROCESSOR = ` / usr / bin / uname - p ` <nl> - case $ { UNAME_PROCESSOR } in <nl> + case " $ UNAME_PROCESSOR " in <nl> amd64 ) <nl> - echo x86_64 - unknown - freebsd ` echo $ { UNAME_RELEASE } | sed - e ' s / [ - ( ] . * / / ' ` ; ; <nl> - * ) <nl> - echo $ { UNAME_PROCESSOR } - unknown - freebsd ` echo $ { UNAME_RELEASE } | sed - e ' s / [ - ( ] . * / / ' ` ; ; <nl> + UNAME_PROCESSOR = x86_64 ; ; <nl> + i386 ) <nl> + UNAME_PROCESSOR = i586 ; ; <nl> esac <nl> + echo " $ UNAME_PROCESSOR " - unknown - freebsd " ` echo " $ UNAME_RELEASE " | sed - e ' s / [ - ( ] . 
* / / ' ` " <nl> exit ; ; <nl> i * : CYGWIN * : * ) <nl> - echo $ { UNAME_MACHINE } - pc - cygwin <nl> + echo " $ UNAME_MACHINE " - pc - cygwin <nl> exit ; ; <nl> * : MINGW64 * : * ) <nl> - echo $ { UNAME_MACHINE } - pc - mingw64 <nl> + echo " $ UNAME_MACHINE " - pc - mingw64 <nl> exit ; ; <nl> * : MINGW * : * ) <nl> - echo $ { UNAME_MACHINE } - pc - mingw32 <nl> + echo " $ UNAME_MACHINE " - pc - mingw32 <nl> exit ; ; <nl> * : MSYS * : * ) <nl> - echo $ { UNAME_MACHINE } - pc - msys <nl> - exit ; ; <nl> - i * : windows32 * : * ) <nl> - # uname - m includes " - pc " on this system . <nl> - echo $ { UNAME_MACHINE } - mingw32 <nl> + echo " $ UNAME_MACHINE " - pc - msys <nl> exit ; ; <nl> i * : PW * : * ) <nl> - echo $ { UNAME_MACHINE } - pc - pw32 <nl> + echo " $ UNAME_MACHINE " - pc - pw32 <nl> exit ; ; <nl> * : Interix * : * ) <nl> - case $ { UNAME_MACHINE } in <nl> + case " $ UNAME_MACHINE " in <nl> x86 ) <nl> - echo i586 - pc - interix $ { UNAME_RELEASE } <nl> + echo i586 - pc - interix " $ UNAME_RELEASE " <nl> exit ; ; <nl> authenticamd | genuineintel | EM64T ) <nl> - echo x86_64 - unknown - interix $ { UNAME_RELEASE } <nl> + echo x86_64 - unknown - interix " $ UNAME_RELEASE " <nl> exit ; ; <nl> IA64 ) <nl> - echo ia64 - unknown - interix $ { UNAME_RELEASE } <nl> + echo ia64 - unknown - interix " $ UNAME_RELEASE " <nl> exit ; ; <nl> esac ; ; <nl> - [ 345 ] 86 : Windows_95 : * | [ 345 ] 86 : Windows_98 : * | [ 345 ] 86 : Windows_NT : * ) <nl> - echo i $ { UNAME_MACHINE } - pc - mks <nl> - exit ; ; <nl> - 8664 : Windows_NT : * ) <nl> - echo x86_64 - pc - mks <nl> - exit ; ; <nl> - i * : Windows_NT * : * | Pentium * : Windows_NT * : * ) <nl> - # How do we know it ' s Interix rather than the generic POSIX subsystem ? <nl> - # It also conflicts with pre - 2 . 0 versions of AT & T UWIN . Should we <nl> - # UNAME_MACHINE based on the output of uname instead of i386 ? <nl> - echo i586 - pc - interix <nl> - exit ; ; <nl> i * : UWIN * : * ) <nl> - echo $ { UNAME_MACHINE } - pc - uwin <nl> + echo " $ UNAME_MACHINE " - pc - uwin <nl> exit ; ; <nl> amd64 : CYGWIN * : * : * | x86_64 : CYGWIN * : * : * ) <nl> - echo x86_64 - unknown - cygwin <nl> - exit ; ; <nl> - p * : CYGWIN * : * ) <nl> - echo powerpcle - unknown - cygwin <nl> + echo x86_64 - pc - cygwin <nl> exit ; ; <nl> prep * : SunOS : 5 . * : * ) <nl> - echo powerpcle - unknown - solaris2 ` echo $ { UNAME_RELEASE } | sed - e ' s / [ ^ . ] * / / ' ` <nl> + echo powerpcle - unknown - solaris2 " ` echo " $ UNAME_RELEASE " | sed - e ' s / [ ^ . ] * / / ' ` " <nl> exit ; ; <nl> * : GNU : * : * ) <nl> # the GNU system <nl> - echo ` echo $ { UNAME_MACHINE } | sed - e ' s , [ - / ] . * $ , , ' ` - unknown - $ { LIBC } ` echo $ { UNAME_RELEASE } | sed - e ' s , / . * $ , , ' ` <nl> + echo " ` echo " $ UNAME_MACHINE " | sed - e ' s , [ - / ] . * $ , , ' ` - unknown - $ LIBC ` echo " $ UNAME_RELEASE " | sed - e ' s , / . * $ , , ' ` " <nl> exit ; ; <nl> * : GNU / * : * : * ) <nl> # other systems with GNU libc and userland <nl> - echo $ { UNAME_MACHINE } - unknown - ` echo $ { UNAME_SYSTEM } | sed ' s , ^ [ ^ / ] * / , , ' | tr ' [ A - Z ] ' ' [ a - z ] ' ` ` echo $ { UNAME_RELEASE } | sed - e ' s / [ - ( ] . * / / ' ` - $ { LIBC } <nl> + echo " $ UNAME_MACHINE - unknown - ` echo " $ UNAME_SYSTEM " | sed ' s , ^ [ ^ / ] * / , , ' | tr " [ : upper : ] " " [ : lower : ] " ` ` echo " $ UNAME_RELEASE " | sed - e ' s / [ - ( ] . 
* / / ' ` - $ LIBC " <nl> exit ; ; <nl> - i * 86 : Minix : * : * ) <nl> - echo $ { UNAME_MACHINE } - pc - minix <nl> + * : Minix : * : * ) <nl> + echo " $ UNAME_MACHINE " - unknown - minix <nl> exit ; ; <nl> aarch64 : Linux : * : * ) <nl> - echo $ { UNAME_MACHINE } - unknown - linux - $ { LIBC } <nl> + echo " $ UNAME_MACHINE " - unknown - linux - " $ LIBC " <nl> exit ; ; <nl> aarch64_be : Linux : * : * ) <nl> UNAME_MACHINE = aarch64_be <nl> - echo $ { UNAME_MACHINE } - unknown - linux - $ { LIBC } <nl> + echo " $ UNAME_MACHINE " - unknown - linux - " $ LIBC " <nl> exit ; ; <nl> alpha : Linux : * : * ) <nl> case ` sed - n ' / ^ cpu model / s / ^ . * : \ ( . * \ ) / \ 1 / p ' < / proc / cpuinfo ` in <nl> EOF <nl> EV68 * ) UNAME_MACHINE = alphaev68 ; ; <nl> esac <nl> objdump - - private - headers / bin / sh | grep - q ld . so . 1 <nl> - if test " $ ? " = 0 ; then LIBC = " gnulibc1 " ; fi <nl> - echo $ { UNAME_MACHINE } - unknown - linux - $ { LIBC } <nl> + if test " $ ? " = 0 ; then LIBC = gnulibc1 ; fi <nl> + echo " $ UNAME_MACHINE " - unknown - linux - " $ LIBC " <nl> exit ; ; <nl> arc : Linux : * : * | arceb : Linux : * : * ) <nl> - echo $ { UNAME_MACHINE } - unknown - linux - $ { LIBC } <nl> + echo " $ UNAME_MACHINE " - unknown - linux - " $ LIBC " <nl> exit ; ; <nl> arm * : Linux : * : * ) <nl> - eval $ set_cc_for_build <nl> + set_cc_for_build <nl> if echo __ARM_EABI__ | $ CC_FOR_BUILD - E - 2 > / dev / null \ <nl> | grep - q __ARM_EABI__ <nl> then <nl> - echo $ { UNAME_MACHINE } - unknown - linux - $ { LIBC } <nl> + echo " $ UNAME_MACHINE " - unknown - linux - " $ LIBC " <nl> else <nl> if echo __ARM_PCS_VFP | $ CC_FOR_BUILD - E - 2 > / dev / null \ <nl> | grep - q __ARM_PCS_VFP <nl> then <nl> - echo $ { UNAME_MACHINE } - unknown - linux - $ { LIBC } eabi <nl> + echo " $ UNAME_MACHINE " - unknown - linux - " $ LIBC " eabi <nl> else <nl> - echo $ { UNAME_MACHINE } - unknown - linux - $ { LIBC } eabihf <nl> + echo " $ UNAME_MACHINE " - unknown - linux - " $ LIBC " eabihf <nl> fi <nl> fi <nl> exit ; ; <nl> avr32 * : Linux : * : * ) <nl> - echo $ { UNAME_MACHINE } - unknown - linux - $ { LIBC } <nl> + echo " $ UNAME_MACHINE " - unknown - linux - " $ LIBC " <nl> exit ; ; <nl> cris : Linux : * : * ) <nl> - echo $ { UNAME_MACHINE } - axis - linux - $ { LIBC } <nl> + echo " $ UNAME_MACHINE " - axis - linux - " $ LIBC " <nl> exit ; ; <nl> crisv32 : Linux : * : * ) <nl> - echo $ { UNAME_MACHINE } - axis - linux - $ { LIBC } <nl> + echo " $ UNAME_MACHINE " - axis - linux - " $ LIBC " <nl> + exit ; ; <nl> + e2k : Linux : * : * ) <nl> + echo " $ UNAME_MACHINE " - unknown - linux - " $ LIBC " <nl> exit ; ; <nl> frv : Linux : * : * ) <nl> - echo $ { UNAME_MACHINE } - unknown - linux - $ { LIBC } <nl> + echo " $ UNAME_MACHINE " - unknown - linux - " $ LIBC " <nl> exit ; ; <nl> hexagon : Linux : * : * ) <nl> - echo $ { UNAME_MACHINE } - unknown - linux - $ { LIBC } <nl> + echo " $ UNAME_MACHINE " - unknown - linux - " $ LIBC " <nl> exit ; ; <nl> i * 86 : Linux : * : * ) <nl> - echo $ { UNAME_MACHINE } - pc - linux - $ { LIBC } <nl> + echo " $ UNAME_MACHINE " - pc - linux - " $ LIBC " <nl> exit ; ; <nl> ia64 : Linux : * : * ) <nl> - echo $ { UNAME_MACHINE } - unknown - linux - $ { LIBC } <nl> + echo " $ UNAME_MACHINE " - unknown - linux - " $ LIBC " <nl> + exit ; ; <nl> + k1om : Linux : * : * ) <nl> + echo " $ UNAME_MACHINE " - unknown - linux - " $ LIBC " <nl> exit ; ; <nl> m32r * : Linux : * : * ) <nl> - echo $ { UNAME_MACHINE } - unknown - linux - $ { LIBC } <nl> + echo " $ UNAME_MACHINE " - unknown - 
linux - " $ LIBC " <nl> exit ; ; <nl> m68 * : Linux : * : * ) <nl> - echo $ { UNAME_MACHINE } - unknown - linux - $ { LIBC } <nl> + echo " $ UNAME_MACHINE " - unknown - linux - " $ LIBC " <nl> exit ; ; <nl> mips : Linux : * : * | mips64 : Linux : * : * ) <nl> - eval $ set_cc_for_build <nl> - sed ' s / ^ / / ' < < EOF > $ dummy . c <nl> + set_cc_for_build <nl> + IS_GLIBC = 0 <nl> + test x " $ { LIBC } " = xgnu & & IS_GLIBC = 1 <nl> + sed ' s / ^ / / ' < < EOF > " $ dummy . c " <nl> # undef CPU <nl> - # undef $ { UNAME_MACHINE } <nl> - # undef $ { UNAME_MACHINE } el <nl> + # undef mips <nl> + # undef mipsel <nl> + # undef mips64 <nl> + # undef mips64el <nl> + # if $ { IS_GLIBC } & & defined ( _ABI64 ) <nl> + LIBCABI = gnuabi64 <nl> + # else <nl> + # if $ { IS_GLIBC } & & defined ( _ABIN32 ) <nl> + LIBCABI = gnuabin32 <nl> + # else <nl> + LIBCABI = $ { LIBC } <nl> + # endif <nl> + # endif <nl> + <nl> + # if $ { IS_GLIBC } & & defined ( __mips64 ) & & defined ( __mips_isa_rev ) & & __mips_isa_rev > = 6 <nl> + CPU = mipsisa64r6 <nl> + # else <nl> + # if $ { IS_GLIBC } & & ! defined ( __mips64 ) & & defined ( __mips_isa_rev ) & & __mips_isa_rev > = 6 <nl> + CPU = mipsisa32r6 <nl> + # else <nl> + # if defined ( __mips64 ) <nl> + CPU = mips64 <nl> + # else <nl> + CPU = mips <nl> + # endif <nl> + # endif <nl> + # endif <nl> + <nl> # if defined ( __MIPSEL__ ) | | defined ( __MIPSEL ) | | defined ( _MIPSEL ) | | defined ( MIPSEL ) <nl> - CPU = $ { UNAME_MACHINE } el <nl> + MIPS_ENDIAN = el <nl> # else <nl> # if defined ( __MIPSEB__ ) | | defined ( __MIPSEB ) | | defined ( _MIPSEB ) | | defined ( MIPSEB ) <nl> - CPU = $ { UNAME_MACHINE } <nl> + MIPS_ENDIAN = <nl> # else <nl> - CPU = <nl> + MIPS_ENDIAN = <nl> # endif <nl> # endif <nl> EOF <nl> - eval ` $ CC_FOR_BUILD - E $ dummy . c 2 > / dev / null | grep ' ^ CPU ' ` <nl> - test x " $ { CPU } " ! = x & & { echo " $ { CPU } - unknown - linux - $ { LIBC } " ; exit ; } <nl> + eval " ` $ CC_FOR_BUILD - E " $ dummy . c " 2 > / dev / null | grep ' ^ CPU \ | ^ MIPS_ENDIAN \ | ^ LIBCABI ' ` " <nl> + test " x $ CPU " ! = x & & { echo " $ CPU $ { MIPS_ENDIAN } - unknown - linux - $ LIBCABI " ; exit ; } <nl> ; ; <nl> + mips64el : Linux : * : * ) <nl> + echo " $ UNAME_MACHINE " - unknown - linux - " $ LIBC " <nl> + exit ; ; <nl> openrisc * : Linux : * : * ) <nl> - echo or1k - unknown - linux - $ { LIBC } <nl> + echo or1k - unknown - linux - " $ LIBC " <nl> exit ; ; <nl> or32 : Linux : * : * | or1k * : Linux : * : * ) <nl> - echo $ { UNAME_MACHINE } - unknown - linux - $ { LIBC } <nl> + echo " $ UNAME_MACHINE " - unknown - linux - " $ LIBC " <nl> exit ; ; <nl> padre : Linux : * : * ) <nl> - echo sparc - unknown - linux - $ { LIBC } <nl> + echo sparc - unknown - linux - " $ LIBC " <nl> exit ; ; <nl> parisc64 : Linux : * : * | hppa64 : Linux : * : * ) <nl> - echo hppa64 - unknown - linux - $ { LIBC } <nl> + echo hppa64 - unknown - linux - " $ LIBC " <nl> exit ; ; <nl> parisc : Linux : * : * | hppa : Linux : * : * ) <nl> # Look for CPU level <nl> case ` grep ' ^ cpu [ ^ a - z ] * : ' / proc / cpuinfo 2 > / dev / null | cut - d ' ' - f2 ` in <nl> - PA7 * ) echo hppa1 . 1 - unknown - linux - $ { LIBC } ; ; <nl> - PA8 * ) echo hppa2 . 0 - unknown - linux - $ { LIBC } ; ; <nl> - * ) echo hppa - unknown - linux - $ { LIBC } ; ; <nl> + PA7 * ) echo hppa1 . 1 - unknown - linux - " $ LIBC " ; ; <nl> + PA8 * ) echo hppa2 . 
0 - unknown - linux - " $ LIBC " ; ; <nl> + * ) echo hppa - unknown - linux - " $ LIBC " ; ; <nl> esac <nl> exit ; ; <nl> ppc64 : Linux : * : * ) <nl> - echo powerpc64 - unknown - linux - $ { LIBC } <nl> + echo powerpc64 - unknown - linux - " $ LIBC " <nl> exit ; ; <nl> ppc : Linux : * : * ) <nl> - echo powerpc - unknown - linux - $ { LIBC } <nl> + echo powerpc - unknown - linux - " $ LIBC " <nl> exit ; ; <nl> ppc64le : Linux : * : * ) <nl> - echo powerpc64le - unknown - linux - $ { LIBC } <nl> + echo powerpc64le - unknown - linux - " $ LIBC " <nl> exit ; ; <nl> ppcle : Linux : * : * ) <nl> - echo powerpcle - unknown - linux - $ { LIBC } <nl> + echo powerpcle - unknown - linux - " $ LIBC " <nl> + exit ; ; <nl> + riscv32 : Linux : * : * | riscv64 : Linux : * : * ) <nl> + echo " $ UNAME_MACHINE " - unknown - linux - " $ LIBC " <nl> exit ; ; <nl> s390 : Linux : * : * | s390x : Linux : * : * ) <nl> - echo $ { UNAME_MACHINE } - ibm - linux - $ { LIBC } <nl> + echo " $ UNAME_MACHINE " - ibm - linux - " $ LIBC " <nl> exit ; ; <nl> sh64 * : Linux : * : * ) <nl> - echo $ { UNAME_MACHINE } - unknown - linux - $ { LIBC } <nl> + echo " $ UNAME_MACHINE " - unknown - linux - " $ LIBC " <nl> exit ; ; <nl> sh * : Linux : * : * ) <nl> - echo $ { UNAME_MACHINE } - unknown - linux - $ { LIBC } <nl> + echo " $ UNAME_MACHINE " - unknown - linux - " $ LIBC " <nl> exit ; ; <nl> sparc : Linux : * : * | sparc64 : Linux : * : * ) <nl> - echo $ { UNAME_MACHINE } - unknown - linux - $ { LIBC } <nl> + echo " $ UNAME_MACHINE " - unknown - linux - " $ LIBC " <nl> exit ; ; <nl> tile * : Linux : * : * ) <nl> - echo $ { UNAME_MACHINE } - unknown - linux - $ { LIBC } <nl> + echo " $ UNAME_MACHINE " - unknown - linux - " $ LIBC " <nl> exit ; ; <nl> vax : Linux : * : * ) <nl> - echo $ { UNAME_MACHINE } - dec - linux - $ { LIBC } <nl> + echo " $ UNAME_MACHINE " - dec - linux - " $ LIBC " <nl> exit ; ; <nl> x86_64 : Linux : * : * ) <nl> - echo $ { UNAME_MACHINE } - unknown - linux - $ { LIBC } <nl> + echo " $ UNAME_MACHINE " - pc - linux - " $ LIBC " <nl> exit ; ; <nl> xtensa * : Linux : * : * ) <nl> - echo $ { UNAME_MACHINE } - unknown - linux - $ { LIBC } <nl> + echo " $ UNAME_MACHINE " - unknown - linux - " $ LIBC " <nl> exit ; ; <nl> i * 86 : DYNIX / ptx : 4 * : * ) <nl> # ptx 4 . 0 does uname - s correctly , with DYNIX / ptx in there . <nl> EOF <nl> # I am not positive that other SVR4 systems won ' t match this , <nl> # I just have to hope . - - rms . <nl> # Use sysv4 . 2uw . . . so that sysv4 * matches it . <nl> - echo $ { UNAME_MACHINE } - pc - sysv4 . 2uw $ { UNAME_VERSION } <nl> + echo " $ UNAME_MACHINE " - pc - sysv4 . 2uw " $ UNAME_VERSION " <nl> exit ; ; <nl> i * 86 : OS / 2 : * : * ) <nl> # If we were able to find ` uname ' , then EMX Unix compatibility <nl> # is probably installed . <nl> - echo $ { UNAME_MACHINE } - pc - os2 - emx <nl> + echo " $ UNAME_MACHINE " - pc - os2 - emx <nl> exit ; ; <nl> i * 86 : XTS - 300 : * : STOP ) <nl> - echo $ { UNAME_MACHINE } - unknown - stop <nl> + echo " $ UNAME_MACHINE " - unknown - stop <nl> exit ; ; <nl> i * 86 : atheos : * : * ) <nl> - echo $ { UNAME_MACHINE } - unknown - atheos <nl> + echo " $ UNAME_MACHINE " - unknown - atheos <nl> exit ; ; <nl> i * 86 : syllable : * : * ) <nl> - echo $ { UNAME_MACHINE } - pc - syllable <nl> + echo " $ UNAME_MACHINE " - pc - syllable <nl> exit ; ; <nl> i * 86 : LynxOS : 2 . * : * | i * 86 : LynxOS : 3 . [ 01 ] * : * | i * 86 : LynxOS : 4 . 
[ 02 ] * : * ) <nl> - echo i386 - unknown - lynxos $ { UNAME_RELEASE } <nl> + echo i386 - unknown - lynxos " $ UNAME_RELEASE " <nl> exit ; ; <nl> i * 86 : * DOS : * : * ) <nl> - echo $ { UNAME_MACHINE } - pc - msdosdjgpp <nl> + echo " $ UNAME_MACHINE " - pc - msdosdjgpp <nl> exit ; ; <nl> - i * 86 : * : 4 . * : * | i * 86 : SYSTEM_V : 4 . * : * ) <nl> - UNAME_REL = ` echo $ { UNAME_RELEASE } | sed ' s / \ / MP $ / / ' ` <nl> + i * 86 : * : 4 . * : * ) <nl> + UNAME_REL = ` echo " $ UNAME_RELEASE " | sed ' s / \ / MP $ / / ' ` <nl> if grep Novell / usr / include / link . h > / dev / null 2 > / dev / null ; then <nl> - echo $ { UNAME_MACHINE } - univel - sysv $ { UNAME_REL } <nl> + echo " $ UNAME_MACHINE " - univel - sysv " $ UNAME_REL " <nl> else <nl> - echo $ { UNAME_MACHINE } - pc - sysv $ { UNAME_REL } <nl> + echo " $ UNAME_MACHINE " - pc - sysv " $ UNAME_REL " <nl> fi <nl> exit ; ; <nl> i * 86 : * : 5 : [ 678 ] * ) <nl> EOF <nl> * Pentium ) UNAME_MACHINE = i586 ; ; <nl> * Pent * | * Celeron ) UNAME_MACHINE = i686 ; ; <nl> esac <nl> - echo $ { UNAME_MACHINE } - unknown - sysv $ { UNAME_RELEASE } $ { UNAME_SYSTEM } $ { UNAME_VERSION } <nl> + echo " $ UNAME_MACHINE - unknown - sysv $ { UNAME_RELEASE } $ { UNAME_SYSTEM } $ { UNAME_VERSION } " <nl> exit ; ; <nl> i * 86 : * : 3 . 2 : * ) <nl> if test - f / usr / options / cb . name ; then <nl> UNAME_REL = ` sed - n ' s / . * Version / / p ' < / usr / options / cb . name ` <nl> - echo $ { UNAME_MACHINE } - pc - isc $ UNAME_REL <nl> + echo " $ UNAME_MACHINE " - pc - isc " $ UNAME_REL " <nl> elif / bin / uname - X 2 > / dev / null > / dev / null ; then <nl> UNAME_REL = ` ( / bin / uname - X | grep Release | sed - e ' s / . * = / / ' ) ` <nl> ( / bin / uname - X | grep i80486 > / dev / null ) & & UNAME_MACHINE = i486 <nl> EOF <nl> & & UNAME_MACHINE = i686 <nl> ( / bin / uname - X | grep ' ^ Machine . * Pentium Pro ' > / dev / null ) \ <nl> & & UNAME_MACHINE = i686 <nl> - echo $ { UNAME_MACHINE } - pc - sco $ UNAME_REL <nl> + echo " $ UNAME_MACHINE " - pc - sco " $ UNAME_REL " <nl> else <nl> - echo $ { UNAME_MACHINE } - pc - sysv32 <nl> + echo " $ UNAME_MACHINE " - pc - sysv32 <nl> fi <nl> exit ; ; <nl> pc : * : * : * ) <nl> EOF <nl> # uname - m prints for DJGPP always ' pc ' , but it prints nothing about <nl> # the processor , so we play safe by assuming i586 . <nl> # Note : whatever this is , it MUST be the same as what config . sub <nl> - # prints for the " djgpp " host , or else GDB configury will decide that <nl> + # prints for the " djgpp " host , or else GDB configure will decide that <nl> # this is a cross - build . <nl> echo i586 - pc - msdosdjgpp <nl> exit ; ; <nl> EOF <nl> exit ; ; <nl> i860 : * : 4 . * : * ) # i860 - SVR4 <nl> if grep Stardent / usr / include / sys / uadmin . h > / dev / null 2 > & 1 ; then <nl> - echo i860 - stardent - sysv $ { UNAME_RELEASE } # Stardent Vistra i860 - SVR4 <nl> + echo i860 - stardent - sysv " $ UNAME_RELEASE " # Stardent Vistra i860 - SVR4 <nl> else # Add other i860 - SVR4 vendors below as they are discovered . <nl> - echo i860 - unknown - sysv $ { UNAME_RELEASE } # Unknown i860 - SVR4 <nl> + echo i860 - unknown - sysv " $ UNAME_RELEASE " # Unknown i860 - SVR4 <nl> fi <nl> exit ; ; <nl> mini * : CTIX : SYS * 5 : * ) <nl> EOF <nl> test - r / etc / . relid \ <nl> & & OS_REL = . ` sed - n ' s / [ ^ ] * [ ^ ] * \ ( [ 0 - 9 ] [ 0 - 9 ] \ ) . * / \ 1 / p ' < / etc / . relid ` <nl> / bin / uname - p 2 > / dev / null | grep 86 > / dev / null \ <nl> - & & { echo i486 - ncr - sysv4 . 
3 $ { OS_REL } ; exit ; } <nl> + & & { echo i486 - ncr - sysv4 . 3 " $ OS_REL " ; exit ; } <nl> / bin / uname - p 2 > / dev / null | / bin / grep entium > / dev / null \ <nl> - & & { echo i586 - ncr - sysv4 . 3 $ { OS_REL } ; exit ; } ; ; <nl> + & & { echo i586 - ncr - sysv4 . 3 " $ OS_REL " ; exit ; } ; ; <nl> 3 [ 34 ] ? ? : * : 4 . 0 : * | 3 [ 34 ] ? ? , * : * : 4 . 0 : * ) <nl> / bin / uname - p 2 > / dev / null | grep 86 > / dev / null \ <nl> & & { echo i486 - ncr - sysv4 ; exit ; } ; ; <nl> EOF <nl> test - r / etc / . relid \ <nl> & & OS_REL = . ` sed - n ' s / [ ^ ] * [ ^ ] * \ ( [ 0 - 9 ] [ 0 - 9 ] \ ) . * / \ 1 / p ' < / etc / . relid ` <nl> / bin / uname - p 2 > / dev / null | grep 86 > / dev / null \ <nl> - & & { echo i486 - ncr - sysv4 . 3 $ { OS_REL } ; exit ; } <nl> + & & { echo i486 - ncr - sysv4 . 3 " $ OS_REL " ; exit ; } <nl> / bin / uname - p 2 > / dev / null | / bin / grep entium > / dev / null \ <nl> - & & { echo i586 - ncr - sysv4 . 3 $ { OS_REL } ; exit ; } <nl> + & & { echo i586 - ncr - sysv4 . 3 " $ OS_REL " ; exit ; } <nl> / bin / uname - p 2 > / dev / null | / bin / grep pteron > / dev / null \ <nl> - & & { echo i586 - ncr - sysv4 . 3 $ { OS_REL } ; exit ; } ; ; <nl> + & & { echo i586 - ncr - sysv4 . 3 " $ OS_REL " ; exit ; } ; ; <nl> m68 * : LynxOS : 2 . * : * | m68 * : LynxOS : 3 . 0 * : * ) <nl> - echo m68k - unknown - lynxos $ { UNAME_RELEASE } <nl> + echo m68k - unknown - lynxos " $ UNAME_RELEASE " <nl> exit ; ; <nl> mc68030 : UNIX_System_V : 4 . * : * ) <nl> echo m68k - atari - sysv4 <nl> exit ; ; <nl> TSUNAMI : LynxOS : 2 . * : * ) <nl> - echo sparc - unknown - lynxos $ { UNAME_RELEASE } <nl> + echo sparc - unknown - lynxos " $ UNAME_RELEASE " <nl> exit ; ; <nl> rs6000 : LynxOS : 2 . * : * ) <nl> - echo rs6000 - unknown - lynxos $ { UNAME_RELEASE } <nl> + echo rs6000 - unknown - lynxos " $ UNAME_RELEASE " <nl> exit ; ; <nl> PowerPC : LynxOS : 2 . * : * | PowerPC : LynxOS : 3 . [ 01 ] * : * | PowerPC : LynxOS : 4 . [ 02 ] * : * ) <nl> - echo powerpc - unknown - lynxos $ { UNAME_RELEASE } <nl> + echo powerpc - unknown - lynxos " $ UNAME_RELEASE " <nl> exit ; ; <nl> SM [ BE ] S : UNIX_SV : * : * ) <nl> - echo mips - dde - sysv $ { UNAME_RELEASE } <nl> + echo mips - dde - sysv " $ UNAME_RELEASE " <nl> exit ; ; <nl> RM * : ReliantUNIX - * : * : * ) <nl> echo mips - sni - sysv4 <nl> EOF <nl> * : SINIX - * : * : * ) <nl> if uname - p 2 > / dev / null > / dev / null ; then <nl> UNAME_MACHINE = ` ( uname - p ) 2 > / dev / null ` <nl> - echo $ { UNAME_MACHINE } - sni - sysv4 <nl> + echo " $ UNAME_MACHINE " - sni - sysv4 <nl> else <nl> echo ns32k - sni - sysv <nl> fi <nl> EOF <nl> exit ; ; <nl> i * 86 : VOS : * : * ) <nl> # From Paul . Green @ stratus . com . <nl> - echo $ { UNAME_MACHINE } - stratus - vos <nl> + echo " $ UNAME_MACHINE " - stratus - vos <nl> exit ; ; <nl> * : VOS : * : * ) <nl> # From Paul . Green @ stratus . com . <nl> echo hppa1 . 
1 - stratus - vos <nl> exit ; ; <nl> mc68 * : A / UX : * : * ) <nl> - echo m68k - apple - aux $ { UNAME_RELEASE } <nl> + echo m68k - apple - aux " $ UNAME_RELEASE " <nl> exit ; ; <nl> news * : NEWS - OS : 6 * : * ) <nl> echo mips - sony - newsos6 <nl> exit ; ; <nl> R [ 34 ] 000 : * System_V * : * : * | R4000 : UNIX_SYSV : * : * | R * 000 : UNIX_SV : * : * ) <nl> if [ - d / usr / nec ] ; then <nl> - echo mips - nec - sysv $ { UNAME_RELEASE } <nl> + echo mips - nec - sysv " $ UNAME_RELEASE " <nl> else <nl> - echo mips - unknown - sysv $ { UNAME_RELEASE } <nl> + echo mips - unknown - sysv " $ UNAME_RELEASE " <nl> fi <nl> exit ; ; <nl> BeBox : BeOS : * : * ) # BeOS running on hardware made by Be , PPC only . <nl> EOF <nl> echo x86_64 - unknown - haiku <nl> exit ; ; <nl> SX - 4 : SUPER - UX : * : * ) <nl> - echo sx4 - nec - superux $ { UNAME_RELEASE } <nl> + echo sx4 - nec - superux " $ UNAME_RELEASE " <nl> exit ; ; <nl> SX - 5 : SUPER - UX : * : * ) <nl> - echo sx5 - nec - superux $ { UNAME_RELEASE } <nl> + echo sx5 - nec - superux " $ UNAME_RELEASE " <nl> exit ; ; <nl> SX - 6 : SUPER - UX : * : * ) <nl> - echo sx6 - nec - superux $ { UNAME_RELEASE } <nl> + echo sx6 - nec - superux " $ UNAME_RELEASE " <nl> exit ; ; <nl> SX - 7 : SUPER - UX : * : * ) <nl> - echo sx7 - nec - superux $ { UNAME_RELEASE } <nl> + echo sx7 - nec - superux " $ UNAME_RELEASE " <nl> exit ; ; <nl> SX - 8 : SUPER - UX : * : * ) <nl> - echo sx8 - nec - superux $ { UNAME_RELEASE } <nl> + echo sx8 - nec - superux " $ UNAME_RELEASE " <nl> exit ; ; <nl> SX - 8R : SUPER - UX : * : * ) <nl> - echo sx8r - nec - superux $ { UNAME_RELEASE } <nl> + echo sx8r - nec - superux " $ UNAME_RELEASE " <nl> + exit ; ; <nl> + SX - ACE : SUPER - UX : * : * ) <nl> + echo sxace - nec - superux " $ UNAME_RELEASE " <nl> exit ; ; <nl> Power * : Rhapsody : * : * ) <nl> - echo powerpc - apple - rhapsody $ { UNAME_RELEASE } <nl> + echo powerpc - apple - rhapsody " $ UNAME_RELEASE " <nl> exit ; ; <nl> * : Rhapsody : * : * ) <nl> - echo $ { UNAME_MACHINE } - apple - rhapsody $ { UNAME_RELEASE } <nl> + echo " $ UNAME_MACHINE " - apple - rhapsody " $ UNAME_RELEASE " <nl> exit ; ; <nl> * : Darwin : * : * ) <nl> - UNAME_PROCESSOR = ` uname - p ` | | UNAME_PROCESSOR = unknown <nl> - eval $ set_cc_for_build <nl> - if test " $ UNAME_PROCESSOR " = unknown ; then <nl> - UNAME_PROCESSOR = powerpc <nl> + UNAME_PROCESSOR = ` uname - p ` <nl> + case $ UNAME_PROCESSOR in <nl> + unknown ) UNAME_PROCESSOR = powerpc ; ; <nl> + esac <nl> + if command - v xcode - select > / dev / null 2 > / dev / null & & \ <nl> + ! xcode - select - - print - path > / dev / null 2 > / dev / null ; then <nl> + # Avoid executing cc if there is no toolchain installed as <nl> + # cc will be a stub that puts up a graphical alert <nl> + # prompting the user to install developer tools . <nl> + CC_FOR_BUILD = no_compiler_found <nl> + else <nl> + set_cc_for_build <nl> fi <nl> - if test ` echo " $ UNAME_RELEASE " | sed - e ' s / \ . . * / / ' ` - le 10 ; then <nl> - if [ " $ CC_FOR_BUILD " ! = ' no_compiler_found ' ] ; then <nl> - if ( echo ' # ifdef __LP64__ ' ; echo IS_64BIT_ARCH ; echo ' # endif ' ) | \ <nl> - ( CCOPTS = $ CC_FOR_BUILD - E - 2 > / dev / null ) | \ <nl> - grep IS_64BIT_ARCH > / dev / null <nl> - then <nl> - case $ UNAME_PROCESSOR in <nl> - i386 ) UNAME_PROCESSOR = x86_64 ; ; <nl> - powerpc ) UNAME_PROCESSOR = powerpc64 ; ; <nl> - esac <nl> - fi <nl> + if [ " $ CC_FOR_BUILD " ! 
= no_compiler_found ] ; then <nl> + if ( echo ' # ifdef __LP64__ ' ; echo IS_64BIT_ARCH ; echo ' # endif ' ) | \ <nl> + ( CCOPTS = " " $ CC_FOR_BUILD - E - 2 > / dev / null ) | \ <nl> + grep IS_64BIT_ARCH > / dev / null <nl> + then <nl> + case $ UNAME_PROCESSOR in <nl> + i386 ) UNAME_PROCESSOR = x86_64 ; ; <nl> + powerpc ) UNAME_PROCESSOR = powerpc64 ; ; <nl> + esac <nl> + fi <nl> + # On 10 . 4 - 10 . 6 one might compile for PowerPC via gcc - arch ppc <nl> + if ( echo ' # ifdef __POWERPC__ ' ; echo IS_PPC ; echo ' # endif ' ) | \ <nl> + ( CCOPTS = " " $ CC_FOR_BUILD - E - 2 > / dev / null ) | \ <nl> + grep IS_PPC > / dev / null <nl> + then <nl> + UNAME_PROCESSOR = powerpc <nl> fi <nl> elif test " $ UNAME_PROCESSOR " = i386 ; then <nl> - # Avoid executing cc on OS X 10 . 9 , as it ships with a stub <nl> - # that puts up a graphical alert prompting to install <nl> - # developer tools . Any system running Mac OS X 10 . 7 or <nl> - # later ( Darwin 11 and later ) is required to have a 64 - bit <nl> - # processor . This is not true of the ARM version of Darwin <nl> - # that Apple uses in portable devices . <nl> - UNAME_PROCESSOR = x86_64 <nl> + # uname - m returns i386 or x86_64 <nl> + UNAME_PROCESSOR = $ UNAME_MACHINE <nl> fi <nl> - echo $ { UNAME_PROCESSOR } - apple - darwin $ { UNAME_RELEASE } <nl> + echo " $ UNAME_PROCESSOR " - apple - darwin " $ UNAME_RELEASE " <nl> exit ; ; <nl> * : procnto * : * : * | * : QNX : [ 0123456789 ] * : * ) <nl> UNAME_PROCESSOR = ` uname - p ` <nl> - if test " $ UNAME_PROCESSOR " = " x86 " ; then <nl> + if test " $ UNAME_PROCESSOR " = x86 ; then <nl> UNAME_PROCESSOR = i386 <nl> UNAME_MACHINE = pc <nl> fi <nl> - echo $ { UNAME_PROCESSOR } - $ { UNAME_MACHINE } - nto - qnx $ { UNAME_RELEASE } <nl> + echo " $ UNAME_PROCESSOR " - " $ UNAME_MACHINE " - nto - qnx " $ UNAME_RELEASE " <nl> exit ; ; <nl> * : QNX : * : 4 * ) <nl> echo i386 - pc - qnx <nl> exit ; ; <nl> - NEO - ? : NONSTOP_KERNEL : * : * ) <nl> - echo neo - tandem - nsk $ { UNAME_RELEASE } <nl> + NEO - * : NONSTOP_KERNEL : * : * ) <nl> + echo neo - tandem - nsk " $ UNAME_RELEASE " <nl> exit ; ; <nl> NSE - * : NONSTOP_KERNEL : * : * ) <nl> - echo nse - tandem - nsk $ { UNAME_RELEASE } <nl> + echo nse - tandem - nsk " $ UNAME_RELEASE " <nl> + exit ; ; <nl> + NSR - * : NONSTOP_KERNEL : * : * ) <nl> + echo nsr - tandem - nsk " $ UNAME_RELEASE " <nl> + exit ; ; <nl> + NSV - * : NONSTOP_KERNEL : * : * ) <nl> + echo nsv - tandem - nsk " $ UNAME_RELEASE " <nl> exit ; ; <nl> - NSR - ? : NONSTOP_KERNEL : * : * ) <nl> - echo nsr - tandem - nsk $ { UNAME_RELEASE } <nl> + NSX - * : NONSTOP_KERNEL : * : * ) <nl> + echo nsx - tandem - nsk " $ UNAME_RELEASE " <nl> exit ; ; <nl> * : NonStop - UX : * : * ) <nl> echo mips - compaq - nonstopux <nl> EOF <nl> echo bs2000 - siemens - sysv <nl> exit ; ; <nl> DS / * : UNIX_System_V : * : * ) <nl> - echo $ { UNAME_MACHINE } - $ { UNAME_SYSTEM } - $ { UNAME_RELEASE } <nl> + echo " $ UNAME_MACHINE " - " $ UNAME_SYSTEM " - " $ UNAME_RELEASE " <nl> exit ; ; <nl> * : Plan9 : * : * ) <nl> # " uname - m " is not consistent , so use $ cputype instead . 386 <nl> # is converted to i386 for consistency with other x86 <nl> # operating systems . 
<nl> - if test " $ cputype " = " 386 " ; then <nl> + # shellcheck disable = SC2154 <nl> + if test " $ cputype " = 386 ; then <nl> UNAME_MACHINE = i386 <nl> else <nl> UNAME_MACHINE = " $ cputype " <nl> fi <nl> - echo $ { UNAME_MACHINE } - unknown - plan9 <nl> + echo " $ UNAME_MACHINE " - unknown - plan9 <nl> exit ; ; <nl> * : TOPS - 10 : * : * ) <nl> echo pdp10 - unknown - tops10 <nl> EOF <nl> echo pdp10 - unknown - its <nl> exit ; ; <nl> SEI : * : * : SEIUX ) <nl> - echo mips - sei - seiux $ { UNAME_RELEASE } <nl> + echo mips - sei - seiux " $ UNAME_RELEASE " <nl> exit ; ; <nl> * : DragonFly : * : * ) <nl> - echo $ { UNAME_MACHINE } - unknown - dragonfly ` echo $ { UNAME_RELEASE } | sed - e ' s / [ - ( ] . * / / ' ` <nl> + echo " $ UNAME_MACHINE " - unknown - dragonfly " ` echo " $ UNAME_RELEASE " | sed - e ' s / [ - ( ] . * / / ' ` " <nl> exit ; ; <nl> * : * VMS : * : * ) <nl> UNAME_MACHINE = ` ( uname - p ) 2 > / dev / null ` <nl> - case " $ { UNAME_MACHINE } " in <nl> + case " $ UNAME_MACHINE " in <nl> A * ) echo alpha - dec - vms ; exit ; ; <nl> I * ) echo ia64 - dec - vms ; exit ; ; <nl> V * ) echo vax - dec - vms ; exit ; ; <nl> EOF <nl> echo i386 - pc - xenix <nl> exit ; ; <nl> i * 86 : skyos : * : * ) <nl> - echo $ { UNAME_MACHINE } - pc - skyos ` echo $ { UNAME_RELEASE } ` | sed - e ' s / . * $ / / ' <nl> + echo " $ UNAME_MACHINE " - pc - skyos " ` echo " $ UNAME_RELEASE " | sed - e ' s / . * $ / / ' ` " <nl> exit ; ; <nl> i * 86 : rdos : * : * ) <nl> - echo $ { UNAME_MACHINE } - pc - rdos <nl> + echo " $ UNAME_MACHINE " - pc - rdos <nl> exit ; ; <nl> i * 86 : AROS : * : * ) <nl> - echo $ { UNAME_MACHINE } - pc - aros <nl> + echo " $ UNAME_MACHINE " - pc - aros <nl> exit ; ; <nl> x86_64 : VMkernel : * : * ) <nl> - echo $ { UNAME_MACHINE } - unknown - esx <nl> + echo " $ UNAME_MACHINE " - unknown - esx <nl> exit ; ; <nl> + amd64 : Isilon \ OneFS : * : * ) <nl> + echo x86_64 - unknown - onefs <nl> + exit ; ; <nl> + * : Unleashed : * : * ) <nl> + echo " $ UNAME_MACHINE " - unknown - unleashed " $ UNAME_RELEASE " <nl> + exit ; ; <nl> + esac <nl> + <nl> + # No uname command or uname output not recognized . <nl> + set_cc_for_build <nl> + cat > " $ dummy . c " < < EOF <nl> + # ifdef _SEQUENT_ <nl> + # include < sys / types . h > <nl> + # include < sys / utsname . h > <nl> + # endif <nl> + # if defined ( ultrix ) | | defined ( _ultrix ) | | defined ( __ultrix ) | | defined ( __ultrix__ ) <nl> + # if defined ( vax ) | | defined ( __vax ) | | defined ( __vax__ ) | | defined ( mips ) | | defined ( __mips ) | | defined ( __mips__ ) | | defined ( MIPS ) | | defined ( __MIPS__ ) <nl> + # include < signal . h > <nl> + # if defined ( _SIZE_T_ ) | | defined ( SIGLOST ) <nl> + # include < sys / utsname . h > <nl> + # endif <nl> + # endif <nl> + # endif <nl> + main ( ) <nl> + { <nl> + # if defined ( sony ) <nl> + # if defined ( MIPSEB ) <nl> + / * BFD wants " bsd " instead of " newsos " . Perhaps BFD should be changed , <nl> + I don ' t know . . . . * / <nl> + printf ( " mips - sony - bsd \ n " ) ; exit ( 0 ) ; <nl> + # else <nl> + # include < sys / param . h > <nl> + printf ( " m68k - sony - newsos % s \ n " , <nl> + # ifdef NEWSOS4 <nl> + " 4 " <nl> + # else <nl> + " " <nl> + # endif <nl> + ) ; exit ( 0 ) ; <nl> + # endif <nl> + # endif <nl> + <nl> + # if defined ( NeXT ) <nl> + # if ! defined ( __ARCHITECTURE__ ) <nl> + # define __ARCHITECTURE__ " m68k " <nl> + # endif <nl> + int version ; <nl> + version = ` ( hostinfo | sed - n ' s / . * NeXT Mach \ ( [ 0 - 9 ] * \ ) . 
* / \ 1 / p ' ) 2 > / dev / null ` ; <nl> + if ( version < 4 ) <nl> + printf ( " % s - next - nextstep % d \ n " , __ARCHITECTURE__ , version ) ; <nl> + else <nl> + printf ( " % s - next - openstep % d \ n " , __ARCHITECTURE__ , version ) ; <nl> + exit ( 0 ) ; <nl> + # endif <nl> + <nl> + # if defined ( MULTIMAX ) | | defined ( n16 ) <nl> + # if defined ( UMAXV ) <nl> + printf ( " ns32k - encore - sysv \ n " ) ; exit ( 0 ) ; <nl> + # else <nl> + # if defined ( CMU ) <nl> + printf ( " ns32k - encore - mach \ n " ) ; exit ( 0 ) ; <nl> + # else <nl> + printf ( " ns32k - encore - bsd \ n " ) ; exit ( 0 ) ; <nl> + # endif <nl> + # endif <nl> + # endif <nl> + <nl> + # if defined ( __386BSD__ ) <nl> + printf ( " i386 - pc - bsd \ n " ) ; exit ( 0 ) ; <nl> + # endif <nl> + <nl> + # if defined ( sequent ) <nl> + # if defined ( i386 ) <nl> + printf ( " i386 - sequent - dynix \ n " ) ; exit ( 0 ) ; <nl> + # endif <nl> + # if defined ( ns32000 ) <nl> + printf ( " ns32k - sequent - dynix \ n " ) ; exit ( 0 ) ; <nl> + # endif <nl> + # endif <nl> + <nl> + # if defined ( _SEQUENT_ ) <nl> + struct utsname un ; <nl> + <nl> + uname ( & un ) ; <nl> + if ( strncmp ( un . version , " V2 " , 2 ) = = 0 ) { <nl> + printf ( " i386 - sequent - ptx2 \ n " ) ; exit ( 0 ) ; <nl> + } <nl> + if ( strncmp ( un . version , " V1 " , 2 ) = = 0 ) { / * XXX is V1 correct ? * / <nl> + printf ( " i386 - sequent - ptx1 \ n " ) ; exit ( 0 ) ; <nl> + } <nl> + printf ( " i386 - sequent - ptx \ n " ) ; exit ( 0 ) ; <nl> + # endif <nl> + <nl> + # if defined ( vax ) <nl> + # if ! defined ( ultrix ) <nl> + # include < sys / param . h > <nl> + # if defined ( BSD ) <nl> + # if BSD = = 43 <nl> + printf ( " vax - dec - bsd4 . 3 \ n " ) ; exit ( 0 ) ; <nl> + # else <nl> + # if BSD = = 199006 <nl> + printf ( " vax - dec - bsd4 . 3reno \ n " ) ; exit ( 0 ) ; <nl> + # else <nl> + printf ( " vax - dec - bsd \ n " ) ; exit ( 0 ) ; <nl> + # endif <nl> + # endif <nl> + # else <nl> + printf ( " vax - dec - bsd \ n " ) ; exit ( 0 ) ; <nl> + # endif <nl> + # else <nl> + # if defined ( _SIZE_T_ ) | | defined ( SIGLOST ) <nl> + struct utsname un ; <nl> + uname ( & un ) ; <nl> + printf ( " vax - dec - ultrix % s \ n " , un . release ) ; exit ( 0 ) ; <nl> + # else <nl> + printf ( " vax - dec - ultrix \ n " ) ; exit ( 0 ) ; <nl> + # endif <nl> + # endif <nl> + # endif <nl> + # if defined ( ultrix ) | | defined ( _ultrix ) | | defined ( __ultrix ) | | defined ( __ultrix__ ) <nl> + # if defined ( mips ) | | defined ( __mips ) | | defined ( __mips__ ) | | defined ( MIPS ) | | defined ( __MIPS__ ) <nl> + # if defined ( _SIZE_T_ ) | | defined ( SIGLOST ) <nl> + struct utsname * un ; <nl> + uname ( & un ) ; <nl> + printf ( " mips - dec - ultrix % s \ n " , un . release ) ; exit ( 0 ) ; <nl> + # else <nl> + printf ( " mips - dec - ultrix \ n " ) ; exit ( 0 ) ; <nl> + # endif <nl> + # endif <nl> + # endif <nl> + <nl> + # if defined ( alliant ) & & defined ( i860 ) <nl> + printf ( " i860 - alliant - bsd \ n " ) ; exit ( 0 ) ; <nl> + # endif <nl> + <nl> + exit ( 1 ) ; <nl> + } <nl> + EOF <nl> + <nl> + $ CC_FOR_BUILD - o " $ dummy " " $ dummy . c " 2 > / dev / null & & SYSTEM_NAME = ` $ dummy ` & & <nl> + { echo " $ SYSTEM_NAME " ; exit ; } <nl> + <nl> + # Apollos put the system type in the environment . 
<nl> + test - d / usr / apollo & & { echo " $ ISP - apollo - $ SYSTYPE " ; exit ; } <nl> + <nl> + echo " $ 0 : unable to guess system type " > & 2 <nl> + <nl> + case " $ UNAME_MACHINE : $ UNAME_SYSTEM " in <nl> + mips : Linux | mips64 : Linux ) <nl> + # If we got here on MIPS GNU / Linux , output extra information . <nl> + cat > & 2 < < EOF <nl> + <nl> + NOTE : MIPS GNU / Linux systems require a C compiler to fully recognize <nl> + the system type . Please install a C compiler and try again . <nl> + EOF <nl> + ; ; <nl> esac <nl> <nl> cat > & 2 < < EOF <nl> - $ 0 : unable to guess system type <nl> <nl> - This script , last modified $ timestamp , has failed to recognize <nl> - the operating system you are using . It is advised that you <nl> - download the most up to date version of the config scripts from <nl> + This script ( version $ timestamp ) , has failed to recognize the <nl> + operating system you are using . If your script is old , overwrite * all * <nl> + copies of config . guess and config . sub with the latest versions from : <nl> <nl> - http : / / git . savannah . gnu . org / gitweb / ? p = config . git ; a = blob_plain ; f = config . guess ; hb = HEAD <nl> + https : / / git . savannah . gnu . org / gitweb / ? p = config . git ; a = blob_plain ; f = config . guess <nl> and <nl> - http : / / git . savannah . gnu . org / gitweb / ? p = config . git ; a = blob_plain ; f = config . sub ; hb = HEAD <nl> + https : / / git . savannah . gnu . org / gitweb / ? p = config . git ; a = blob_plain ; f = config . sub <nl> <nl> - If the version you run ( $ 0 ) is already up to date , please <nl> - send the following data and any information you think might be <nl> - pertinent to < config - patches @ gnu . org > in order to provide the needed <nl> - information to handle your system . <nl> + If $ 0 has already been updated , send the following data and any <nl> + information you think might be pertinent to config - patches @ gnu . org to <nl> + provide the necessary information to handle your system . <nl> <nl> config . guess timestamp = $ timestamp <nl> <nl> hostinfo = ` ( hostinfo ) 2 > / dev / null ` <nl> / usr / bin / oslevel = ` ( / usr / bin / oslevel ) 2 > / dev / null ` <nl> / usr / convex / getsysinfo = ` ( / usr / convex / getsysinfo ) 2 > / dev / null ` <nl> <nl> - UNAME_MACHINE = $ { UNAME_MACHINE } <nl> - UNAME_RELEASE = $ { UNAME_RELEASE } <nl> - UNAME_SYSTEM = $ { UNAME_SYSTEM } <nl> - UNAME_VERSION = $ { UNAME_VERSION } <nl> + UNAME_MACHINE = " $ UNAME_MACHINE " <nl> + UNAME_RELEASE = " $ UNAME_RELEASE " <nl> + UNAME_SYSTEM = " $ UNAME_SYSTEM " <nl> + UNAME_VERSION = " $ UNAME_VERSION " <nl> EOF <nl> <nl> exit 1 <nl> <nl> # Local variables : <nl> - # eval : ( add - hook ' write - file - hooks ' time - stamp ) <nl> + # eval : ( add - hook ' before - save - hook ' time - stamp ) <nl> # time - stamp - start : " timestamp = ' " <nl> # time - stamp - format : " % : y - % 02m - % 02d " <nl> # time - stamp - end : " ' " <nl> mmm a / build - aux / config . sub <nl> ppp b / build - aux / config . sub <nl> <nl> # ! / bin / sh <nl> # Configuration validation subroutine script . <nl> - # Copyright 1992 - 2014 Free Software Foundation , Inc . <nl> + # Copyright 1992 - 2019 Free Software Foundation , Inc . 
<nl> <nl> - timestamp = ' 2014 - 12 - 03 ' <nl> + timestamp = ' 2019 - 06 - 30 ' <nl> <nl> # This file is free software ; you can redistribute it and / or modify it <nl> # under the terms of the GNU General Public License as published by <nl> timestamp = ' 2014 - 12 - 03 ' <nl> # General Public License for more details . <nl> # <nl> # You should have received a copy of the GNU General Public License <nl> - # along with this program ; if not , see < http : / / www . gnu . org / licenses / > . <nl> + # along with this program ; if not , see < https : / / www . gnu . org / licenses / > . <nl> # <nl> # As a special exception to the GNU General Public License , if you <nl> # distribute this file as part of a program that contains a <nl> timestamp = ' 2014 - 12 - 03 ' <nl> # Otherwise , we print the canonical config type on stdout and succeed . <nl> <nl> # You can get the latest version of this script from : <nl> - # http : / / git . savannah . gnu . org / gitweb / ? p = config . git ; a = blob_plain ; f = config . sub ; hb = HEAD <nl> + # https : / / git . savannah . gnu . org / gitweb / ? p = config . git ; a = blob_plain ; f = config . sub <nl> <nl> # This file is supposed to be the same for all GNU packages <nl> # and recognize all the CPU types , system types and aliases <nl> timestamp = ' 2014 - 12 - 03 ' <nl> me = ` echo " $ 0 " | sed - e ' s , . * / , , ' ` <nl> <nl> usage = " \ <nl> - Usage : $ 0 [ OPTION ] CPU - MFR - OPSYS <nl> - $ 0 [ OPTION ] ALIAS <nl> + Usage : $ 0 [ OPTION ] CPU - MFR - OPSYS or ALIAS <nl> <nl> Canonicalize a configuration name . <nl> <nl> - Operation modes : <nl> + Options : <nl> - h , - - help print this help , then exit <nl> - t , - - time - stamp print date of last modification , then exit <nl> - v , - - version print version number , then exit <nl> Report bugs and patches to < config - patches @ gnu . org > . " <nl> version = " \ <nl> GNU config . sub ( $ timestamp ) <nl> <nl> - Copyright 1992 - 2014 Free Software Foundation , Inc . <nl> + Copyright 1992 - 2019 Free Software Foundation , Inc . <nl> <nl> This is free software ; see the source for copying conditions . There is NO <nl> warranty ; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . " <nl> while test $ # - gt 0 ; do <nl> - ) # Use stdin as input . <nl> break ; ; <nl> - * ) <nl> - echo " $ me : invalid option $ 1 $ help " <nl> + echo " $ me : invalid option $ 1 $ help " > & 2 <nl> exit 1 ; ; <nl> <nl> * local * ) <nl> # First pass through any local machine types . <nl> - echo $ 1 <nl> + echo " $ 1 " <nl> exit ; ; <nl> <nl> * ) <nl> case $ # in <nl> exit 1 ; ; <nl> esac <nl> <nl> - # Separate what the user gave into CPU - COMPANY and OS or KERNEL - OS ( if any ) . <nl> - # Here we must recognize all the valid KERNEL - OS combinations . <nl> - maybe_os = ` echo $ 1 | sed ' s / ^ \ ( . * \ ) - \ ( [ ^ - ] * - [ ^ - ] * \ ) $ / \ 2 / ' ` <nl> - case $ maybe_os in <nl> - nto - qnx * | linux - gnu * | linux - android * | linux - dietlibc | linux - newlib * | \ <nl> - linux - musl * | linux - uclibc * | uclinux - uclibc * | uclinux - gnu * | kfreebsd * - gnu * | \ <nl> - knetbsd * - gnu * | netbsd * - gnu * | \ <nl> - kopensolaris * - gnu * | \ <nl> - storm - chaos * | os2 - emx * | rtmk - nova * ) <nl> - os = - $ maybe_os <nl> - basic_machine = ` echo $ 1 | sed ' s / ^ \ ( . * \ ) - \ ( [ ^ - ] * - [ ^ - ] * \ ) $ / \ 1 / ' ` <nl> - ; ; <nl> - android - linux ) <nl> - os = - linux - android <nl> - basic_machine = ` echo $ 1 | sed ' s / ^ \ ( . 
* \ ) - \ ( [ ^ - ] * - [ ^ - ] * \ ) $ / \ 1 / ' ` - unknown <nl> - ; ; <nl> - * ) <nl> - basic_machine = ` echo $ 1 | sed ' s / - [ ^ - ] * $ / / ' ` <nl> - if [ $ basic_machine ! = $ 1 ] <nl> - then os = ` echo $ 1 | sed ' s / . * - / - / ' ` <nl> - else os = ; fi <nl> - ; ; <nl> - esac <nl> + # Split fields of configuration type <nl> + # shellcheck disable = SC2162 <nl> + IFS = " - " read field1 field2 field3 field4 < < EOF <nl> + $ 1 <nl> + EOF <nl> <nl> - # # # Let ' s recognize common machines as not being operating systems so <nl> - # # # that things like config . sub decstation - 3100 work . We also <nl> - # # # recognize some manufacturers as not being operating systems , so we <nl> - # # # can provide default operating systems below . <nl> - case $ os in <nl> - - sun * os * ) <nl> - # Prevent following clause from handling this invalid input . <nl> - ; ; <nl> - - dec * | - mips * | - sequent * | - encore * | - pc532 * | - sgi * | - sony * | \ <nl> - - att * | - 7300 * | - 3300 * | - delta * | - motorola * | - sun [ 234 ] * | \ <nl> - - unicom * | - ibm * | - next | - hp | - isi * | - apollo | - altos * | \ <nl> - - convergent * | - ncr * | - news | - 32 * | - 3600 * | - 3100 * | - hitachi * | \ <nl> - - c [ 123 ] * | - convex * | - sun | - crds | - omron * | - dg | - ultra | - tti * | \ <nl> - - harris | - dolphin | - highlevel | - gould | - cbm | - ns | - masscomp | \ <nl> - - apple | - axis | - knuth | - cray | - microblaze * ) <nl> - os = <nl> - basic_machine = $ 1 <nl> - ; ; <nl> - - bluegene * ) <nl> - os = - cnk <nl> - ; ; <nl> - - sim | - cisco | - oki | - wec | - winbond ) <nl> - os = <nl> - basic_machine = $ 1 <nl> - ; ; <nl> - - scout ) <nl> - ; ; <nl> - - wrs ) <nl> - os = - vxworks <nl> - basic_machine = $ 1 <nl> - ; ; <nl> - - chorusos * ) <nl> - os = - chorusos <nl> - basic_machine = $ 1 <nl> - ; ; <nl> - - chorusrdb ) <nl> - os = - chorusrdb <nl> - basic_machine = $ 1 <nl> - ; ; <nl> - - hiux * ) <nl> - os = - hiuxwe2 <nl> - ; ; <nl> - - sco6 ) <nl> - os = - sco5v6 <nl> - basic_machine = ` echo $ 1 | sed - e ' s / 86 - . * / 86 - pc / ' ` <nl> - ; ; <nl> - - sco5 ) <nl> - os = - sco3 . 2v5 <nl> - basic_machine = ` echo $ 1 | sed - e ' s / 86 - . * / 86 - pc / ' ` <nl> - ; ; <nl> - - sco4 ) <nl> - os = - sco3 . 2v4 <nl> - basic_machine = ` echo $ 1 | sed - e ' s / 86 - . * / 86 - pc / ' ` <nl> - ; ; <nl> - - sco3 . 2 . [ 4 - 9 ] * ) <nl> - os = ` echo $ os | sed - e ' s / sco3 . 2 . / sco3 . 2v / ' ` <nl> - basic_machine = ` echo $ 1 | sed - e ' s / 86 - . * / 86 - pc / ' ` <nl> - ; ; <nl> - - sco3 . 2v [ 4 - 9 ] * ) <nl> - # Don ' t forget version if it is 3 . 2v4 or newer . <nl> - basic_machine = ` echo $ 1 | sed - e ' s / 86 - . * / 86 - pc / ' ` <nl> - ; ; <nl> - - sco5v6 * ) <nl> - # Don ' t forget version if it is 3 . 2v4 or newer . <nl> - basic_machine = ` echo $ 1 | sed - e ' s / 86 - . * / 86 - pc / ' ` <nl> - ; ; <nl> - - sco * ) <nl> - os = - sco3 . 2v2 <nl> - basic_machine = ` echo $ 1 | sed - e ' s / 86 - . * / 86 - pc / ' ` <nl> - ; ; <nl> - - udk * ) <nl> - basic_machine = ` echo $ 1 | sed - e ' s / 86 - . * / 86 - pc / ' ` <nl> - ; ; <nl> - - isc ) <nl> - os = - isc2 . 2 <nl> - basic_machine = ` echo $ 1 | sed - e ' s / 86 - . * / 86 - pc / ' ` <nl> - ; ; <nl> - - clix * ) <nl> - basic_machine = clipper - intergraph <nl> - ; ; <nl> - - isc * ) <nl> - basic_machine = ` echo $ 1 | sed - e ' s / 86 - . 
* / 86 - pc / ' ` <nl> - ; ; <nl> - - lynx * 178 ) <nl> - os = - lynxos178 <nl> - ; ; <nl> - - lynx * 5 ) <nl> - os = - lynxos5 <nl> - ; ; <nl> - - lynx * ) <nl> - os = - lynxos <nl> + # Separate into logical components for further validation <nl> + case $ 1 in <nl> + * - * - * - * - * ) <nl> + echo Invalid configuration \ ` " $ 1 " \ ' : more than four components > & 2 <nl> + exit 1 <nl> ; ; <nl> - - ptx * ) <nl> - basic_machine = ` echo $ 1 | sed - e ' s / 86 - . * / 86 - sequent / ' ` <nl> + * - * - * - * ) <nl> + basic_machine = $ field1 - $ field2 <nl> + os = $ field3 - $ field4 <nl> ; ; <nl> - - windowsnt * ) <nl> - os = ` echo $ os | sed - e ' s / windowsnt / winnt / ' ` <nl> + * - * - * ) <nl> + # Ambiguous whether COMPANY is present , or skipped and KERNEL - OS is two <nl> + # parts <nl> + maybe_os = $ field2 - $ field3 <nl> + case $ maybe_os in <nl> + nto - qnx * | linux - gnu * | linux - android * | linux - dietlibc \ <nl> + | linux - newlib * | linux - musl * | linux - uclibc * | uclinux - uclibc * \ <nl> + | uclinux - gnu * | kfreebsd * - gnu * | knetbsd * - gnu * | netbsd * - gnu * \ <nl> + | netbsd * - eabi * | kopensolaris * - gnu * | cloudabi * - eabi * \ <nl> + | storm - chaos * | os2 - emx * | rtmk - nova * ) <nl> + basic_machine = $ field1 <nl> + os = $ maybe_os <nl> + ; ; <nl> + android - linux ) <nl> + basic_machine = $ field1 - unknown <nl> + os = linux - android <nl> + ; ; <nl> + * ) <nl> + basic_machine = $ field1 - $ field2 <nl> + os = $ field3 <nl> + ; ; <nl> + esac <nl> ; ; <nl> - - psos * ) <nl> - os = - psos <nl> + * - * ) <nl> + # A lone config we happen to match not fitting any pattern <nl> + case $ field1 - $ field2 in <nl> + decstation - 3100 ) <nl> + basic_machine = mips - dec <nl> + os = <nl> + ; ; <nl> + * - * ) <nl> + # Second component is usually , but not always the OS <nl> + case $ field2 in <nl> + # Prevent following clause from handling this valid os <nl> + sun * os * ) <nl> + basic_machine = $ field1 <nl> + os = $ field2 <nl> + ; ; <nl> + # Manufacturers <nl> + dec * | mips * | sequent * | encore * | pc533 * | sgi * | sony * \ <nl> + | att * | 7300 * | 3300 * | delta * | motorola * | sun [ 234 ] * \ <nl> + | unicom * | ibm * | next | hp | isi * | apollo | altos * \ <nl> + | convergent * | ncr * | news | 32 * | 3600 * | 3100 * \ <nl> + | hitachi * | c [ 123 ] * | convex * | sun | crds | omron * | dg \ <nl> + | ultra | tti * | harris | dolphin | highlevel | gould \ <nl> + | cbm | ns | masscomp | apple | axis | knuth | cray \ <nl> + | microblaze * | sim | cisco \ <nl> + | oki | wec | wrs | winbond ) <nl> + basic_machine = $ field1 - $ field2 <nl> + os = <nl> + ; ; <nl> + * ) <nl> + basic_machine = $ field1 <nl> + os = $ field2 <nl> + ; ; <nl> + esac <nl> + ; ; <nl> + esac <nl> ; ; <nl> - - mint | - mint [ 0 - 9 ] * ) <nl> - basic_machine = m68k - atari <nl> - os = - mint <nl> + * ) <nl> + # Convert single - component short - hands not valid as part of <nl> + # multi - component configurations . 
<nl> + case $ field1 in <nl> + 386bsd ) <nl> + basic_machine = i386 - pc <nl> + os = bsd <nl> + ; ; <nl> + a29khif ) <nl> + basic_machine = a29k - amd <nl> + os = udi <nl> + ; ; <nl> + adobe68k ) <nl> + basic_machine = m68010 - adobe <nl> + os = scout <nl> + ; ; <nl> + alliant ) <nl> + basic_machine = fx80 - alliant <nl> + os = <nl> + ; ; <nl> + altos | altos3068 ) <nl> + basic_machine = m68k - altos <nl> + os = <nl> + ; ; <nl> + am29k ) <nl> + basic_machine = a29k - none <nl> + os = bsd <nl> + ; ; <nl> + amdahl ) <nl> + basic_machine = 580 - amdahl <nl> + os = sysv <nl> + ; ; <nl> + amiga ) <nl> + basic_machine = m68k - unknown <nl> + os = <nl> + ; ; <nl> + amigaos | amigados ) <nl> + basic_machine = m68k - unknown <nl> + os = amigaos <nl> + ; ; <nl> + amigaunix | amix ) <nl> + basic_machine = m68k - unknown <nl> + os = sysv4 <nl> + ; ; <nl> + apollo68 ) <nl> + basic_machine = m68k - apollo <nl> + os = sysv <nl> + ; ; <nl> + apollo68bsd ) <nl> + basic_machine = m68k - apollo <nl> + os = bsd <nl> + ; ; <nl> + aros ) <nl> + basic_machine = i386 - pc <nl> + os = aros <nl> + ; ; <nl> + aux ) <nl> + basic_machine = m68k - apple <nl> + os = aux <nl> + ; ; <nl> + balance ) <nl> + basic_machine = ns32k - sequent <nl> + os = dynix <nl> + ; ; <nl> + blackfin ) <nl> + basic_machine = bfin - unknown <nl> + os = linux <nl> + ; ; <nl> + cegcc ) <nl> + basic_machine = arm - unknown <nl> + os = cegcc <nl> + ; ; <nl> + convex - c1 ) <nl> + basic_machine = c1 - convex <nl> + os = bsd <nl> + ; ; <nl> + convex - c2 ) <nl> + basic_machine = c2 - convex <nl> + os = bsd <nl> + ; ; <nl> + convex - c32 ) <nl> + basic_machine = c32 - convex <nl> + os = bsd <nl> + ; ; <nl> + convex - c34 ) <nl> + basic_machine = c34 - convex <nl> + os = bsd <nl> + ; ; <nl> + convex - c38 ) <nl> + basic_machine = c38 - convex <nl> + os = bsd <nl> + ; ; <nl> + cray ) <nl> + basic_machine = j90 - cray <nl> + os = unicos <nl> + ; ; <nl> + crds | unos ) <nl> + basic_machine = m68k - crds <nl> + os = <nl> + ; ; <nl> + da30 ) <nl> + basic_machine = m68k - da30 <nl> + os = <nl> + ; ; <nl> + decstation | pmax | pmin | dec3100 | decstatn ) <nl> + basic_machine = mips - dec <nl> + os = <nl> + ; ; <nl> + delta88 ) <nl> + basic_machine = m88k - motorola <nl> + os = sysv3 <nl> + ; ; <nl> + dicos ) <nl> + basic_machine = i686 - pc <nl> + os = dicos <nl> + ; ; <nl> + djgpp ) <nl> + basic_machine = i586 - pc <nl> + os = msdosdjgpp <nl> + ; ; <nl> + ebmon29k ) <nl> + basic_machine = a29k - amd <nl> + os = ebmon <nl> + ; ; <nl> + es1800 | OSE68k | ose68k | ose | OSE ) <nl> + basic_machine = m68k - ericsson <nl> + os = ose <nl> + ; ; <nl> + gmicro ) <nl> + basic_machine = tron - gmicro <nl> + os = sysv <nl> + ; ; <nl> + go32 ) <nl> + basic_machine = i386 - pc <nl> + os = go32 <nl> + ; ; <nl> + h8300hms ) <nl> + basic_machine = h8300 - hitachi <nl> + os = hms <nl> + ; ; <nl> + h8300xray ) <nl> + basic_machine = h8300 - hitachi <nl> + os = xray <nl> + ; ; <nl> + h8500hms ) <nl> + basic_machine = h8500 - hitachi <nl> + os = hms <nl> + ; ; <nl> + harris ) <nl> + basic_machine = m88k - harris <nl> + os = sysv3 <nl> + ; ; <nl> + hp300 | hp300hpux ) <nl> + basic_machine = m68k - hp <nl> + os = hpux <nl> + ; ; <nl> + hp300bsd ) <nl> + basic_machine = m68k - hp <nl> + os = bsd <nl> + ; ; <nl> + hppaosf ) <nl> + basic_machine = hppa1 . 1 - hp <nl> + os = osf <nl> + ; ; <nl> + hppro ) <nl> + basic_machine = hppa1 . 
1 - hp <nl> + os = proelf <nl> + ; ; <nl> + i386mach ) <nl> + basic_machine = i386 - mach <nl> + os = mach <nl> + ; ; <nl> + isi68 | isi ) <nl> + basic_machine = m68k - isi <nl> + os = sysv <nl> + ; ; <nl> + m68knommu ) <nl> + basic_machine = m68k - unknown <nl> + os = linux <nl> + ; ; <nl> + magnum | m3230 ) <nl> + basic_machine = mips - mips <nl> + os = sysv <nl> + ; ; <nl> + merlin ) <nl> + basic_machine = ns32k - utek <nl> + os = sysv <nl> + ; ; <nl> + mingw64 ) <nl> + basic_machine = x86_64 - pc <nl> + os = mingw64 <nl> + ; ; <nl> + mingw32 ) <nl> + basic_machine = i686 - pc <nl> + os = mingw32 <nl> + ; ; <nl> + mingw32ce ) <nl> + basic_machine = arm - unknown <nl> + os = mingw32ce <nl> + ; ; <nl> + monitor ) <nl> + basic_machine = m68k - rom68k <nl> + os = coff <nl> + ; ; <nl> + morphos ) <nl> + basic_machine = powerpc - unknown <nl> + os = morphos <nl> + ; ; <nl> + moxiebox ) <nl> + basic_machine = moxie - unknown <nl> + os = moxiebox <nl> + ; ; <nl> + msdos ) <nl> + basic_machine = i386 - pc <nl> + os = msdos <nl> + ; ; <nl> + msys ) <nl> + basic_machine = i686 - pc <nl> + os = msys <nl> + ; ; <nl> + mvs ) <nl> + basic_machine = i370 - ibm <nl> + os = mvs <nl> + ; ; <nl> + nacl ) <nl> + basic_machine = le32 - unknown <nl> + os = nacl <nl> + ; ; <nl> + ncr3000 ) <nl> + basic_machine = i486 - ncr <nl> + os = sysv4 <nl> + ; ; <nl> + netbsd386 ) <nl> + basic_machine = i386 - pc <nl> + os = netbsd <nl> + ; ; <nl> + netwinder ) <nl> + basic_machine = armv4l - rebel <nl> + os = linux <nl> + ; ; <nl> + news | news700 | news800 | news900 ) <nl> + basic_machine = m68k - sony <nl> + os = newsos <nl> + ; ; <nl> + news1000 ) <nl> + basic_machine = m68030 - sony <nl> + os = newsos <nl> + ; ; <nl> + necv70 ) <nl> + basic_machine = v70 - nec <nl> + os = sysv <nl> + ; ; <nl> + nh3000 ) <nl> + basic_machine = m68k - harris <nl> + os = cxux <nl> + ; ; <nl> + nh [ 45 ] 000 ) <nl> + basic_machine = m88k - harris <nl> + os = cxux <nl> + ; ; <nl> + nindy960 ) <nl> + basic_machine = i960 - intel <nl> + os = nindy <nl> + ; ; <nl> + mon960 ) <nl> + basic_machine = i960 - intel <nl> + os = mon960 <nl> + ; ; <nl> + nonstopux ) <nl> + basic_machine = mips - compaq <nl> + os = nonstopux <nl> + ; ; <nl> + os400 ) <nl> + basic_machine = powerpc - ibm <nl> + os = os400 <nl> + ; ; <nl> + OSE68000 | ose68000 ) <nl> + basic_machine = m68000 - ericsson <nl> + os = ose <nl> + ; ; <nl> + os68k ) <nl> + basic_machine = m68k - none <nl> + os = os68k <nl> + ; ; <nl> + paragon ) <nl> + basic_machine = i860 - intel <nl> + os = osf <nl> + ; ; <nl> + parisc ) <nl> + basic_machine = hppa - unknown <nl> + os = linux <nl> + ; ; <nl> + pw32 ) <nl> + basic_machine = i586 - unknown <nl> + os = pw32 <nl> + ; ; <nl> + rdos | rdos64 ) <nl> + basic_machine = x86_64 - pc <nl> + os = rdos <nl> + ; ; <nl> + rdos32 ) <nl> + basic_machine = i386 - pc <nl> + os = rdos <nl> + ; ; <nl> + rom68k ) <nl> + basic_machine = m68k - rom68k <nl> + os = coff <nl> + ; ; <nl> + sa29200 ) <nl> + basic_machine = a29k - amd <nl> + os = udi <nl> + ; ; <nl> + sei ) <nl> + basic_machine = mips - sei <nl> + os = seiux <nl> + ; ; <nl> + sequent ) <nl> + basic_machine = i386 - sequent <nl> + os = <nl> + ; ; <nl> + sps7 ) <nl> + basic_machine = m68k - bull <nl> + os = sysv2 <nl> + ; ; <nl> + st2000 ) <nl> + basic_machine = m68k - tandem <nl> + os = <nl> + ; ; <nl> + stratus ) <nl> + basic_machine = i860 - stratus <nl> + os = sysv4 <nl> + ; ; <nl> + sun2 ) <nl> + basic_machine = m68000 - sun <nl> + os = <nl> + ; ; <nl> + sun2os3 ) <nl> + basic_machine = m68000 - sun 
<nl> + os = sunos3 <nl> + ; ; <nl> + sun2os4 ) <nl> + basic_machine = m68000 - sun <nl> + os = sunos4 <nl> + ; ; <nl> + sun3 ) <nl> + basic_machine = m68k - sun <nl> + os = <nl> + ; ; <nl> + sun3os3 ) <nl> + basic_machine = m68k - sun <nl> + os = sunos3 <nl> + ; ; <nl> + sun3os4 ) <nl> + basic_machine = m68k - sun <nl> + os = sunos4 <nl> + ; ; <nl> + sun4 ) <nl> + basic_machine = sparc - sun <nl> + os = <nl> + ; ; <nl> + sun4os3 ) <nl> + basic_machine = sparc - sun <nl> + os = sunos3 <nl> + ; ; <nl> + sun4os4 ) <nl> + basic_machine = sparc - sun <nl> + os = sunos4 <nl> + ; ; <nl> + sun4sol2 ) <nl> + basic_machine = sparc - sun <nl> + os = solaris2 <nl> + ; ; <nl> + sun386 | sun386i | roadrunner ) <nl> + basic_machine = i386 - sun <nl> + os = <nl> + ; ; <nl> + sv1 ) <nl> + basic_machine = sv1 - cray <nl> + os = unicos <nl> + ; ; <nl> + symmetry ) <nl> + basic_machine = i386 - sequent <nl> + os = dynix <nl> + ; ; <nl> + t3e ) <nl> + basic_machine = alphaev5 - cray <nl> + os = unicos <nl> + ; ; <nl> + t90 ) <nl> + basic_machine = t90 - cray <nl> + os = unicos <nl> + ; ; <nl> + toad1 ) <nl> + basic_machine = pdp10 - xkl <nl> + os = tops20 <nl> + ; ; <nl> + tpf ) <nl> + basic_machine = s390x - ibm <nl> + os = tpf <nl> + ; ; <nl> + udi29k ) <nl> + basic_machine = a29k - amd <nl> + os = udi <nl> + ; ; <nl> + ultra3 ) <nl> + basic_machine = a29k - nyu <nl> + os = sym1 <nl> + ; ; <nl> + v810 | necv810 ) <nl> + basic_machine = v810 - nec <nl> + os = none <nl> + ; ; <nl> + vaxv ) <nl> + basic_machine = vax - dec <nl> + os = sysv <nl> + ; ; <nl> + vms ) <nl> + basic_machine = vax - dec <nl> + os = vms <nl> + ; ; <nl> + vsta ) <nl> + basic_machine = i386 - pc <nl> + os = vsta <nl> + ; ; <nl> + vxworks960 ) <nl> + basic_machine = i960 - wrs <nl> + os = vxworks <nl> + ; ; <nl> + vxworks68 ) <nl> + basic_machine = m68k - wrs <nl> + os = vxworks <nl> + ; ; <nl> + vxworks29k ) <nl> + basic_machine = a29k - wrs <nl> + os = vxworks <nl> + ; ; <nl> + xbox ) <nl> + basic_machine = i686 - pc <nl> + os = mingw32 <nl> + ; ; <nl> + ymp ) <nl> + basic_machine = ymp - cray <nl> + os = unicos <nl> + ; ; <nl> + * ) <nl> + basic_machine = $ 1 <nl> + os = <nl> + ; ; <nl> + esac <nl> ; ; <nl> esac <nl> <nl> - # Decode aliases for certain CPU - COMPANY combinations . <nl> + # Decode 1 - component or ad - hoc basic machines <nl> case $ basic_machine in <nl> - # Recognize the basic CPU types without company name . <nl> - # Some are omitted here because they have special meanings below . <nl> - 1750a | 580 \ <nl> - | a29k \ <nl> - | aarch64 | aarch64_be \ <nl> - | alpha | alphaev [ 4 - 8 ] | alphaev56 | alphaev6 [ 78 ] | alphapca5 [ 67 ] \ <nl> - | alpha64 | alpha64ev [ 4 - 8 ] | alpha64ev56 | alpha64ev6 [ 78 ] | alpha64pca5 [ 67 ] \ <nl> - | am33_2 . 0 \ <nl> - | arc | arceb \ <nl> - | arm | arm [ bl ] e | arme [ lb ] | armv [ 2 - 8 ] | armv [ 3 - 8 ] [ lb ] | armv7 [ arm ] \ <nl> - | avr | avr32 \ <nl> - | be32 | be64 \ <nl> - | bfin \ <nl> - | c4x | c8051 | clipper \ <nl> - | d10v | d30v | dlx | dsp16xx \ <nl> - | epiphany \ <nl> - | fido | fr30 | frv \ <nl> - | h8300 | h8500 | hppa | hppa1 . [ 01 ] | hppa2 . 0 | hppa2 . 
0 [ nw ] | hppa64 \ <nl> - | hexagon \ <nl> - | i370 | i860 | i960 | ia64 \ <nl> - | ip2k | iq2000 \ <nl> - | k1om \ <nl> - | le32 | le64 \ <nl> - | lm32 \ <nl> - | m32c | m32r | m32rle | m68000 | m68k | m88k \ <nl> - | maxq | mb | microblaze | microblazeel | mcore | mep | metag \ <nl> - | mips | mipsbe | mipseb | mipsel | mipsle \ <nl> - | mips16 \ <nl> - | mips64 | mips64el \ <nl> - | mips64octeon | mips64octeonel \ <nl> - | mips64orion | mips64orionel \ <nl> - | mips64r5900 | mips64r5900el \ <nl> - | mips64vr | mips64vrel \ <nl> - | mips64vr4100 | mips64vr4100el \ <nl> - | mips64vr4300 | mips64vr4300el \ <nl> - | mips64vr5000 | mips64vr5000el \ <nl> - | mips64vr5900 | mips64vr5900el \ <nl> - | mipsisa32 | mipsisa32el \ <nl> - | mipsisa32r2 | mipsisa32r2el \ <nl> - | mipsisa32r6 | mipsisa32r6el \ <nl> - | mipsisa64 | mipsisa64el \ <nl> - | mipsisa64r2 | mipsisa64r2el \ <nl> - | mipsisa64r6 | mipsisa64r6el \ <nl> - | mipsisa64sb1 | mipsisa64sb1el \ <nl> - | mipsisa64sr71k | mipsisa64sr71kel \ <nl> - | mipsr5900 | mipsr5900el \ <nl> - | mipstx39 | mipstx39el \ <nl> - | mn10200 | mn10300 \ <nl> - | moxie \ <nl> - | mt \ <nl> - | msp430 \ <nl> - | nds32 | nds32le | nds32be \ <nl> - | nios | nios2 | nios2eb | nios2el \ <nl> - | ns16k | ns32k \ <nl> - | open8 | or1k | or1knd | or32 \ <nl> - | pdp10 | pdp11 | pj | pjl \ <nl> - | powerpc | powerpc64 | powerpc64le | powerpcle \ <nl> - | pyramid \ <nl> - | riscv32 | riscv64 \ <nl> - | rl78 | rx \ <nl> - | score \ <nl> - | sh | sh [ 1234 ] | sh [ 24 ] a | sh [ 24 ] aeb | sh [ 23 ] e | sh [ 34 ] eb | sheb | shbe | shle | sh [ 1234 ] le | sh3ele \ <nl> - | sh64 | sh64le \ <nl> - | sparc | sparc64 | sparc64b | sparc64v | sparc86x | sparclet | sparclite \ <nl> - | sparcv8 | sparcv9 | sparcv9b | sparcv9v \ <nl> - | spu \ <nl> - | tahoe | tic4x | tic54x | tic55x | tic6x | tic80 | tron \ <nl> - | ubicom32 \ <nl> - | v850 | v850e | v850e1 | v850e2 | v850es | v850e2v3 \ <nl> - | visium \ <nl> - | we32k \ <nl> - | x86 | xc16x | xstormy16 | xtensa \ <nl> - | z8k | z80 ) <nl> - basic_machine = $ basic_machine - unknown <nl> - ; ; <nl> - c54x ) <nl> - basic_machine = tic54x - unknown <nl> - ; ; <nl> - c55x ) <nl> - basic_machine = tic55x - unknown <nl> - ; ; <nl> - c6x ) <nl> - basic_machine = tic6x - unknown <nl> - ; ; <nl> - leon | leon [ 3 - 9 ] ) <nl> - basic_machine = sparc - $ basic_machine <nl> - ; ; <nl> - m6811 | m68hc11 | m6812 | m68hc12 | m68hcs12x | nvptx | picochip ) <nl> - basic_machine = $ basic_machine - unknown <nl> - os = - none <nl> + # Here we handle the default manufacturer of certain CPU types . It is in <nl> + # some cases the only manufacturer , in others , it is the most popular . <nl> + w89k ) <nl> + cpu = hppa1 . 1 <nl> + vendor = winbond <nl> ; ; <nl> - m88110 | m680 [ 12346 ] 0 | m683 ? 2 | m68360 | m5200 | v70 | w65 | z8k ) <nl> + op50n ) <nl> + cpu = hppa1 . 1 <nl> + vendor = oki <nl> ; ; <nl> - ms1 ) <nl> - basic_machine = mt - unknown <nl> + op60c ) <nl> + cpu = hppa1 . 
1 <nl> + vendor = oki <nl> ; ; <nl> - <nl> - strongarm | thumb | xscale ) <nl> - basic_machine = arm - unknown <nl> + ibm * ) <nl> + cpu = i370 <nl> + vendor = ibm <nl> ; ; <nl> - xgate ) <nl> - basic_machine = $ basic_machine - unknown <nl> - os = - none <nl> + orion105 ) <nl> + cpu = clipper <nl> + vendor = highlevel <nl> ; ; <nl> - xscaleeb ) <nl> - basic_machine = armeb - unknown <nl> + mac | mpw | mac - mpw ) <nl> + cpu = m68k <nl> + vendor = apple <nl> ; ; <nl> - <nl> - xscaleel ) <nl> - basic_machine = armel - unknown <nl> + pmac | pmac - mpw ) <nl> + cpu = powerpc <nl> + vendor = apple <nl> ; ; <nl> <nl> - # We use ` pc ' rather than ` unknown ' <nl> - # because ( 1 ) that ' s what they normally are , and <nl> - # ( 2 ) the word " unknown " tends to confuse beginning users . <nl> - i * 86 | x86_64 ) <nl> - basic_machine = $ basic_machine - pc <nl> - ; ; <nl> - # Object if more than one company name word . <nl> - * - * - * ) <nl> - echo Invalid configuration \ ` $ 1 \ ' : machine \ ` $ basic_machine \ ' not recognized 1 > & 2 <nl> - exit 1 <nl> - ; ; <nl> - # Recognize the basic CPU types with company name . <nl> - 580 - * \ <nl> - | a29k - * \ <nl> - | aarch64 - * | aarch64_be - * \ <nl> - | alpha - * | alphaev [ 4 - 8 ] - * | alphaev56 - * | alphaev6 [ 78 ] - * \ <nl> - | alpha64 - * | alpha64ev [ 4 - 8 ] - * | alpha64ev56 - * | alpha64ev6 [ 78 ] - * \ <nl> - | alphapca5 [ 67 ] - * | alpha64pca5 [ 67 ] - * | arc - * | arceb - * \ <nl> - | arm - * | armbe - * | armle - * | armeb - * | armv * - * \ <nl> - | avr - * | avr32 - * \ <nl> - | be32 - * | be64 - * \ <nl> - | bfin - * | bs2000 - * \ <nl> - | c [ 123 ] * | c30 - * | [ cjt ] 90 - * | c4x - * \ <nl> - | c8051 - * | clipper - * | craynv - * | cydra - * \ <nl> - | d10v - * | d30v - * | dlx - * \ <nl> - | elxsi - * \ <nl> - | f30 [ 01 ] - * | f700 - * | fido - * | fr30 - * | frv - * | fx80 - * \ <nl> - | h8300 - * | h8500 - * \ <nl> - | hppa - * | hppa1 . [ 01 ] - * | hppa2 . 0 - * | hppa2 . 0 [ nw ] - * | hppa64 - * \ <nl> - | hexagon - * \ <nl> - | i * 86 - * | i860 - * | i960 - * | ia64 - * \ <nl> - | ip2k - * | iq2000 - * \ <nl> - | k1om - * \ <nl> - | le32 - * | le64 - * \ <nl> - | lm32 - * \ <nl> - | m32c - * | m32r - * | m32rle - * \ <nl> - | m68000 - * | m680 [ 012346 ] 0 - * | m68360 - * | m683 ? 
2 - * | m68k - * \ <nl> - | m88110 - * | m88k - * | maxq - * | mcore - * | metag - * \ <nl> - | microblaze - * | microblazeel - * \ <nl> - | mips - * | mipsbe - * | mipseb - * | mipsel - * | mipsle - * \ <nl> - | mips16 - * \ <nl> - | mips64 - * | mips64el - * \ <nl> - | mips64octeon - * | mips64octeonel - * \ <nl> - | mips64orion - * | mips64orionel - * \ <nl> - | mips64r5900 - * | mips64r5900el - * \ <nl> - | mips64vr - * | mips64vrel - * \ <nl> - | mips64vr4100 - * | mips64vr4100el - * \ <nl> - | mips64vr4300 - * | mips64vr4300el - * \ <nl> - | mips64vr5000 - * | mips64vr5000el - * \ <nl> - | mips64vr5900 - * | mips64vr5900el - * \ <nl> - | mipsisa32 - * | mipsisa32el - * \ <nl> - | mipsisa32r2 - * | mipsisa32r2el - * \ <nl> - | mipsisa32r6 - * | mipsisa32r6el - * \ <nl> - | mipsisa64 - * | mipsisa64el - * \ <nl> - | mipsisa64r2 - * | mipsisa64r2el - * \ <nl> - | mipsisa64r6 - * | mipsisa64r6el - * \ <nl> - | mipsisa64sb1 - * | mipsisa64sb1el - * \ <nl> - | mipsisa64sr71k - * | mipsisa64sr71kel - * \ <nl> - | mipsr5900 - * | mipsr5900el - * \ <nl> - | mipstx39 - * | mipstx39el - * \ <nl> - | mmix - * \ <nl> - | mt - * \ <nl> - | msp430 - * \ <nl> - | nds32 - * | nds32le - * | nds32be - * \ <nl> - | nios - * | nios2 - * | nios2eb - * | nios2el - * \ <nl> - | none - * | np1 - * | ns16k - * | ns32k - * \ <nl> - | open8 - * \ <nl> - | or1k * - * \ <nl> - | orion - * \ <nl> - | pdp10 - * | pdp11 - * | pj - * | pjl - * | pn - * | power - * \ <nl> - | powerpc - * | powerpc64 - * | powerpc64le - * | powerpcle - * \ <nl> - | pyramid - * \ <nl> - | rl78 - * | romp - * | rs6000 - * | rx - * \ <nl> - | sh - * | sh [ 1234 ] - * | sh [ 24 ] a - * | sh [ 24 ] aeb - * | sh [ 23 ] e - * | sh [ 34 ] eb - * | sheb - * | shbe - * \ <nl> - | shle - * | sh [ 1234 ] le - * | sh3ele - * | sh64 - * | sh64le - * \ <nl> - | sparc - * | sparc64 - * | sparc64b - * | sparc64v - * | sparc86x - * | sparclet - * \ <nl> - | sparclite - * \ <nl> - | sparcv8 - * | sparcv9 - * | sparcv9b - * | sparcv9v - * | sv1 - * | sx ? - * \ <nl> - | tahoe - * \ <nl> - | tic30 - * | tic4x - * | tic54x - * | tic55x - * | tic6x - * | tic80 - * \ <nl> - | tile * - * \ <nl> - | tron - * \ <nl> - | ubicom32 - * \ <nl> - | v850 - * | v850e - * | v850e1 - * | v850es - * | v850e2 - * | v850e2v3 - * \ <nl> - | vax - * \ <nl> - | visium - * \ <nl> - | we32k - * \ <nl> - | x86 - * | x86_64 - * | xc16x - * | xps100 - * \ <nl> - | xstormy16 - * | xtensa * - * \ <nl> - | ymp - * \ <nl> - | z8k - * | z80 - * ) <nl> - ; ; <nl> - # Recognize the basic CPU types without company name , with glob match . <nl> - xtensa * ) <nl> - basic_machine = $ basic_machine - unknown <nl> - ; ; <nl> # Recognize the various machine names and aliases which stand <nl> # for a CPU type and a company and sometimes even an OS . 
<nl> - 386bsd ) <nl> - basic_machine = i386 - unknown <nl> - os = - bsd <nl> - ; ; <nl> 3b1 | 7300 | 7300 - att | att - 7300 | pc7300 | safari | unixpc ) <nl> - basic_machine = m68000 - att <nl> + cpu = m68000 <nl> + vendor = att <nl> ; ; <nl> 3b * ) <nl> - basic_machine = we32k - att <nl> - ; ; <nl> - a29khif ) <nl> - basic_machine = a29k - amd <nl> - os = - udi <nl> - ; ; <nl> - abacus ) <nl> - basic_machine = abacus - unknown <nl> - ; ; <nl> - adobe68k ) <nl> - basic_machine = m68010 - adobe <nl> - os = - scout <nl> - ; ; <nl> - alliant | fx80 ) <nl> - basic_machine = fx80 - alliant <nl> - ; ; <nl> - altos | altos3068 ) <nl> - basic_machine = m68k - altos <nl> - ; ; <nl> - am29k ) <nl> - basic_machine = a29k - none <nl> - os = - bsd <nl> - ; ; <nl> - amd64 ) <nl> - basic_machine = x86_64 - pc <nl> - ; ; <nl> - amd64 - * ) <nl> - basic_machine = x86_64 - ` echo $ basic_machine | sed ' s / ^ [ ^ - ] * - / / ' ` <nl> - ; ; <nl> - amdahl ) <nl> - basic_machine = 580 - amdahl <nl> - os = - sysv <nl> - ; ; <nl> - amiga | amiga - * ) <nl> - basic_machine = m68k - unknown <nl> - ; ; <nl> - amigaos | amigados ) <nl> - basic_machine = m68k - unknown <nl> - os = - amigaos <nl> - ; ; <nl> - amigaunix | amix ) <nl> - basic_machine = m68k - unknown <nl> - os = - sysv4 <nl> - ; ; <nl> - apollo68 ) <nl> - basic_machine = m68k - apollo <nl> - os = - sysv <nl> - ; ; <nl> - apollo68bsd ) <nl> - basic_machine = m68k - apollo <nl> - os = - bsd <nl> - ; ; <nl> - aros ) <nl> - basic_machine = i386 - pc <nl> - os = - aros <nl> - ; ; <nl> - aux ) <nl> - basic_machine = m68k - apple <nl> - os = - aux <nl> - ; ; <nl> - balance ) <nl> - basic_machine = ns32k - sequent <nl> - os = - dynix <nl> - ; ; <nl> - blackfin ) <nl> - basic_machine = bfin - unknown <nl> - os = - linux <nl> - ; ; <nl> - blackfin - * ) <nl> - basic_machine = bfin - ` echo $ basic_machine | sed ' s / ^ [ ^ - ] * - / / ' ` <nl> - os = - linux <nl> + cpu = we32k <nl> + vendor = att <nl> ; ; <nl> bluegene * ) <nl> - basic_machine = powerpc - ibm <nl> - os = - cnk <nl> - ; ; <nl> - c54x - * ) <nl> - basic_machine = tic54x - ` echo $ basic_machine | sed ' s / ^ [ ^ - ] * - / / ' ` <nl> - ; ; <nl> - c55x - * ) <nl> - basic_machine = tic55x - ` echo $ basic_machine | sed ' s / ^ [ ^ - ] * - / / ' ` <nl> - ; ; <nl> - c6x - * ) <nl> - basic_machine = tic6x - ` echo $ basic_machine | sed ' s / ^ [ ^ - ] * - / / ' ` <nl> - ; ; <nl> - c90 ) <nl> - basic_machine = c90 - cray <nl> - os = - unicos <nl> - ; ; <nl> - cegcc ) <nl> - basic_machine = arm - unknown <nl> - os = - cegcc <nl> - ; ; <nl> - convex - c1 ) <nl> - basic_machine = c1 - convex <nl> - os = - bsd <nl> - ; ; <nl> - convex - c2 ) <nl> - basic_machine = c2 - convex <nl> - os = - bsd <nl> - ; ; <nl> - convex - c32 ) <nl> - basic_machine = c32 - convex <nl> - os = - bsd <nl> - ; ; <nl> - convex - c34 ) <nl> - basic_machine = c34 - convex <nl> - os = - bsd <nl> - ; ; <nl> - convex - c38 ) <nl> - basic_machine = c38 - convex <nl> - os = - bsd <nl> - ; ; <nl> - cray | j90 ) <nl> - basic_machine = j90 - cray <nl> - os = - unicos <nl> - ; ; <nl> - craynv ) <nl> - basic_machine = craynv - cray <nl> - os = - unicosmp <nl> - ; ; <nl> - cr16 | cr16 - * ) <nl> - basic_machine = cr16 - unknown <nl> - os = - elf <nl> - ; ; <nl> - crds | unos ) <nl> - basic_machine = m68k - crds <nl> - ; ; <nl> - crisv32 | crisv32 - * | etraxfs * ) <nl> - basic_machine = crisv32 - axis <nl> - ; ; <nl> - cris | cris - * | etrax * ) <nl> - basic_machine = cris - axis <nl> - ; ; <nl> - crx ) <nl> - basic_machine = crx - unknown 
<nl> - os = - elf <nl> - ; ; <nl> - da30 | da30 - * ) <nl> - basic_machine = m68k - da30 <nl> - ; ; <nl> - decstation | decstation - 3100 | pmax | pmax - * | pmin | dec3100 | decstatn ) <nl> - basic_machine = mips - dec <nl> + cpu = powerpc <nl> + vendor = ibm <nl> + os = cnk <nl> ; ; <nl> decsystem10 * | dec10 * ) <nl> - basic_machine = pdp10 - dec <nl> - os = - tops10 <nl> + cpu = pdp10 <nl> + vendor = dec <nl> + os = tops10 <nl> ; ; <nl> decsystem20 * | dec20 * ) <nl> - basic_machine = pdp10 - dec <nl> - os = - tops20 <nl> + cpu = pdp10 <nl> + vendor = dec <nl> + os = tops20 <nl> ; ; <nl> delta | 3300 | motorola - 3300 | motorola - delta \ <nl> | 3300 - motorola | delta - motorola ) <nl> - basic_machine = m68k - motorola <nl> - ; ; <nl> - delta88 ) <nl> - basic_machine = m88k - motorola <nl> - os = - sysv3 <nl> - ; ; <nl> - dicos ) <nl> - basic_machine = i686 - pc <nl> - os = - dicos <nl> + cpu = m68k <nl> + vendor = motorola <nl> ; ; <nl> - djgpp ) <nl> - basic_machine = i586 - pc <nl> - os = - msdosdjgpp <nl> - ; ; <nl> - dpx20 | dpx20 - * ) <nl> - basic_machine = rs6000 - bull <nl> - os = - bosx <nl> - ; ; <nl> - dpx2 * | dpx2 * - bull ) <nl> - basic_machine = m68k - bull <nl> - os = - sysv3 <nl> - ; ; <nl> - ebmon29k ) <nl> - basic_machine = a29k - amd <nl> - os = - ebmon <nl> - ; ; <nl> - elxsi ) <nl> - basic_machine = elxsi - elxsi <nl> - os = - bsd <nl> + dpx2 * ) <nl> + cpu = m68k <nl> + vendor = bull <nl> + os = sysv3 <nl> ; ; <nl> encore | umax | mmax ) <nl> - basic_machine = ns32k - encore <nl> + cpu = ns32k <nl> + vendor = encore <nl> ; ; <nl> - es1800 | OSE68k | ose68k | ose | OSE ) <nl> - basic_machine = m68k - ericsson <nl> - os = - ose <nl> + elxsi ) <nl> + cpu = elxsi <nl> + vendor = elxsi <nl> + os = $ { os : - bsd } <nl> ; ; <nl> fx2800 ) <nl> - basic_machine = i860 - alliant <nl> + cpu = i860 <nl> + vendor = alliant <nl> ; ; <nl> genix ) <nl> - basic_machine = ns32k - ns <nl> - ; ; <nl> - gmicro ) <nl> - basic_machine = tron - gmicro <nl> - os = - sysv <nl> - ; ; <nl> - go32 ) <nl> - basic_machine = i386 - pc <nl> - os = - go32 <nl> + cpu = ns32k <nl> + vendor = ns <nl> ; ; <nl> h3050r * | hiux * ) <nl> - basic_machine = hppa1 . 1 - hitachi <nl> - os = - hiuxwe2 <nl> - ; ; <nl> - h8300hms ) <nl> - basic_machine = h8300 - hitachi <nl> - os = - hms <nl> - ; ; <nl> - h8300xray ) <nl> - basic_machine = h8300 - hitachi <nl> - os = - xray <nl> - ; ; <nl> - h8500hms ) <nl> - basic_machine = h8500 - hitachi <nl> - os = - hms <nl> - ; ; <nl> - harris ) <nl> - basic_machine = m88k - harris <nl> - os = - sysv3 <nl> - ; ; <nl> - hp300 - * ) <nl> - basic_machine = m68k - hp <nl> - ; ; <nl> - hp300bsd ) <nl> - basic_machine = m68k - hp <nl> - os = - bsd <nl> - ; ; <nl> - hp300hpux ) <nl> - basic_machine = m68k - hp <nl> - os = - hpux <nl> + cpu = hppa1 . 1 <nl> + vendor = hitachi <nl> + os = hiuxwe2 <nl> ; ; <nl> hp3k9 [ 0 - 9 ] [ 0 - 9 ] | hp9 [ 0 - 9 ] [ 0 - 9 ] ) <nl> - basic_machine = hppa1 . 0 - hp <nl> + cpu = hppa1 . 0 <nl> + vendor = hp <nl> ; ; <nl> hp9k2 [ 0 - 9 ] [ 0 - 9 ] | hp9k31 [ 0 - 9 ] ) <nl> - basic_machine = m68000 - hp <nl> + cpu = m68000 <nl> + vendor = hp <nl> ; ; <nl> hp9k3 [ 2 - 9 ] [ 0 - 9 ] ) <nl> - basic_machine = m68k - hp <nl> + cpu = m68k <nl> + vendor = hp <nl> ; ; <nl> hp9k6 [ 0 - 9 ] [ 0 - 9 ] | hp6 [ 0 - 9 ] [ 0 - 9 ] ) <nl> - basic_machine = hppa1 . 0 - hp <nl> + cpu = hppa1 . 0 <nl> + vendor = hp <nl> ; ; <nl> hp9k7 [ 0 - 79 ] [ 0 - 9 ] | hp7 [ 0 - 79 ] [ 0 - 9 ] ) <nl> - basic_machine = hppa1 . 1 - hp <nl> + cpu = hppa1 . 
1 <nl> + vendor = hp <nl> ; ; <nl> hp9k78 [ 0 - 9 ] | hp78 [ 0 - 9 ] ) <nl> # FIXME : really hppa2 . 0 - hp <nl> - basic_machine = hppa1 . 1 - hp <nl> + cpu = hppa1 . 1 <nl> + vendor = hp <nl> ; ; <nl> hp9k8 [ 67 ] 1 | hp8 [ 67 ] 1 | hp9k80 [ 24 ] | hp80 [ 24 ] | hp9k8 [ 78 ] 9 | hp8 [ 78 ] 9 | hp9k893 | hp893 ) <nl> # FIXME : really hppa2 . 0 - hp <nl> - basic_machine = hppa1 . 1 - hp <nl> + cpu = hppa1 . 1 <nl> + vendor = hp <nl> ; ; <nl> hp9k8 [ 0 - 9 ] [ 13679 ] | hp8 [ 0 - 9 ] [ 13679 ] ) <nl> - basic_machine = hppa1 . 1 - hp <nl> + cpu = hppa1 . 1 <nl> + vendor = hp <nl> ; ; <nl> hp9k8 [ 0 - 9 ] [ 0 - 9 ] | hp8 [ 0 - 9 ] [ 0 - 9 ] ) <nl> - basic_machine = hppa1 . 0 - hp <nl> - ; ; <nl> - hppa - next ) <nl> - os = - nextstep3 <nl> - ; ; <nl> - hppaosf ) <nl> - basic_machine = hppa1 . 1 - hp <nl> - os = - osf <nl> - ; ; <nl> - hppro ) <nl> - basic_machine = hppa1 . 1 - hp <nl> - os = - proelf <nl> - ; ; <nl> - i370 - ibm * | ibm * ) <nl> - basic_machine = i370 - ibm <nl> + cpu = hppa1 . 0 <nl> + vendor = hp <nl> ; ; <nl> i * 86v32 ) <nl> - basic_machine = ` echo $ 1 | sed - e ' s / 86 . * / 86 - pc / ' ` <nl> - os = - sysv32 <nl> + cpu = ` echo " $ 1 " | sed - e ' s / 86 . * / 86 / ' ` <nl> + vendor = pc <nl> + os = sysv32 <nl> ; ; <nl> i * 86v4 * ) <nl> - basic_machine = ` echo $ 1 | sed - e ' s / 86 . * / 86 - pc / ' ` <nl> - os = - sysv4 <nl> + cpu = ` echo " $ 1 " | sed - e ' s / 86 . * / 86 / ' ` <nl> + vendor = pc <nl> + os = sysv4 <nl> ; ; <nl> i * 86v ) <nl> - basic_machine = ` echo $ 1 | sed - e ' s / 86 . * / 86 - pc / ' ` <nl> - os = - sysv <nl> + cpu = ` echo " $ 1 " | sed - e ' s / 86 . * / 86 / ' ` <nl> + vendor = pc <nl> + os = sysv <nl> ; ; <nl> i * 86sol2 ) <nl> - basic_machine = ` echo $ 1 | sed - e ' s / 86 . * / 86 - pc / ' ` <nl> - os = - solaris2 <nl> - ; ; <nl> - i386mach ) <nl> - basic_machine = i386 - mach <nl> - os = - mach <nl> + cpu = ` echo " $ 1 " | sed - e ' s / 86 . * / 86 / ' ` <nl> + vendor = pc <nl> + os = solaris2 <nl> ; ; <nl> - i386 - vsta | vsta ) <nl> - basic_machine = i386 - unknown <nl> - os = - vsta <nl> + j90 | j90 - cray ) <nl> + cpu = j90 <nl> + vendor = cray <nl> + os = $ { os : - unicos } <nl> ; ; <nl> iris | iris4d ) <nl> - basic_machine = mips - sgi <nl> + cpu = mips <nl> + vendor = sgi <nl> case $ os in <nl> - - irix * ) <nl> + irix * ) <nl> ; ; <nl> * ) <nl> - os = - irix4 <nl> + os = irix4 <nl> ; ; <nl> esac <nl> ; ; <nl> - isi68 | isi ) <nl> - basic_machine = m68k - isi <nl> - os = - sysv <nl> - ; ; <nl> - leon - * | leon [ 3 - 9 ] - * ) <nl> - basic_machine = sparc - ` echo $ basic_machine | sed ' s / - . 
* / / ' ` <nl> - ; ; <nl> - m68knommu ) <nl> - basic_machine = m68k - unknown <nl> - os = - linux <nl> - ; ; <nl> - m68knommu - * ) <nl> - basic_machine = m68k - ` echo $ basic_machine | sed ' s / ^ [ ^ - ] * - / / ' ` <nl> - os = - linux <nl> - ; ; <nl> - m88k - omron * ) <nl> - basic_machine = m88k - omron <nl> - ; ; <nl> - magnum | m3230 ) <nl> - basic_machine = mips - mips <nl> - os = - sysv <nl> - ; ; <nl> - merlin ) <nl> - basic_machine = ns32k - utek <nl> - os = - sysv <nl> - ; ; <nl> - microblaze * ) <nl> - basic_machine = microblaze - xilinx <nl> - ; ; <nl> - mingw64 ) <nl> - basic_machine = x86_64 - pc <nl> - os = - mingw64 <nl> - ; ; <nl> - mingw32 ) <nl> - basic_machine = i686 - pc <nl> - os = - mingw32 <nl> - ; ; <nl> - mingw32ce ) <nl> - basic_machine = arm - unknown <nl> - os = - mingw32ce <nl> - ; ; <nl> miniframe ) <nl> - basic_machine = m68000 - convergent <nl> - ; ; <nl> - * mint | - mint [ 0 - 9 ] * | * MiNT | * MiNT [ 0 - 9 ] * ) <nl> - basic_machine = m68k - atari <nl> - os = - mint <nl> - ; ; <nl> - mips3 * - * ) <nl> - basic_machine = ` echo $ basic_machine | sed - e ' s / mips3 / mips64 / ' ` <nl> - ; ; <nl> - mips3 * ) <nl> - basic_machine = ` echo $ basic_machine | sed - e ' s / mips3 / mips64 / ' ` - unknown <nl> - ; ; <nl> - monitor ) <nl> - basic_machine = m68k - rom68k <nl> - os = - coff <nl> - ; ; <nl> - morphos ) <nl> - basic_machine = powerpc - unknown <nl> - os = - morphos <nl> - ; ; <nl> - moxiebox ) <nl> - basic_machine = moxie - unknown <nl> - os = - moxiebox <nl> - ; ; <nl> - msdos ) <nl> - basic_machine = i386 - pc <nl> - os = - msdos <nl> - ; ; <nl> - ms1 - * ) <nl> - basic_machine = ` echo $ basic_machine | sed - e ' s / ms1 - / mt - / ' ` <nl> - ; ; <nl> - msys ) <nl> - basic_machine = i686 - pc <nl> - os = - msys <nl> - ; ; <nl> - mvs ) <nl> - basic_machine = i370 - ibm <nl> - os = - mvs <nl> + cpu = m68000 <nl> + vendor = convergent <nl> ; ; <nl> - nacl ) <nl> - basic_machine = le32 - unknown <nl> - os = - nacl <nl> - ; ; <nl> - ncr3000 ) <nl> - basic_machine = i486 - ncr <nl> - os = - sysv4 <nl> - ; ; <nl> - netbsd386 ) <nl> - basic_machine = i386 - unknown <nl> - os = - netbsd <nl> - ; ; <nl> - netwinder ) <nl> - basic_machine = armv4l - rebel <nl> - os = - linux <nl> - ; ; <nl> - news | news700 | news800 | news900 ) <nl> - basic_machine = m68k - sony <nl> - os = - newsos <nl> - ; ; <nl> - news1000 ) <nl> - basic_machine = m68030 - sony <nl> - os = - newsos <nl> + * mint | mint [ 0 - 9 ] * | * MiNT | * MiNT [ 0 - 9 ] * ) <nl> + cpu = m68k <nl> + vendor = atari <nl> + os = mint <nl> ; ; <nl> news - 3600 | risc - news ) <nl> - basic_machine = mips - sony <nl> - os = - newsos <nl> - ; ; <nl> - necv70 ) <nl> - basic_machine = v70 - nec <nl> - os = - sysv <nl> + cpu = mips <nl> + vendor = sony <nl> + os = newsos <nl> ; ; <nl> - next | m * - next ) <nl> - basic_machine = m68k - next <nl> + next | m * - next ) <nl> + cpu = m68k <nl> + vendor = next <nl> case $ os in <nl> - - nextstep * ) <nl> + openstep * ) <nl> + ; ; <nl> + nextstep * ) <nl> ; ; <nl> - - ns2 * ) <nl> - os = - nextstep2 <nl> + ns2 * ) <nl> + os = nextstep2 <nl> ; ; <nl> * ) <nl> - os = - nextstep3 <nl> + os = nextstep3 <nl> ; ; <nl> esac <nl> ; ; <nl> - nh3000 ) <nl> - basic_machine = m68k - harris <nl> - os = - cxux <nl> - ; ; <nl> - nh [ 45 ] 000 ) <nl> - basic_machine = m88k - harris <nl> - os = - cxux <nl> - ; ; <nl> - nindy960 ) <nl> - basic_machine = i960 - intel <nl> - os = - nindy <nl> - ; ; <nl> - mon960 ) <nl> - basic_machine = i960 - intel <nl> - os = - mon960 <nl> - ; ; 
<nl> - nonstopux ) <nl> - basic_machine = mips - compaq <nl> - os = - nonstopux <nl> - ; ; <nl> np1 ) <nl> - basic_machine = np1 - gould <nl> - ; ; <nl> - neo - tandem ) <nl> - basic_machine = neo - tandem <nl> - ; ; <nl> - nse - tandem ) <nl> - basic_machine = nse - tandem <nl> - ; ; <nl> - nsr - tandem ) <nl> - basic_machine = nsr - tandem <nl> + cpu = np1 <nl> + vendor = gould <nl> ; ; <nl> op50n - * | op60c - * ) <nl> - basic_machine = hppa1 . 1 - oki <nl> - os = - proelf <nl> - ; ; <nl> - openrisc | openrisc - * ) <nl> - basic_machine = or32 - unknown <nl> - ; ; <nl> - os400 ) <nl> - basic_machine = powerpc - ibm <nl> - os = - os400 <nl> - ; ; <nl> - OSE68000 | ose68000 ) <nl> - basic_machine = m68000 - ericsson <nl> - os = - ose <nl> - ; ; <nl> - os68k ) <nl> - basic_machine = m68k - none <nl> - os = - os68k <nl> + cpu = hppa1 . 1 <nl> + vendor = oki <nl> + os = proelf <nl> ; ; <nl> pa - hitachi ) <nl> - basic_machine = hppa1 . 1 - hitachi <nl> - os = - hiuxwe2 <nl> - ; ; <nl> - paragon ) <nl> - basic_machine = i860 - intel <nl> - os = - osf <nl> - ; ; <nl> - parisc ) <nl> - basic_machine = hppa - unknown <nl> - os = - linux <nl> - ; ; <nl> - parisc - * ) <nl> - basic_machine = hppa - ` echo $ basic_machine | sed ' s / ^ [ ^ - ] * - / / ' ` <nl> - os = - linux <nl> + cpu = hppa1 . 1 <nl> + vendor = hitachi <nl> + os = hiuxwe2 <nl> ; ; <nl> pbd ) <nl> - basic_machine = sparc - tti <nl> + cpu = sparc <nl> + vendor = tti <nl> ; ; <nl> pbb ) <nl> - basic_machine = m68k - tti <nl> - ; ; <nl> - pc532 | pc532 - * ) <nl> - basic_machine = ns32k - pc532 <nl> + cpu = m68k <nl> + vendor = tti <nl> ; ; <nl> - pc98 ) <nl> - basic_machine = i386 - pc <nl> - ; ; <nl> - pc98 - * ) <nl> - basic_machine = i386 - ` echo $ basic_machine | sed ' s / ^ [ ^ - ] * - / / ' ` <nl> - ; ; <nl> - pentium | p5 | k5 | k6 | nexgen | viac3 ) <nl> - basic_machine = i586 - pc <nl> - ; ; <nl> - pentiumpro | p6 | 6x86 | athlon | athlon_ * ) <nl> - basic_machine = i686 - pc <nl> - ; ; <nl> - pentiumii | pentium2 | pentiumiii | pentium3 ) <nl> - basic_machine = i686 - pc <nl> - ; ; <nl> - pentium4 ) <nl> - basic_machine = i786 - pc <nl> - ; ; <nl> - pentium - * | p5 - * | k5 - * | k6 - * | nexgen - * | viac3 - * ) <nl> - basic_machine = i586 - ` echo $ basic_machine | sed ' s / ^ [ ^ - ] * - / / ' ` <nl> - ; ; <nl> - pentiumpro - * | p6 - * | 6x86 - * | athlon - * ) <nl> - basic_machine = i686 - ` echo $ basic_machine | sed ' s / ^ [ ^ - ] * - / / ' ` <nl> - ; ; <nl> - pentiumii - * | pentium2 - * | pentiumiii - * | pentium3 - * ) <nl> - basic_machine = i686 - ` echo $ basic_machine | sed ' s / ^ [ ^ - ] * - / / ' ` <nl> - ; ; <nl> - pentium4 - * ) <nl> - basic_machine = i786 - ` echo $ basic_machine | sed ' s / ^ [ ^ - ] * - / / ' ` <nl> + pc532 ) <nl> + cpu = ns32k <nl> + vendor = pc532 <nl> ; ; <nl> pn ) <nl> - basic_machine = pn - gould <nl> - ; ; <nl> - power ) basic_machine = power - ibm <nl> + cpu = pn <nl> + vendor = gould <nl> ; ; <nl> - ppc | ppcbe ) basic_machine = powerpc - unknown <nl> - ; ; <nl> - ppc - * | ppcbe - * ) <nl> - basic_machine = powerpc - ` echo $ basic_machine | sed ' s / ^ [ ^ - ] * - / / ' ` <nl> - ; ; <nl> - ppcle | powerpclittle | ppc - le | powerpc - little ) <nl> - basic_machine = powerpcle - unknown <nl> - ; ; <nl> - ppcle - * | powerpclittle - * ) <nl> - basic_machine = powerpcle - ` echo $ basic_machine | sed ' s / ^ [ ^ - ] * - / / ' ` <nl> - ; ; <nl> - ppc64 ) basic_machine = powerpc64 - unknown <nl> - ; ; <nl> - ppc64 - * ) basic_machine = powerpc64 - ` echo $ basic_machine | sed ' 
s / ^ [ ^ - ] * - / / ' ` <nl> - ; ; <nl> - ppc64le | powerpc64little | ppc64 - le | powerpc64 - little ) <nl> - basic_machine = powerpc64le - unknown <nl> - ; ; <nl> - ppc64le - * | powerpc64little - * ) <nl> - basic_machine = powerpc64le - ` echo $ basic_machine | sed ' s / ^ [ ^ - ] * - / / ' ` <nl> + power ) <nl> + cpu = power <nl> + vendor = ibm <nl> ; ; <nl> ps2 ) <nl> - basic_machine = i386 - ibm <nl> - ; ; <nl> - pw32 ) <nl> - basic_machine = i586 - unknown <nl> - os = - pw32 <nl> - ; ; <nl> - rdos | rdos64 ) <nl> - basic_machine = x86_64 - pc <nl> - os = - rdos <nl> - ; ; <nl> - rdos32 ) <nl> - basic_machine = i386 - pc <nl> - os = - rdos <nl> - ; ; <nl> - rom68k ) <nl> - basic_machine = m68k - rom68k <nl> - os = - coff <nl> + cpu = i386 <nl> + vendor = ibm <nl> ; ; <nl> rm [ 46 ] 00 ) <nl> - basic_machine = mips - siemens <nl> + cpu = mips <nl> + vendor = siemens <nl> ; ; <nl> rtpc | rtpc - * ) <nl> - basic_machine = romp - ibm <nl> - ; ; <nl> - s390 | s390 - * ) <nl> - basic_machine = s390 - ibm <nl> + cpu = romp <nl> + vendor = ibm <nl> ; ; <nl> - s390x | s390x - * ) <nl> - basic_machine = s390x - ibm <nl> - ; ; <nl> - sa29200 ) <nl> - basic_machine = a29k - amd <nl> - os = - udi <nl> - ; ; <nl> - sb1 ) <nl> - basic_machine = mipsisa64sb1 - unknown <nl> + sde ) <nl> + cpu = mipsisa32 <nl> + vendor = sde <nl> + os = $ { os : - elf } <nl> ; ; <nl> - sb1el ) <nl> - basic_machine = mipsisa64sb1el - unknown <nl> + simso - wrs ) <nl> + cpu = sparclite <nl> + vendor = wrs <nl> + os = vxworks <nl> ; ; <nl> - sde ) <nl> - basic_machine = mipsisa32 - sde <nl> - os = - elf <nl> + tower | tower - 32 ) <nl> + cpu = m68k <nl> + vendor = ncr <nl> ; ; <nl> - sei ) <nl> - basic_machine = mips - sei <nl> - os = - seiux <nl> + vpp * | vx | vx - * ) <nl> + cpu = f301 <nl> + vendor = fujitsu <nl> ; ; <nl> - sequent ) <nl> - basic_machine = i386 - sequent <nl> + w65 ) <nl> + cpu = w65 <nl> + vendor = wdc <nl> ; ; <nl> - sh ) <nl> - basic_machine = sh - hitachi <nl> - os = - hms <nl> + w89k - * ) <nl> + cpu = hppa1 . 1 <nl> + vendor = winbond <nl> + os = proelf <nl> ; ; <nl> - sh5el ) <nl> - basic_machine = sh5le - unknown <nl> + none ) <nl> + cpu = none <nl> + vendor = none <nl> ; ; <nl> - sh64 ) <nl> - basic_machine = sh64 - unknown <nl> + leon | leon [ 3 - 9 ] ) <nl> + cpu = sparc <nl> + vendor = $ basic_machine <nl> ; ; <nl> - sparclite - wrs | simso - wrs ) <nl> - basic_machine = sparclite - wrs <nl> - os = - vxworks <nl> + leon - * | leon [ 3 - 9 ] - * ) <nl> + cpu = sparc <nl> + vendor = ` echo " $ basic_machine " | sed ' s / - . * / / ' ` <nl> ; ; <nl> - sps7 ) <nl> - basic_machine = m68k - bull <nl> - os = - sysv2 <nl> + <nl> + * - * ) <nl> + # shellcheck disable = SC2162 <nl> + IFS = " - " read cpu vendor < < EOF <nl> + $ basic_machine <nl> + EOF <nl> ; ; <nl> - spur ) <nl> - basic_machine = spur - unknown <nl> + # We use ` pc ' rather than ` unknown ' <nl> + # because ( 1 ) that ' s what they normally are , and <nl> + # ( 2 ) the word " unknown " tends to confuse beginning users . <nl> + i * 86 | x86_64 ) <nl> + cpu = $ basic_machine <nl> + vendor = pc <nl> ; ; <nl> - st2000 ) <nl> - basic_machine = m68k - tandem <nl> + # These rules are duplicated from below for sake of the special case above ; <nl> + # i . e . 
things that normalized to x86 arches should also default to " pc " <nl> + pc98 ) <nl> + cpu = i386 <nl> + vendor = pc <nl> ; ; <nl> - stratus ) <nl> - basic_machine = i860 - stratus <nl> - os = - sysv4 <nl> + x64 | amd64 ) <nl> + cpu = x86_64 <nl> + vendor = pc <nl> ; ; <nl> - strongarm - * | thumb - * ) <nl> - basic_machine = arm - ` echo $ basic_machine | sed ' s / ^ [ ^ - ] * - / / ' ` <nl> + # Recognize the basic CPU types without company name . <nl> + * ) <nl> + cpu = $ basic_machine <nl> + vendor = unknown <nl> ; ; <nl> - sun2 ) <nl> - basic_machine = m68000 - sun <nl> + esac <nl> + <nl> + unset - v basic_machine <nl> + <nl> + # Decode basic machines in the full and proper CPU - Company form . <nl> + case $ cpu - $ vendor in <nl> + # Here we handle the default manufacturer of certain CPU types in canonical form . It is in <nl> + # some cases the only manufacturer , in others , it is the most popular . <nl> + craynv - unknown ) <nl> + vendor = cray <nl> + os = $ { os : - unicosmp } <nl> ; ; <nl> - sun2os3 ) <nl> - basic_machine = m68000 - sun <nl> - os = - sunos3 <nl> + c90 - unknown | c90 - cray ) <nl> + vendor = cray <nl> + os = $ { os : - unicos } <nl> ; ; <nl> - sun2os4 ) <nl> - basic_machine = m68000 - sun <nl> - os = - sunos4 <nl> + fx80 - unknown ) <nl> + vendor = alliant <nl> ; ; <nl> - sun3os3 ) <nl> - basic_machine = m68k - sun <nl> - os = - sunos3 <nl> + romp - unknown ) <nl> + vendor = ibm <nl> ; ; <nl> - sun3os4 ) <nl> - basic_machine = m68k - sun <nl> - os = - sunos4 <nl> + mmix - unknown ) <nl> + vendor = knuth <nl> ; ; <nl> - sun4os3 ) <nl> - basic_machine = sparc - sun <nl> - os = - sunos3 <nl> + microblaze - unknown | microblazeel - unknown ) <nl> + vendor = xilinx <nl> ; ; <nl> - sun4os4 ) <nl> - basic_machine = sparc - sun <nl> - os = - sunos4 <nl> + rs6000 - unknown ) <nl> + vendor = ibm <nl> ; ; <nl> - sun4sol2 ) <nl> - basic_machine = sparc - sun <nl> - os = - solaris2 <nl> + vax - unknown ) <nl> + vendor = dec <nl> ; ; <nl> - sun3 | sun3 - * ) <nl> - basic_machine = m68k - sun <nl> + pdp11 - unknown ) <nl> + vendor = dec <nl> ; ; <nl> - sun4 ) <nl> - basic_machine = sparc - sun <nl> + we32k - unknown ) <nl> + vendor = att <nl> ; ; <nl> - sun386 | sun386i | roadrunner ) <nl> - basic_machine = i386 - sun <nl> + cydra - unknown ) <nl> + vendor = cydrome <nl> ; ; <nl> - sv1 ) <nl> - basic_machine = sv1 - cray <nl> - os = - unicos <nl> + i370 - ibm * ) <nl> + vendor = ibm <nl> ; ; <nl> - symmetry ) <nl> - basic_machine = i386 - sequent <nl> - os = - dynix <nl> + orion - unknown ) <nl> + vendor = highlevel <nl> ; ; <nl> - t3e ) <nl> - basic_machine = alphaev5 - cray <nl> - os = - unicos <nl> + xps - unknown | xps100 - unknown ) <nl> + cpu = xps100 <nl> + vendor = honeywell <nl> ; ; <nl> - t90 ) <nl> - basic_machine = t90 - cray <nl> - os = - unicos <nl> + <nl> + # Here we normalize CPU types with a missing or matching vendor <nl> + dpx20 - unknown | dpx20 - bull ) <nl> + cpu = rs6000 <nl> + vendor = bull <nl> + os = $ { os : - bosx } <nl> ; ; <nl> - tile * ) <nl> - basic_machine = $ basic_machine - unknown <nl> - os = - linux - gnu <nl> + <nl> + # Here we normalize CPU types irrespective of the vendor <nl> + amd64 - * ) <nl> + cpu = x86_64 <nl> ; ; <nl> - tx39 ) <nl> - basic_machine = mipstx39 - unknown <nl> + blackfin - * ) <nl> + cpu = bfin <nl> + os = linux <nl> ; ; <nl> - tx39el ) <nl> - basic_machine = mipstx39el - unknown <nl> + c54x - * ) <nl> + cpu = tic54x <nl> ; ; <nl> - toad1 ) <nl> - basic_machine = pdp10 - xkl <nl> - os = - tops20 <nl> + c55x - * ) <nl> 
+ cpu = tic55x <nl> ; ; <nl> - tower | tower - 32 ) <nl> - basic_machine = m68k - ncr <nl> + c6x - * ) <nl> + cpu = tic6x <nl> ; ; <nl> - tpf ) <nl> - basic_machine = s390x - ibm <nl> - os = - tpf <nl> + e500v [ 12 ] - * ) <nl> + cpu = powerpc <nl> + os = $ os " spe " <nl> ; ; <nl> - udi29k ) <nl> - basic_machine = a29k - amd <nl> - os = - udi <nl> + mips3 * - * ) <nl> + cpu = mips64 <nl> ; ; <nl> - ultra3 ) <nl> - basic_machine = a29k - nyu <nl> - os = - sym1 <nl> + ms1 - * ) <nl> + cpu = mt <nl> ; ; <nl> - v810 | necv810 ) <nl> - basic_machine = v810 - nec <nl> - os = - none <nl> + m68knommu - * ) <nl> + cpu = m68k <nl> + os = linux <nl> ; ; <nl> - vaxv ) <nl> - basic_machine = vax - dec <nl> - os = - sysv <nl> + m9s12z - * | m68hcs12z - * | hcs12z - * | s12z - * ) <nl> + cpu = s12z <nl> ; ; <nl> - vms ) <nl> - basic_machine = vax - dec <nl> - os = - vms <nl> + openrisc - * ) <nl> + cpu = or32 <nl> ; ; <nl> - vpp * | vx | vx - * ) <nl> - basic_machine = f301 - fujitsu <nl> + parisc - * ) <nl> + cpu = hppa <nl> + os = linux <nl> ; ; <nl> - vxworks960 ) <nl> - basic_machine = i960 - wrs <nl> - os = - vxworks <nl> + pentium - * | p5 - * | k5 - * | k6 - * | nexgen - * | viac3 - * ) <nl> + cpu = i586 <nl> ; ; <nl> - vxworks68 ) <nl> - basic_machine = m68k - wrs <nl> - os = - vxworks <nl> + pentiumpro - * | p6 - * | 6x86 - * | athlon - * | athalon_ * - * ) <nl> + cpu = i686 <nl> ; ; <nl> - vxworks29k ) <nl> - basic_machine = a29k - wrs <nl> - os = - vxworks <nl> + pentiumii - * | pentium2 - * | pentiumiii - * | pentium3 - * ) <nl> + cpu = i686 <nl> ; ; <nl> - w65 * ) <nl> - basic_machine = w65 - wdc <nl> - os = - none <nl> + pentium4 - * ) <nl> + cpu = i786 <nl> ; ; <nl> - w89k - * ) <nl> - basic_machine = hppa1 . 1 - winbond <nl> - os = - proelf <nl> + pc98 - * ) <nl> + cpu = i386 <nl> ; ; <nl> - xbox ) <nl> - basic_machine = i686 - pc <nl> - os = - mingw32 <nl> + ppc - * | ppcbe - * ) <nl> + cpu = powerpc <nl> ; ; <nl> - xps | xps100 ) <nl> - basic_machine = xps100 - honeywell <nl> + ppcle - * | powerpclittle - * ) <nl> + cpu = powerpcle <nl> ; ; <nl> - xscale - * | xscalee [ bl ] - * ) <nl> - basic_machine = ` echo $ basic_machine | sed ' s / ^ xscale / arm / ' ` <nl> + ppc64 - * ) <nl> + cpu = powerpc64 <nl> ; ; <nl> - ymp ) <nl> - basic_machine = ymp - cray <nl> - os = - unicos <nl> + ppc64le - * | powerpc64little - * ) <nl> + cpu = powerpc64le <nl> ; ; <nl> - z8k - * - coff ) <nl> - basic_machine = z8k - unknown <nl> - os = - sim <nl> + sb1 - * ) <nl> + cpu = mipsisa64sb1 <nl> ; ; <nl> - z80 - * - coff ) <nl> - basic_machine = z80 - unknown <nl> - os = - sim <nl> + sb1el - * ) <nl> + cpu = mipsisa64sb1el <nl> ; ; <nl> - none ) <nl> - basic_machine = none - none <nl> - os = - none <nl> + sh5e [ lb ] - * ) <nl> + cpu = ` echo " $ cpu " | sed ' s / ^ \ ( sh . \ ) e \ ( . \ ) $ / \ 1 \ 2e / ' ` <nl> ; ; <nl> - <nl> - # Here we handle the default manufacturer of certain CPU types . It is in <nl> - # some cases the only manufacturer , in others , it is the most popular . <nl> - w89k ) <nl> - basic_machine = hppa1 . 1 - winbond <nl> + spur - * ) <nl> + cpu = spur <nl> ; ; <nl> - op50n ) <nl> - basic_machine = hppa1 . 1 - oki <nl> + strongarm - * | thumb - * ) <nl> + cpu = arm <nl> ; ; <nl> - op60c ) <nl> - basic_machine = hppa1 . 
1 - oki <nl> + tx39 - * ) <nl> + cpu = mipstx39 <nl> ; ; <nl> - romp ) <nl> - basic_machine = romp - ibm <nl> + tx39el - * ) <nl> + cpu = mipstx39el <nl> ; ; <nl> - mmix ) <nl> - basic_machine = mmix - knuth <nl> + x64 - * ) <nl> + cpu = x86_64 <nl> ; ; <nl> - rs6000 ) <nl> - basic_machine = rs6000 - ibm <nl> + xscale - * | xscalee [ bl ] - * ) <nl> + cpu = ` echo " $ cpu " | sed ' s / ^ xscale / arm / ' ` <nl> ; ; <nl> - vax ) <nl> - basic_machine = vax - dec <nl> + <nl> + # Recognize the canonical CPU Types that limit and / or modify the <nl> + # company names they are paired with . <nl> + cr16 - * ) <nl> + os = $ { os : - elf } <nl> ; ; <nl> - pdp10 ) <nl> - # there are many clones , so DEC is not a safe bet <nl> - basic_machine = pdp10 - unknown <nl> + crisv32 - * | etraxfs * - * ) <nl> + cpu = crisv32 <nl> + vendor = axis <nl> ; ; <nl> - pdp11 ) <nl> - basic_machine = pdp11 - dec <nl> + cris - * | etrax * - * ) <nl> + cpu = cris <nl> + vendor = axis <nl> ; ; <nl> - we32k ) <nl> - basic_machine = we32k - att <nl> + crx - * ) <nl> + os = $ { os : - elf } <nl> ; ; <nl> - sh [ 1234 ] | sh [ 24 ] a | sh [ 24 ] aeb | sh [ 34 ] eb | sh [ 1234 ] le | sh [ 23 ] ele ) <nl> - basic_machine = sh - unknown <nl> + neo - tandem ) <nl> + cpu = neo <nl> + vendor = tandem <nl> ; ; <nl> - sparc | sparcv8 | sparcv9 | sparcv9b | sparcv9v ) <nl> - basic_machine = sparc - sun <nl> + nse - tandem ) <nl> + cpu = nse <nl> + vendor = tandem <nl> ; ; <nl> - cydra ) <nl> - basic_machine = cydra - cydrome <nl> + nsr - tandem ) <nl> + cpu = nsr <nl> + vendor = tandem <nl> ; ; <nl> - orion ) <nl> - basic_machine = orion - highlevel <nl> + nsv - tandem ) <nl> + cpu = nsv <nl> + vendor = tandem <nl> ; ; <nl> - orion105 ) <nl> - basic_machine = clipper - highlevel <nl> + nsx - tandem ) <nl> + cpu = nsx <nl> + vendor = tandem <nl> ; ; <nl> - mac | mpw | mac - mpw ) <nl> - basic_machine = m68k - apple <nl> + s390 - * ) <nl> + cpu = s390 <nl> + vendor = ibm <nl> ; ; <nl> - pmac | pmac - mpw ) <nl> - basic_machine = powerpc - apple <nl> + s390x - * ) <nl> + cpu = s390x <nl> + vendor = ibm <nl> ; ; <nl> - * - unknown ) <nl> - # Make sure to match an already - canonicalized machine name . <nl> + tile * - * ) <nl> + os = $ { os : - linux - gnu } <nl> ; ; <nl> + <nl> * ) <nl> - echo Invalid configuration \ ` $ 1 \ ' : machine \ ` $ basic_machine \ ' not recognized 1 > & 2 <nl> - exit 1 <nl> + # Recognize the canonical CPU types that are allowed with any <nl> + # company name . <nl> + case $ cpu in <nl> + 1750a | 580 \ <nl> + | a29k \ <nl> + | aarch64 | aarch64_be \ <nl> + | abacus \ <nl> + | alpha | alphaev [ 4 - 8 ] | alphaev56 | alphaev6 [ 78 ] \ <nl> + | alpha64 | alpha64ev [ 4 - 8 ] | alpha64ev56 | alpha64ev6 [ 78 ] \ <nl> + | alphapca5 [ 67 ] | alpha64pca5 [ 67 ] \ <nl> + | am33_2 . 0 \ <nl> + | amdgcn \ <nl> + | arc | arceb \ <nl> + | arm | arm [ lb ] e | arme [ lb ] | armv * \ <nl> + | avr | avr32 \ <nl> + | asmjs \ <nl> + | ba \ <nl> + | be32 | be64 \ <nl> + | bfin | bpf | bs2000 \ <nl> + | c [ 123 ] * | c30 | [ cjt ] 90 | c4x \ <nl> + | c8051 | clipper | craynv | csky | cydra \ <nl> + | d10v | d30v | dlx | dsp16xx \ <nl> + | e2k | elxsi | epiphany \ <nl> + | f30 [ 01 ] | f700 | fido | fr30 | frv | ft32 | fx80 \ <nl> + | h8300 | h8500 \ <nl> + | hppa | hppa1 . [ 01 ] | hppa2 . 0 | hppa2 . 
0 [ nw ] | hppa64 \ <nl> + | hexagon \ <nl> + | i370 | i * 86 | i860 | i960 | ia16 | ia64 \ <nl> + | ip2k | iq2000 \ <nl> + | k1om \ <nl> + | le32 | le64 \ <nl> + | lm32 \ <nl> + | m32c | m32r | m32rle \ <nl> + | m5200 | m68000 | m680 [ 012346 ] 0 | m68360 | m683 ? 2 | m68k \ <nl> + | m6811 | m68hc11 | m6812 | m68hc12 | m68hcs12x \ <nl> + | m88110 | m88k | maxq | mb | mcore | mep | metag \ <nl> + | microblaze | microblazeel \ <nl> + | mips | mipsbe | mipseb | mipsel | mipsle \ <nl> + | mips16 \ <nl> + | mips64 | mips64eb | mips64el \ <nl> + | mips64octeon | mips64octeonel \ <nl> + | mips64orion | mips64orionel \ <nl> + | mips64r5900 | mips64r5900el \ <nl> + | mips64vr | mips64vrel \ <nl> + | mips64vr4100 | mips64vr4100el \ <nl> + | mips64vr4300 | mips64vr4300el \ <nl> + | mips64vr5000 | mips64vr5000el \ <nl> + | mips64vr5900 | mips64vr5900el \ <nl> + | mipsisa32 | mipsisa32el \ <nl> + | mipsisa32r2 | mipsisa32r2el \ <nl> + | mipsisa32r6 | mipsisa32r6el \ <nl> + | mipsisa64 | mipsisa64el \ <nl> + | mipsisa64r2 | mipsisa64r2el \ <nl> + | mipsisa64r6 | mipsisa64r6el \ <nl> + | mipsisa64sb1 | mipsisa64sb1el \ <nl> + | mipsisa64sr71k | mipsisa64sr71kel \ <nl> + | mipsr5900 | mipsr5900el \ <nl> + | mipstx39 | mipstx39el \ <nl> + | mmix \ <nl> + | mn10200 | mn10300 \ <nl> + | moxie \ <nl> + | mt \ <nl> + | msp430 \ <nl> + | nds32 | nds32le | nds32be \ <nl> + | nfp \ <nl> + | nios | nios2 | nios2eb | nios2el \ <nl> + | none | np1 | ns16k | ns32k | nvptx \ <nl> + | open8 \ <nl> + | or1k * \ <nl> + | or32 \ <nl> + | orion \ <nl> + | picochip \ <nl> + | pdp10 | pdp11 | pj | pjl | pn | power \ <nl> + | powerpc | powerpc64 | powerpc64le | powerpcle | powerpcspe \ <nl> + | pru \ <nl> + | pyramid \ <nl> + | riscv | riscv32 | riscv64 \ <nl> + | rl78 | romp | rs6000 | rx \ <nl> + | score \ <nl> + | sh | shl \ <nl> + | sh [ 1234 ] | sh [ 24 ] a | sh [ 24 ] ae [ lb ] | sh [ 23 ] e | she [ lb ] | sh [ lb ] e \ <nl> + | sh [ 1234 ] e [ lb ] | sh [ 12345 ] [ lb ] e | sh [ 23 ] ele | sh64 | sh64le \ <nl> + | sparc | sparc64 | sparc64b | sparc64v | sparc86x | sparclet \ <nl> + | sparclite \ <nl> + | sparcv8 | sparcv9 | sparcv9b | sparcv9v | sv1 | sx * \ <nl> + | spu \ <nl> + | tahoe \ <nl> + | tic30 | tic4x | tic54x | tic55x | tic6x | tic80 \ <nl> + | tron \ <nl> + | ubicom32 \ <nl> + | v70 | v850 | v850e | v850e1 | v850es | v850e2 | v850e2v3 \ <nl> + | vax \ <nl> + | visium \ <nl> + | w65 \ <nl> + | wasm32 | wasm64 \ <nl> + | we32k \ <nl> + | x86 | x86_64 | xc16x | xgate | xps100 \ <nl> + | xstormy16 | xtensa * \ <nl> + | ymp \ <nl> + | z8k | z80 ) <nl> + ; ; <nl> + <nl> + * ) <nl> + echo Invalid configuration \ ` " $ 1 " \ ' : machine \ ` " $ cpu - $ vendor " \ ' not recognized 1 > & 2 <nl> + exit 1 <nl> + ; ; <nl> + esac <nl> ; ; <nl> esac <nl> <nl> # Here we canonicalize certain aliases for manufacturers . <nl> - case $ basic_machine in <nl> - * - digital * ) <nl> - basic_machine = ` echo $ basic_machine | sed ' s / digital . * / dec / ' ` <nl> + case $ vendor in <nl> + digital * ) <nl> + vendor = dec <nl> ; ; <nl> - * - commodore * ) <nl> - basic_machine = ` echo $ basic_machine | sed ' s / commodore . * / cbm / ' ` <nl> + commodore * ) <nl> + vendor = cbm <nl> ; ; <nl> * ) <nl> ; ; <nl> esac <nl> <nl> # Decode manufacturer - specific aliases for certain operating systems . <nl> <nl> - if [ x " $ os " ! = x " " ] <nl> + if [ x $ os ! = x ] <nl> then <nl> case $ os in <nl> - # First match some system type aliases <nl> - # that might get confused with valid system types . 
<nl> - # - solaris * is a basic system type , with this one exception . <nl> - - auroraux ) <nl> - os = - auroraux <nl> + # First match some system type aliases that might get confused <nl> + # with valid system types . <nl> + # solaris * is a basic system type , with this one exception . <nl> + auroraux ) <nl> + os = auroraux <nl> ; ; <nl> - - solaris1 | - solaris1 . * ) <nl> - os = ` echo $ os | sed - e ' s | solaris1 | sunos4 | ' ` <nl> + bluegene * ) <nl> + os = cnk <nl> ; ; <nl> - - solaris ) <nl> - os = - solaris2 <nl> + solaris1 | solaris1 . * ) <nl> + os = ` echo $ os | sed - e ' s | solaris1 | sunos4 | ' ` <nl> ; ; <nl> - - svr4 * ) <nl> - os = - sysv4 <nl> + solaris ) <nl> + os = solaris2 <nl> ; ; <nl> - - unixware * ) <nl> - os = - sysv4 . 2uw <nl> + unixware * ) <nl> + os = sysv4 . 2uw <nl> ; ; <nl> - - gnu / linux * ) <nl> + gnu / linux * ) <nl> os = ` echo $ os | sed - e ' s | gnu / linux | linux - gnu | ' ` <nl> ; ; <nl> - # First accept the basic system types . <nl> + # es1800 is here to avoid being matched by es * ( a different OS ) <nl> + es1800 * ) <nl> + os = ose <nl> + ; ; <nl> + # Some version numbers need modification <nl> + chorusos * ) <nl> + os = chorusos <nl> + ; ; <nl> + isc ) <nl> + os = isc2 . 2 <nl> + ; ; <nl> + sco6 ) <nl> + os = sco5v6 <nl> + ; ; <nl> + sco5 ) <nl> + os = sco3 . 2v5 <nl> + ; ; <nl> + sco4 ) <nl> + os = sco3 . 2v4 <nl> + ; ; <nl> + sco3 . 2 . [ 4 - 9 ] * ) <nl> + os = ` echo $ os | sed - e ' s / sco3 . 2 . / sco3 . 2v / ' ` <nl> + ; ; <nl> + sco3 . 2v [ 4 - 9 ] * | sco5v6 * ) <nl> + # Don ' t forget version if it is 3 . 2v4 or newer . <nl> + ; ; <nl> + scout ) <nl> + # Don ' t match below <nl> + ; ; <nl> + sco * ) <nl> + os = sco3 . 2v2 <nl> + ; ; <nl> + psos * ) <nl> + os = psos <nl> + ; ; <nl> + # Now accept the basic system types . <nl> # The portable systems comes first . <nl> - # Each alternative MUST END IN A * , to match a version number . <nl> - # - sysv * is not here because it comes later , after sysvr4 . 
<nl> - - gnu * | - bsd * | - mach * | - minix * | - genix * | - ultrix * | - irix * \ <nl> - | - * vms * | - sco * | - esix * | - isc * | - aix * | - cnk * | - sunos | - sunos [ 34 ] * \ <nl> - | - hpux * | - unos * | - osf * | - luna * | - dgux * | - auroraux * | - solaris * \ <nl> - | - sym * | - kopensolaris * | - plan9 * \ <nl> - | - amigaos * | - amigados * | - msdos * | - newsos * | - unicos * | - aof * \ <nl> - | - aos * | - aros * \ <nl> - | - nindy * | - vxsim * | - vxworks * | - ebmon * | - hms * | - mvs * \ <nl> - | - clix * | - riscos * | - uniplus * | - iris * | - rtu * | - xenix * \ <nl> - | - hiux * | - 386bsd * | - knetbsd * | - mirbsd * | - netbsd * \ <nl> - | - bitrig * | - openbsd * | - solidbsd * \ <nl> - | - ekkobsd * | - kfreebsd * | - freebsd * | - riscix * | - lynxos * \ <nl> - | - bosx * | - nextstep * | - cxux * | - aout * | - elf * | - oabi * \ <nl> - | - ptx * | - coff * | - ecoff * | - winnt * | - domain * | - vsta * \ <nl> - | - udi * | - eabi * | - lites * | - ieee * | - go32 * | - aux * \ <nl> - | - chorusos * | - chorusrdb * | - cegcc * \ <nl> - | - cygwin * | - msys * | - pe * | - psos * | - moss * | - proelf * | - rtems * \ <nl> - | - mingw32 * | - mingw64 * | - linux - gnu * | - linux - android * \ <nl> - | - linux - newlib * | - linux - musl * | - linux - uclibc * \ <nl> - | - uxpv * | - beos * | - mpeix * | - udk * | - moxiebox * \ <nl> - | - interix * | - uwin * | - mks * | - rhapsody * | - darwin * | - opened * \ <nl> - | - openstep * | - oskit * | - conix * | - pw32 * | - nonstopux * \ <nl> - | - storm - chaos * | - tops10 * | - tenex * | - tops20 * | - its * \ <nl> - | - os2 * | - vos * | - palmos * | - uclinux * | - nucleus * \ <nl> - | - morphos * | - superux * | - rtmk * | - rtmk - nova * | - windiss * \ <nl> - | - powermax * | - dnix * | - nx6 | - nx7 | - sei * | - dragonfly * \ <nl> - | - skyos * | - haiku * | - rdos * | - toppers * | - drops * | - es * | - tirtos * ) <nl> + # Each alternative MUST end in a * to match a version number . <nl> + # sysv * is not here because it comes later , after sysvr4 . 
<nl> + gnu * | bsd * | mach * | minix * | genix * | ultrix * | irix * \ <nl> + | * vms * | esix * | aix * | cnk * | sunos | sunos [ 34 ] * \ <nl> + | hpux * | unos * | osf * | luna * | dgux * | auroraux * | solaris * \ <nl> + | sym * | kopensolaris * | plan9 * \ <nl> + | amigaos * | amigados * | msdos * | newsos * | unicos * | aof * \ <nl> + | aos * | aros * | cloudabi * | sortix * | twizzler * \ <nl> + | nindy * | vxsim * | vxworks * | ebmon * | hms * | mvs * \ <nl> + | clix * | riscos * | uniplus * | iris * | isc * | rtu * | xenix * \ <nl> + | knetbsd * | mirbsd * | netbsd * \ <nl> + | bitrig * | openbsd * | solidbsd * | libertybsd * | os108 * \ <nl> + | ekkobsd * | kfreebsd * | freebsd * | riscix * | lynxos * \ <nl> + | bosx * | nextstep * | cxux * | aout * | elf * | oabi * \ <nl> + | ptx * | coff * | ecoff * | winnt * | domain * | vsta * \ <nl> + | udi * | eabi * | lites * | ieee * | go32 * | aux * | hcos * \ <nl> + | chorusrdb * | cegcc * | glidix * \ <nl> + | cygwin * | msys * | pe * | moss * | proelf * | rtems * \ <nl> + | midipix * | mingw32 * | mingw64 * | linux - gnu * | linux - android * \ <nl> + | linux - newlib * | linux - musl * | linux - uclibc * \ <nl> + | uxpv * | beos * | mpeix * | udk * | moxiebox * \ <nl> + | interix * | uwin * | mks * | rhapsody * | darwin * \ <nl> + | openstep * | oskit * | conix * | pw32 * | nonstopux * \ <nl> + | storm - chaos * | tops10 * | tenex * | tops20 * | its * \ <nl> + | os2 * | vos * | palmos * | uclinux * | nucleus * \ <nl> + | morphos * | superux * | rtmk * | windiss * \ <nl> + | powermax * | dnix * | nx6 | nx7 | sei * | dragonfly * \ <nl> + | skyos * | haiku * | rdos * | toppers * | drops * | es * \ <nl> + | onefs * | tirtos * | phoenix * | fuchsia * | redox * | bme * \ <nl> + | midnightbsd * | amdhsa * | unleashed * | emscripten * | wasi * \ <nl> + | nsk * | powerunix ) <nl> # Remember , each alternative MUST END IN * , to match a version number . 
<nl> ; ; <nl> - - qnx * ) <nl> - case $ basic_machine in <nl> - x86 - * | i * 86 - * ) <nl> + qnx * ) <nl> + case $ cpu in <nl> + x86 | i * 86 ) <nl> ; ; <nl> * ) <nl> - os = - nto $ os <nl> + os = nto - $ os <nl> ; ; <nl> esac <nl> ; ; <nl> - - nto - qnx * ) <nl> + hiux * ) <nl> + os = hiuxwe2 <nl> ; ; <nl> - - nto * ) <nl> - os = ` echo $ os | sed - e ' s | nto | nto - qnx | ' ` <nl> + nto - qnx * ) <nl> ; ; <nl> - - sim | - es1800 * | - hms * | - xray | - os68k * | - none * | - v88r * \ <nl> - | - windows * | - osx | - abug | - netware * | - os9 * | - beos * | - haiku * \ <nl> - | - macos * | - mpw * | - magic * | - mmixware * | - mon960 * | - lnews * ) <nl> + nto * ) <nl> + os = ` echo $ os | sed - e ' s | nto | nto - qnx | ' ` <nl> ; ; <nl> - - mac * ) <nl> - os = ` echo $ os | sed - e ' s | mac | macos | ' ` <nl> + sim | xray | os68k * | v88r * \ <nl> + | windows * | osx | abug | netware * | os9 * \ <nl> + | macos * | mpw * | magic * | mmixware * | mon960 * | lnews * ) <nl> ; ; <nl> - - linux - dietlibc ) <nl> - os = - linux - dietlibc <nl> + linux - dietlibc ) <nl> + os = linux - dietlibc <nl> ; ; <nl> - - linux * ) <nl> + linux * ) <nl> os = ` echo $ os | sed - e ' s | linux | linux - gnu | ' ` <nl> ; ; <nl> - - sunos5 * ) <nl> - os = ` echo $ os | sed - e ' s | sunos5 | solaris2 | ' ` <nl> + lynx * 178 ) <nl> + os = lynxos178 <nl> + ; ; <nl> + lynx * 5 ) <nl> + os = lynxos5 <nl> ; ; <nl> - - sunos6 * ) <nl> - os = ` echo $ os | sed - e ' s | sunos6 | solaris3 | ' ` <nl> + lynx * ) <nl> + os = lynxos <nl> ; ; <nl> - - opened * ) <nl> - os = - openedition <nl> + mac * ) <nl> + os = ` echo " $ os " | sed - e ' s | mac | macos | ' ` <nl> ; ; <nl> - - os400 * ) <nl> - os = - os400 <nl> + opened * ) <nl> + os = openedition <nl> ; ; <nl> - - wince * ) <nl> - os = - wince <nl> + os400 * ) <nl> + os = os400 <nl> ; ; <nl> - - osfrose * ) <nl> - os = - osfrose <nl> + sunos5 * ) <nl> + os = ` echo " $ os " | sed - e ' s | sunos5 | solaris2 | ' ` <nl> ; ; <nl> - - osf * ) <nl> - os = - osf <nl> + sunos6 * ) <nl> + os = ` echo " $ os " | sed - e ' s | sunos6 | solaris3 | ' ` <nl> ; ; <nl> - - utek * ) <nl> - os = - bsd <nl> + wince * ) <nl> + os = wince <nl> ; ; <nl> - - dynix * ) <nl> - os = - bsd <nl> + utek * ) <nl> + os = bsd <nl> ; ; <nl> - - acis * ) <nl> - os = - aos <nl> + dynix * ) <nl> + os = bsd <nl> ; ; <nl> - - atheos * ) <nl> - os = - atheos <nl> + acis * ) <nl> + os = aos <nl> ; ; <nl> - - syllable * ) <nl> - os = - syllable <nl> + atheos * ) <nl> + os = atheos <nl> ; ; <nl> - - 386bsd ) <nl> - os = - bsd <nl> + syllable * ) <nl> + os = syllable <nl> ; ; <nl> - - ctix * | - uts * ) <nl> - os = - sysv <nl> + 386bsd ) <nl> + os = bsd <nl> ; ; <nl> - - nova * ) <nl> - os = - rtmk - nova <nl> + ctix * | uts * ) <nl> + os = sysv <nl> ; ; <nl> - - ns2 ) <nl> - os = - nextstep2 <nl> + nova * ) <nl> + os = rtmk - nova <nl> ; ; <nl> - - nsk * ) <nl> - os = - nsk <nl> + ns2 ) <nl> + os = nextstep2 <nl> ; ; <nl> # Preserve the version number of sinix5 . <nl> - - sinix5 . * ) <nl> + sinix5 . 
* ) <nl> os = ` echo $ os | sed - e ' s | sinix | sysv | ' ` <nl> ; ; <nl> - - sinix * ) <nl> - os = - sysv4 <nl> + sinix * ) <nl> + os = sysv4 <nl> ; ; <nl> - - tpf * ) <nl> - os = - tpf <nl> + tpf * ) <nl> + os = tpf <nl> ; ; <nl> - - triton * ) <nl> - os = - sysv3 <nl> + triton * ) <nl> + os = sysv3 <nl> ; ; <nl> - - oss * ) <nl> - os = - sysv3 <nl> + oss * ) <nl> + os = sysv3 <nl> ; ; <nl> - - svr4 ) <nl> - os = - sysv4 <nl> + svr4 * ) <nl> + os = sysv4 <nl> ; ; <nl> - - svr3 ) <nl> - os = - sysv3 <nl> + svr3 ) <nl> + os = sysv3 <nl> ; ; <nl> - - sysvr4 ) <nl> - os = - sysv4 <nl> + sysvr4 ) <nl> + os = sysv4 <nl> ; ; <nl> - # This must come after - sysvr4 . <nl> - - sysv * ) <nl> + # This must come after sysvr4 . <nl> + sysv * ) <nl> ; ; <nl> - - ose * ) <nl> - os = - ose <nl> + ose * ) <nl> + os = ose <nl> ; ; <nl> - - es1800 * ) <nl> - os = - ose <nl> + * mint | mint [ 0 - 9 ] * | * MiNT | MiNT [ 0 - 9 ] * ) <nl> + os = mint <nl> ; ; <nl> - - xenix ) <nl> - os = - xenix <nl> + zvmoe ) <nl> + os = zvmoe <nl> ; ; <nl> - - * mint | - mint [ 0 - 9 ] * | - * MiNT | - MiNT [ 0 - 9 ] * ) <nl> - os = - mint <nl> + dicos * ) <nl> + os = dicos <nl> ; ; <nl> - - aros * ) <nl> - os = - aros <nl> + pikeos * ) <nl> + # Until real need of OS specific support for <nl> + # particular features comes up , bare metal <nl> + # configurations are quite functional . <nl> + case $ cpu in <nl> + arm * ) <nl> + os = eabi <nl> + ; ; <nl> + * ) <nl> + os = elf <nl> + ; ; <nl> + esac <nl> ; ; <nl> - - zvmoe ) <nl> - os = - zvmoe <nl> + nacl * ) <nl> ; ; <nl> - - dicos * ) <nl> - os = - dicos <nl> + ios ) <nl> ; ; <nl> - - nacl * ) <nl> + none ) <nl> ; ; <nl> - - none ) <nl> + * - eabi ) <nl> ; ; <nl> * ) <nl> - # Get rid of the ` - ' at the beginning of $ os . <nl> - os = ` echo $ os | sed ' s / [ ^ - ] * - / / ' ` <nl> - echo Invalid configuration \ ` $ 1 \ ' : system \ ` $ os \ ' not recognized 1 > & 2 <nl> + echo Invalid configuration \ ` " $ 1 " \ ' : system \ ` " $ os " \ ' not recognized 1 > & 2 <nl> exit 1 <nl> ; ; <nl> esac <nl> else <nl> # will signal an error saying that MANUFACTURER isn ' t an operating <nl> # system , and we ' ll never get to this point . <nl> <nl> - case $ basic_machine in <nl> + case $ cpu - $ vendor in <nl> score - * ) <nl> - os = - elf <nl> + os = elf <nl> ; ; <nl> spu - * ) <nl> - os = - elf <nl> + os = elf <nl> ; ; <nl> * - acorn ) <nl> - os = - riscix1 . 2 <nl> + os = riscix1 . 2 <nl> ; ; <nl> arm * - rebel ) <nl> - os = - linux <nl> + os = linux <nl> ; ; <nl> arm * - semi ) <nl> - os = - aout <nl> + os = aout <nl> ; ; <nl> c4x - * | tic4x - * ) <nl> - os = - coff <nl> + os = coff <nl> ; ; <nl> c8051 - * ) <nl> - os = - elf <nl> + os = elf <nl> + ; ; <nl> + clipper - intergraph ) <nl> + os = clix <nl> ; ; <nl> hexagon - * ) <nl> - os = - elf <nl> + os = elf <nl> ; ; <nl> tic54x - * ) <nl> - os = - coff <nl> + os = coff <nl> ; ; <nl> tic55x - * ) <nl> - os = - coff <nl> + os = coff <nl> ; ; <nl> tic6x - * ) <nl> - os = - coff <nl> + os = coff <nl> ; ; <nl> # This must come before the * - dec entry . <nl> pdp10 - * ) <nl> - os = - tops20 <nl> + os = tops20 <nl> ; ; <nl> pdp11 - * ) <nl> - os = - none <nl> + os = none <nl> ; ; <nl> * - dec | vax - * ) <nl> - os = - ultrix4 . 2 <nl> + os = ultrix4 . 2 <nl> ; ; <nl> m68 * - apollo ) <nl> - os = - domain <nl> + os = domain <nl> ; ; <nl> i386 - sun ) <nl> - os = - sunos4 . 0 . 2 <nl> + os = sunos4 . 0 . 
2 <nl> ; ; <nl> m68000 - sun ) <nl> - os = - sunos3 <nl> + os = sunos3 <nl> ; ; <nl> m68 * - cisco ) <nl> - os = - aout <nl> + os = aout <nl> ; ; <nl> mep - * ) <nl> - os = - elf <nl> + os = elf <nl> ; ; <nl> mips * - cisco ) <nl> - os = - elf <nl> + os = elf <nl> ; ; <nl> mips * - * ) <nl> - os = - elf <nl> + os = elf <nl> ; ; <nl> or32 - * ) <nl> - os = - coff <nl> + os = coff <nl> ; ; <nl> * - tti ) # must be before sparc entry or we get the wrong os . <nl> - os = - sysv3 <nl> + os = sysv3 <nl> ; ; <nl> sparc - * | * - sun ) <nl> - os = - sunos4 . 1 . 1 <nl> + os = sunos4 . 1 . 1 <nl> ; ; <nl> - * - be ) <nl> - os = - beos <nl> + pru - * ) <nl> + os = elf <nl> ; ; <nl> - * - haiku ) <nl> - os = - haiku <nl> + * - be ) <nl> + os = beos <nl> ; ; <nl> * - ibm ) <nl> - os = - aix <nl> + os = aix <nl> ; ; <nl> * - knuth ) <nl> - os = - mmixware <nl> + os = mmixware <nl> ; ; <nl> * - wec ) <nl> - os = - proelf <nl> + os = proelf <nl> ; ; <nl> * - winbond ) <nl> - os = - proelf <nl> + os = proelf <nl> ; ; <nl> * - oki ) <nl> - os = - proelf <nl> + os = proelf <nl> ; ; <nl> * - hp ) <nl> - os = - hpux <nl> + os = hpux <nl> ; ; <nl> * - hitachi ) <nl> - os = - hiux <nl> + os = hiux <nl> ; ; <nl> i860 - * | * - att | * - ncr | * - altos | * - motorola | * - convergent ) <nl> - os = - sysv <nl> + os = sysv <nl> ; ; <nl> * - cbm ) <nl> - os = - amigaos <nl> + os = amigaos <nl> ; ; <nl> * - dg ) <nl> - os = - dgux <nl> + os = dgux <nl> ; ; <nl> * - dolphin ) <nl> - os = - sysv3 <nl> + os = sysv3 <nl> ; ; <nl> m68k - ccur ) <nl> - os = - rtu <nl> + os = rtu <nl> ; ; <nl> m88k - omron * ) <nl> - os = - luna <nl> + os = luna <nl> ; ; <nl> - * - next ) <nl> - os = - nextstep <nl> + * - next ) <nl> + os = nextstep <nl> ; ; <nl> * - sequent ) <nl> - os = - ptx <nl> + os = ptx <nl> ; ; <nl> * - crds ) <nl> - os = - unos <nl> + os = unos <nl> ; ; <nl> * - ns ) <nl> - os = - genix <nl> + os = genix <nl> ; ; <nl> i370 - * ) <nl> - os = - mvs <nl> - ; ; <nl> - * - next ) <nl> - os = - nextstep3 <nl> + os = mvs <nl> ; ; <nl> * - gould ) <nl> - os = - sysv <nl> + os = sysv <nl> ; ; <nl> * - highlevel ) <nl> - os = - bsd <nl> + os = bsd <nl> ; ; <nl> * - encore ) <nl> - os = - bsd <nl> + os = bsd <nl> ; ; <nl> * - sgi ) <nl> - os = - irix <nl> + os = irix <nl> ; ; <nl> * - siemens ) <nl> - os = - sysv4 <nl> + os = sysv4 <nl> ; ; <nl> * - masscomp ) <nl> - os = - rtu <nl> + os = rtu <nl> ; ; <nl> f30 [ 01 ] - fujitsu | f700 - fujitsu ) <nl> - os = - uxpv <nl> + os = uxpv <nl> ; ; <nl> * - rom68k ) <nl> - os = - coff <nl> + os = coff <nl> ; ; <nl> * - * bug ) <nl> - os = - coff <nl> + os = coff <nl> ; ; <nl> * - apple ) <nl> - os = - macos <nl> + os = macos <nl> ; ; <nl> * - atari * ) <nl> - os = - mint <nl> + os = mint <nl> + ; ; <nl> + * - wrs ) <nl> + os = vxworks <nl> ; ; <nl> * ) <nl> - os = - none <nl> + os = none <nl> ; ; <nl> esac <nl> fi <nl> <nl> # Here we handle the case where we know the os , and the CPU type , but not the <nl> # manufacturer . We pick the logical manufacturer . 
<nl> - vendor = unknown <nl> - case $ basic_machine in <nl> - * - unknown ) <nl> + case $ vendor in <nl> + unknown ) <nl> case $ os in <nl> - - riscix * ) <nl> + riscix * ) <nl> vendor = acorn <nl> ; ; <nl> - - sunos * ) <nl> + sunos * ) <nl> vendor = sun <nl> ; ; <nl> - - cnk * | - aix * ) <nl> + cnk * | - aix * ) <nl> vendor = ibm <nl> ; ; <nl> - - beos * ) <nl> + beos * ) <nl> vendor = be <nl> ; ; <nl> - - hpux * ) <nl> + hpux * ) <nl> vendor = hp <nl> ; ; <nl> - - mpeix * ) <nl> + mpeix * ) <nl> vendor = hp <nl> ; ; <nl> - - hiux * ) <nl> + hiux * ) <nl> vendor = hitachi <nl> ; ; <nl> - - unos * ) <nl> + unos * ) <nl> vendor = crds <nl> ; ; <nl> - - dgux * ) <nl> + dgux * ) <nl> vendor = dg <nl> ; ; <nl> - - luna * ) <nl> + luna * ) <nl> vendor = omron <nl> ; ; <nl> - - genix * ) <nl> + genix * ) <nl> vendor = ns <nl> ; ; <nl> - - mvs * | - opened * ) <nl> + clix * ) <nl> + vendor = intergraph <nl> + ; ; <nl> + mvs * | opened * ) <nl> vendor = ibm <nl> ; ; <nl> - - os400 * ) <nl> + os400 * ) <nl> vendor = ibm <nl> ; ; <nl> - - ptx * ) <nl> + ptx * ) <nl> vendor = sequent <nl> ; ; <nl> - - tpf * ) <nl> + tpf * ) <nl> vendor = ibm <nl> ; ; <nl> - - vxsim * | - vxworks * | - windiss * ) <nl> + vxsim * | vxworks * | windiss * ) <nl> vendor = wrs <nl> ; ; <nl> - - aux * ) <nl> + aux * ) <nl> vendor = apple <nl> ; ; <nl> - - hms * ) <nl> + hms * ) <nl> vendor = hitachi <nl> ; ; <nl> - - mpw * | - macos * ) <nl> + mpw * | macos * ) <nl> vendor = apple <nl> ; ; <nl> - - * mint | - mint [ 0 - 9 ] * | - * MiNT | - MiNT [ 0 - 9 ] * ) <nl> + * mint | mint [ 0 - 9 ] * | * MiNT | MiNT [ 0 - 9 ] * ) <nl> vendor = atari <nl> ; ; <nl> - - vos * ) <nl> + vos * ) <nl> vendor = stratus <nl> ; ; <nl> esac <nl> - basic_machine = ` echo $ basic_machine | sed " s / unknown / $ vendor / " ` <nl> ; ; <nl> esac <nl> <nl> - echo $ basic_machine $ os <nl> + echo " $ cpu - $ vendor - $ os " <nl> exit <nl> <nl> # Local variables : <nl> - # eval : ( add - hook ' write - file - hooks ' time - stamp ) <nl> + # eval : ( add - hook ' before - save - hook ' time - stamp ) <nl> # time - stamp - start : " timestamp = ' " <nl> # time - stamp - format : " % : y - % 02m - % 02d " <nl> # time - stamp - end : " ' " <nl> mmm a / build - aux / install - sh <nl> ppp b / build - aux / install - sh <nl> <nl> # ! / bin / sh <nl> # install - install a program , script , or datafile <nl> <nl> - scriptversion = 2013 - 12 - 25 . 23 ; # UTC <nl> + scriptversion = 2018 - 03 - 11 . 20 ; # UTC <nl> <nl> # This originates from X11R5 ( mit / util / scripts / install . sh ) , which was <nl> # later released in X11R6 ( xc / config / util / install . sh ) with the <nl> do <nl> fi <nl> dst = $ dst_arg <nl> <nl> - # If destination is a directory , append the input filename ; won ' t work <nl> - # if double slashes aren ' t ignored . <nl> + # If destination is a directory , append the input filename . 
<nl> if test - d " $ dst " ; then <nl> if test " $ is_target_a_directory " = never ; then <nl> echo " $ 0 : $ dst_arg : Is a directory " > & 2 <nl> exit 1 <nl> fi <nl> dstdir = $ dst <nl> - dst = $ dstdir / ` basename " $ src " ` <nl> + dstbase = ` basename " $ src " ` <nl> + case $ dst in <nl> + * / ) dst = $ dst $ dstbase ; ; <nl> + * ) dst = $ dst / $ dstbase ; ; <nl> + esac <nl> dstdir_status = 0 <nl> else <nl> dstdir = ` dirname " $ dst " ` <nl> do <nl> fi <nl> fi <nl> <nl> + case $ dstdir in <nl> + * / ) dstdirslash = $ dstdir ; ; <nl> + * ) dstdirslash = $ dstdir / ; ; <nl> + esac <nl> + <nl> obsolete_mkdir_used = false <nl> <nl> if test $ dstdir_status ! = 0 ; then <nl> do <nl> # is incompatible with FreeBSD ' install ' when ( umask & 300 ) ! = 0 . <nl> ; ; <nl> * ) <nl> + # Note that $ RANDOM variable is not portable ( e . g . dash ) ; Use it <nl> + # here however when possible just to lower collision chance . <nl> tmpdir = $ { TMPDIR - / tmp } / ins $ RANDOM - $ $ <nl> - trap ' ret = $ ? ; rmdir " $ tmpdir / d " " $ tmpdir " 2 > / dev / null ; exit $ ret ' 0 <nl> <nl> + trap ' ret = $ ? ; rmdir " $ tmpdir / a / b " " $ tmpdir / a " " $ tmpdir " 2 > / dev / null ; exit $ ret ' 0 <nl> + <nl> + # Because " mkdir - p " follows existing symlinks and we likely work <nl> + # directly in world - writeable / tmp , make sure that the ' $ tmpdir ' <nl> + # directory is successfully created first before we actually test <nl> + # ' mkdir - p ' feature . <nl> if ( umask $ mkdir_umask & & <nl> - exec $ mkdirprog $ mkdir_mode - p - - " $ tmpdir / d " ) > / dev / null 2 > & 1 <nl> + $ mkdirprog $ mkdir_mode " $ tmpdir " & & <nl> + exec $ mkdirprog $ mkdir_mode - p - - " $ tmpdir / a / b " ) > / dev / null 2 > & 1 <nl> then <nl> if test - z " $ dir_arg " | | { <nl> # Check for POSIX incompatibilities with - m . <nl> # HP - UX 11 . 23 and IRIX 6 . 5 mkdir - m - p sets group - or <nl> # other - writable bit of parent directory when it shouldn ' t . <nl> # FreeBSD 6 . 1 mkdir - m - p sets mode of existing directory . <nl> - ls_ld_tmpdir = ` ls - ld " $ tmpdir " ` <nl> + test_tmpdir = " $ tmpdir / a " <nl> + ls_ld_tmpdir = ` ls - ld " $ test_tmpdir " ` <nl> case $ ls_ld_tmpdir in <nl> d ? ? ? ? - ? r - * ) different_mode = 700 ; ; <nl> d ? ? ? ? - ? - - * ) different_mode = 755 ; ; <nl> * ) false ; ; <nl> esac & & <nl> - $ mkdirprog - m $ different_mode - p - - " $ tmpdir " & & { <nl> - ls_ld_tmpdir_1 = ` ls - ld " $ tmpdir " ` <nl> + $ mkdirprog - m $ different_mode - p - - " $ test_tmpdir " & & { <nl> + ls_ld_tmpdir_1 = ` ls - ld " $ test_tmpdir " ` <nl> test " $ ls_ld_tmpdir " = " $ ls_ld_tmpdir_1 " <nl> } <nl> } <nl> then posix_mkdir = : <nl> fi <nl> - rmdir " $ tmpdir / d " " $ tmpdir " <nl> + rmdir " $ tmpdir / a / b " " $ tmpdir / a " " $ tmpdir " <nl> else <nl> # Remove any dirs left behind by ancient mkdir implementations . <nl> - rmdir . / $ mkdir_mode . / - p . / - - 2 > / dev / null <nl> + rmdir . / $ mkdir_mode . / - p . / - - " $ tmpdir " 2 > / dev / null <nl> fi <nl> trap ' ' 0 ; ; <nl> esac ; ; <nl> do <nl> else <nl> <nl> # Make a couple of temp file names in the proper directory . <nl> - dsttmp = $ dstdir / _inst . $ $ _ <nl> - rmtmp = $ dstdir / _rm . $ $ _ <nl> + dsttmp = $ { dstdirslash } _inst . $ $ _ <nl> + rmtmp = $ { dstdirslash } _rm . $ $ _ <nl> <nl> # Trap to clean up those temp files at exit . <nl> trap ' ret = $ ? 
; rm - f " $ dsttmp " " $ rmtmp " & & exit $ ret ' 0 <nl> do <nl> done <nl> <nl> # Local variables : <nl> - # eval : ( add - hook ' write - file - hooks ' time - stamp ) <nl> + # eval : ( add - hook ' before - save - hook ' time - stamp ) <nl> # time - stamp - start : " scriptversion = " <nl> # time - stamp - format : " % : y - % 02m - % 02d . % 02H " <nl> - # time - stamp - time - zone : " UTC " <nl> + # time - stamp - time - zone : " UTC0 " <nl> # time - stamp - end : " ; # UTC " <nl> # End : <nl> mmm a / build - aux / missing <nl> ppp b / build - aux / missing <nl> <nl> # ! / bin / sh <nl> # Common wrapper for a few potentially missing GNU programs . <nl> <nl> - scriptversion = 2013 - 10 - 28 . 13 ; # UTC <nl> + scriptversion = 2018 - 03 - 07 . 03 ; # UTC <nl> <nl> - # Copyright ( C ) 1996 - 2014 Free Software Foundation , Inc . <nl> + # Copyright ( C ) 1996 - 2018 Free Software Foundation , Inc . <nl> # Originally written by Fran , cois Pinard < pinard @ iro . umontreal . ca > , 1996 . <nl> <nl> # This program is free software ; you can redistribute it and / or modify <nl> scriptversion = 2013 - 10 - 28 . 13 ; # UTC <nl> # GNU General Public License for more details . <nl> <nl> # You should have received a copy of the GNU General Public License <nl> - # along with this program . If not , see < http : / / www . gnu . org / licenses / > . <nl> + # along with this program . If not , see < https : / / www . gnu . org / licenses / > . <nl> <nl> # As a special exception to the GNU General Public License , if you <nl> # distribute this file as part of a program that contains a <nl> else <nl> exit $ st <nl> fi <nl> <nl> - perl_URL = http : / / www . perl . org / <nl> - flex_URL = http : / / flex . sourceforge . net / <nl> - gnu_software_URL = http : / / www . gnu . org / software <nl> + perl_URL = https : / / www . perl . org / <nl> + flex_URL = https : / / github . com / westes / flex <nl> + gnu_software_URL = https : / / www . gnu . org / software <nl> <nl> program_details ( ) <nl> { <nl> give_advice " $ 1 " | sed - e ' 1s / ^ / WARNING : / ' \ <nl> exit $ st <nl> <nl> # Local variables : <nl> - # eval : ( add - hook ' write - file - hooks ' time - stamp ) <nl> + # eval : ( add - hook ' before - save - hook ' time - stamp ) <nl> # time - stamp - start : " scriptversion = " <nl> # time - stamp - format : " % : y - % 02m - % 02d . % 02H " <nl> - # time - stamp - time - zone : " UTC " <nl> + # time - stamp - time - zone : " UTC0 " <nl> # time - stamp - end : " ; # UTC " <nl> # End : <nl> | Regenerate build - aux scripts | qbittorrent/qBittorrent | 48d628671407ea832adb45c028038f05c4305aac | 2019-10-19T17:28:10Z |
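The regenerated config.sub in the row above switches from a single basic_machine string to separate cpu/vendor/os variables: the alias is split at the first dash, and a default vendor (and sometimes OS) is filled in from large case tables. Below is a hypothetical C++ sketch of that normalization step, not the script itself; the alias table is a tiny made-up excerpt of the real case statements.

```cpp
// Hypothetical C++ sketch of the cpu/vendor split performed by the regenerated
// config.sub; this is an illustration, not the script itself, and the alias
// table is a tiny made-up excerpt of the real case statements.
#include <cstdio>
#include <map>
#include <string>

struct Triplet { std::string cpu, vendor; };

// Mirrors the `IFS="-" read cpu vendor` fallback: break at the first dash,
// or default the vendor to "unknown" when there is no dash at all.
Triplet split_machine(const std::string& m) {
    auto dash = m.find('-');
    if (dash == std::string::npos) return {m, "unknown"};
    return {m.substr(0, dash), m.substr(dash + 1)};
}

Triplet canonicalize(const std::string& machine) {
    static const std::map<std::string, Triplet> aliases = {
        {"amd64", {"x86_64", "pc"}},     // x86 aliases default to vendor "pc"
        {"pc98",  {"i386",   "pc"}},
        {"np1",   {"np1",    "gould"}},  // single-vendor CPUs get theirs filled in
        {"vax",   {"vax",    "dec"}},
    };
    auto it = aliases.find(machine);
    if (it != aliases.end()) return it->second;
    return split_machine(machine);
}

int main() {
    for (const char* m : {"amd64", "np1", "mips-sgi", "riscv64"}) {
        Triplet t = canonicalize(m);
        std::printf("%-10s -> %s-%s\n", m, t.cpu.c_str(), t.vendor.c_str());
    }
    return 0;
}
```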
mmm a / tests / queries / 0_stateless / 01079_parallel_alter_modify_zookeeper . sh <nl> ppp b / tests / queries / 0_stateless / 01079_parallel_alter_modify_zookeeper . sh <nl> wait <nl> <nl> echo " Finishing alters " <nl> <nl> - # This alter will finish all previous , but replica 1 maybe still not up - to - date <nl> - while [ [ $ ( timeout 30 $ CLICKHOUSE_CLIENT - - query " ALTER TABLE concurrent_alter_mt_1 MODIFY COLUMN value1 String SETTINGS replication_alter_partitions_sync = 2 " 2 > & 1 ) ] ] ; do <nl> + # This alter will finish all previous , but replica 1 maybe still not up - to - date . <nl> + # If query will throw something , than we will sleep 1 and retry . If timeout <nl> + # happened we will silentrly go out of loop and probably fail tests in the <nl> + # following for loop . <nl> + # <nl> + # 120 seconds is more than enough , but in rare cases for slow builds ( debug , <nl> + # thread ) it maybe necessary <nl> + while [ [ $ ( timeout 120 $ CLICKHOUSE_CLIENT - - query " ALTER TABLE concurrent_alter_mt_1 MODIFY COLUMN value1 String SETTINGS replication_alter_partitions_sync = 2 " 2 > & 1 ) ] ] ; do <nl> sleep 1 <nl> done <nl> <nl> | Fix modify test | ClickHouse/ClickHouse | 508e0f44d188e883ca46935f3ee6dcfc9ac17853 | 2020-06-09T01:48:30Z |
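The comment added in the row above spells out the retry strategy: re-run the ALTER while it returns error text, sleep a second between attempts, and let a generous timeout end the loop silently so the checks that follow fail loudly instead. As a rough sketch of that pattern outside bash, here is a hypothetical C++ analogue; the callback and the error string are stand-ins, not ClickHouse APIs.

```cpp
// Hypothetical C++ analogue (not ClickHouse code) of the shell retry loop in
// this test: re-issue the operation while it reports an error, sleep a second
// between attempts, and give up silently once the overall deadline passes so
// the checks that follow can fail loudly instead.
#include <chrono>
#include <cstdio>
#include <functional>
#include <string>
#include <thread>

// `attempt` stands in for one `timeout ... $CLICKHOUSE_CLIENT --query ...`
// invocation: an empty string means success, anything else is error output.
bool retry_until_quiet(const std::function<std::string()>& attempt,
                       std::chrono::seconds deadline) {
    const auto stop = std::chrono::steady_clock::now() + deadline;
    while (std::chrono::steady_clock::now() < stop) {
        if (attempt().empty())
            return true;                                      // ALTER went through
        std::this_thread::sleep_for(std::chrono::seconds(1)); // then retry
    }
    return false;  // deadline hit: exit quietly, later assertions will catch it
}

int main() {
    int tries = 0;
    bool ok = retry_until_quiet(
        [&] { return ++tries < 3 ? std::string("replica is not up to date")
                                 : std::string(); },
        std::chrono::seconds(120));
    std::printf("succeeded=%d after %d tries\n", static_cast<int>(ok), tries);
    return 0;
}
```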
mmm a / aten / src / THCUNN / generic / Im2Col . cu <nl> ppp b / aten / src / THCUNN / generic / Im2Col . cu <nl> void THNN_ ( Im2Col_updateOutput ) ( <nl> im2col ( <nl> THCState_getCurrentStream ( state ) , <nl> THCTensor_ ( data ) ( state , input_n ) , <nl> - nInputPlane , inputHeight , inputWidth , kH , kW , padH , padW , sH , sW , <nl> + nInputPlane , inputHeight , inputWidth , <nl> + outputHeight , outputWidth , <nl> + kH , kW , padH , padW , sH , sW , <nl> dH , dW , THCTensor_ ( data ) ( state , output_n ) ) ; <nl> } <nl> <nl> mmm a / aten / src / THCUNN / generic / SpatialConvolutionLocal . cu <nl> ppp b / aten / src / THCUNN / generic / SpatialConvolutionLocal . cu <nl> void THNN_ ( SpatialConvolutionLocal_updateOutput ) ( <nl> im2col ( <nl> THCState_getCurrentStream ( state ) , <nl> THCTensor_ ( data ) ( state , input_n ) , <nl> - nInputPlane , inputHeight , inputWidth , kH , kW , padH , padW , dH , dW , <nl> + nInputPlane , inputHeight , inputWidth , <nl> + outputHeight , outputWidth , <nl> + kH , kW , padH , padW , dH , dW , <nl> 1 , 1 , THCTensor_ ( data ) ( state , finput_n ) <nl> ) ; <nl> <nl> void THNN_ ( SpatialConvolutionLocal_accGradParameters ) ( <nl> im2col ( <nl> THCState_getCurrentStream ( state ) , <nl> THCTensor_ ( data ) ( state , input_n ) , <nl> - nInputPlane , inputHeight , inputWidth , kH , kW , padH , padW , dH , dW , <nl> + nInputPlane , inputHeight , inputWidth , <nl> + outputHeight , outputWidth , <nl> + kH , kW , padH , padW , dH , dW , <nl> 1 , 1 , THCTensor_ ( data ) ( state , finput_n ) <nl> ) ; <nl> <nl> mmm a / aten / src / THCUNN / generic / SpatialConvolutionMM . cu <nl> ppp b / aten / src / THCUNN / generic / SpatialConvolutionMM . cu <nl> void THNN_ ( SpatialConvolutionMM_updateOutput ) ( <nl> im2col ( <nl> THCState_getCurrentStream ( state ) , <nl> THCTensor_ ( data ) ( state , input_n ) , <nl> - nInputPlane , inputHeight , inputWidth , kH , kW , padH , padW , dH , dW , <nl> + nInputPlane , inputHeight , inputWidth , <nl> + outputHeight , outputWidth , <nl> + kH , kW , padH , padW , dH , dW , <nl> 1 , 1 , THCTensor_ ( data ) ( state , columns ) <nl> ) ; <nl> <nl> void THNN_ ( SpatialConvolutionMM_accGradParameters ) ( <nl> im2col ( <nl> THCState_getCurrentStream ( state ) , <nl> THCTensor_ ( data ) ( state , input_n ) , <nl> - nInputPlane , inputHeight , inputWidth , kH , kW , padH , padW , dH , dW , <nl> + nInputPlane , inputHeight , inputWidth , <nl> + outputHeight , outputWidth , <nl> + kH , kW , padH , padW , dH , dW , <nl> 1 , 1 , THCTensor_ ( data ) ( state , columns ) <nl> ) ; <nl> <nl> mmm a / aten / src / THCUNN / generic / SpatialDilatedConvolution . cu <nl> ppp b / aten / src / THCUNN / generic / SpatialDilatedConvolution . 
cu <nl> void THNN_ ( SpatialDilatedConvolution_updateOutput ) ( <nl> im2col ( <nl> THCState_getCurrentStream ( state ) , <nl> THCTensor_ ( data ) ( state , input_n ) , <nl> - nInputPlane , inputHeight , inputWidth , kH , kW , padH , padW , dH , dW , <nl> + nInputPlane , inputHeight , inputWidth , <nl> + outputHeight , outputWidth , <nl> + kH , kW , padH , padW , dH , dW , <nl> dilationH , dilationW , <nl> THCTensor_ ( data ) ( state , columns ) <nl> ) ; <nl> void THNN_ ( SpatialDilatedConvolution_accGradParameters ) ( <nl> im2col ( <nl> THCState_getCurrentStream ( state ) , <nl> THCTensor_ ( data ) ( state , input_n ) , <nl> - nInputPlane , inputHeight , inputWidth , kH , kW , padH , padW , dH , dW , <nl> + nInputPlane , inputHeight , inputWidth , <nl> + outputHeight , outputWidth , <nl> + kH , kW , padH , padW , dH , dW , <nl> dilationH , dilationW , <nl> THCTensor_ ( data ) ( state , columns ) <nl> ) ; <nl> mmm a / aten / src / THCUNN / generic / SpatialFullDilatedConvolution . cu <nl> ppp b / aten / src / THCUNN / generic / SpatialFullDilatedConvolution . cu <nl> void THNN_ ( SpatialFullDilatedConvolution_updateGradInput ) ( <nl> im2col ( <nl> THCState_getCurrentStream ( state ) , <nl> THCTensor_ ( data ) ( state , gradOutput_n ) , <nl> - nOutputPlane , outputHeight , outputWidth , kH , kW , padH , padW , dH , dW , <nl> + nOutputPlane , outputHeight , outputWidth , <nl> + inputHeight , inputWidth , <nl> + kH , kW , padH , padW , dH , dW , <nl> dilationH , dilationW , THCTensor_ ( data ) ( state , gradColumns ) <nl> ) ; <nl> <nl> void THNN_ ( SpatialFullDilatedConvolution_accGradParameters ) ( <nl> im2col ( <nl> THCState_getCurrentStream ( state ) , <nl> THCTensor_ ( data ) ( state , gradOutput_n ) , <nl> - nOutputPlane , outputHeight , outputWidth , kH , kW , padH , padW , dH , dW , <nl> + nOutputPlane , outputHeight , outputWidth , <nl> + inputHeight , inputWidth , <nl> + kH , kW , padH , padW , dH , dW , <nl> dilationH , dilationW , THCTensor_ ( data ) ( state , columns ) <nl> ) ; <nl> <nl> mmm a / aten / src / THCUNN / generic / VolumetricDilatedConvolution . cu <nl> ppp b / aten / src / THCUNN / generic / VolumetricDilatedConvolution . cu <nl> void THNN_ ( VolumetricDilatedConvolution_updateOutput ) ( <nl> THCState_getCurrentStream ( state ) , <nl> THCTensor_ ( data ) ( state , input_n ) , <nl> nInputPlane , inputDepth , inputHeight , inputWidth , <nl> + outputDepth , outputHeight , outputWidth , <nl> kT , kH , kW , padT , padH , padW , dT , dH , dW , <nl> dilationT , dilationH , dilationW , <nl> THCTensor_ ( data ) ( state , columns ) <nl> void THNN_ ( VolumetricDilatedConvolution_accGradParameters ) ( <nl> vol2col ( <nl> THCState_getCurrentStream ( state ) , <nl> THCTensor_ ( data ) ( state , input_n ) , <nl> - nInputPlane , inputDepth , inputHeight , inputWidth , kT , kH , kW , padT , padH , padW , dT , dH , dW , <nl> + nInputPlane , inputDepth , inputHeight , inputWidth , <nl> + outputDepth , outputHeight , outputWidth , <nl> + kT , kH , kW , padT , padH , padW , dT , dH , dW , <nl> dilationT , dilationH , dilationW , <nl> THCTensor_ ( data ) ( state , columns ) <nl> ) ; <nl> mmm a / aten / src / THCUNN / generic / VolumetricFullDilatedConvolution . cu <nl> ppp b / aten / src / THCUNN / generic / VolumetricFullDilatedConvolution . 
cu <nl> void THNN_ ( VolumetricFullDilatedConvolution_updateGradInput ) ( <nl> vol2col ( <nl> THCState_getCurrentStream ( state ) , <nl> THCTensor_ ( data ) ( state , gradOutput_n ) , <nl> - nOutputPlane , outputDepth , outputHeight , outputWidth , kT , kH , kW , padT , padH , padW , dT , dH , dW , <nl> + nOutputPlane , outputDepth , outputHeight , outputWidth , <nl> + inputDepth , inputHeight , inputWidth , <nl> + kT , kH , kW , padT , padH , padW , dT , dH , dW , <nl> dilationT , dilationH , dilationW , <nl> THCTensor_ ( data ) ( state , gradColumns ) <nl> ) ; <nl> void THNN_ ( VolumetricFullDilatedConvolution_accGradParameters ) ( <nl> vol2col ( <nl> THCState_getCurrentStream ( state ) , <nl> THCTensor_ ( data ) ( state , gradOutput_n ) , <nl> - nOutputPlane , outputDepth , outputHeight , outputWidth , kT , kH , kW , padT , padH , padW , dT , dH , dW , <nl> + nOutputPlane , outputDepth , outputHeight , outputWidth , <nl> + inputDepth , inputHeight , inputWidth , <nl> + kT , kH , kW , padT , padH , padW , dT , dH , dW , <nl> dilationT , dilationH , dilationW , <nl> THCTensor_ ( data ) ( state , columns ) <nl> ) ; <nl> mmm a / aten / src / THCUNN / im2col . h <nl> ppp b / aten / src / THCUNN / im2col . h <nl> __global__ void im2col_kernel ( const int n , const Dtype * data_im , <nl> template < typename Dtype > <nl> void im2col ( cudaStream_t stream , const Dtype * data_im , const int channels , <nl> const int height , const int width , <nl> + const int height_col , const int width_col , <nl> const int ksize_h , const int ksize_w , const int pad_h , <nl> const int pad_w , const int stride_h , const int stride_w , <nl> const int dilation_h , const int dilation_w , Dtype * data_col ) { <nl> / / We are going to launch channels * height_col * width_col kernels , each <nl> / / kernel responsible for copying a single - channel grid . <nl> - int height_col = ( height + 2 * pad_h - ( dilation_h * ( ksize_h - 1 ) + 1 ) ) <nl> - / stride_h + 1 ; <nl> - int width_col = ( width + 2 * pad_w - ( dilation_w * ( ksize_w - 1 ) + 1 ) ) <nl> - / stride_w + 1 ; <nl> int num_kernels = channels * height_col * width_col ; <nl> / / Launch <nl> im2col_kernel < < < GET_BLOCKS ( num_kernels ) , CUDA_NUM_THREADS , 0 , stream > > > ( <nl> mmm a / aten / src / THCUNN / vol2col . h <nl> ppp b / aten / src / THCUNN / vol2col . h <nl> CUDA_KERNEL_LOOP ( index , n ) { <nl> template < typename Dtype > <nl> void vol2col ( cudaStream_t stream , const Dtype * data_vol , const int channels , <nl> const int depth , const int height , const int width , <nl> + const int depth_col , const int height_col , const int width_col , <nl> const int ksize_t , const int ksize_h , const int ksize_w , <nl> const int pad_t , const int pad_h , const int pad_w , <nl> const int stride_t , const int stride_h , const int stride_w , <nl> void vol2col ( cudaStream_t stream , const Dtype * data_vol , const int channels , <nl> Dtype * data_col ) { <nl> / / We are going to launch channels * depth_col * height_col * width_col kernels , each <nl> / / kernel responsible for copying a single - channel grid . 
<nl> - int depth_col = ( depth + 2 * pad_t - ( dilation_t * ( ksize_t - 1 ) + 1 ) ) / stride_t + 1 ; <nl> - int height_col = ( height + 2 * pad_h - ( dilation_h * ( ksize_h - 1 ) + 1 ) ) / stride_h + 1 ; <nl> - int width_col = ( width + 2 * pad_w - ( dilation_w * ( ksize_w - 1 ) + 1 ) ) / stride_w + 1 ; <nl> int num_kernels = channels * depth_col * height_col * width_col ; <nl> / / Launch <nl> vol2col_kernel < < < GET_BLOCKS ( num_kernels ) , CUDA_NUM_THREADS , 0 , stream > > > ( <nl> mmm a / aten / src / THNN / generic / Col2Im . c <nl> ppp b / aten / src / THNN / generic / Col2Im . c <nl> <nl> # define TH_GENERIC_FILE " generic / Col2Im . c " <nl> # else <nl> <nl> + / / Note [ im2col / col2im output padding ] <nl> + / / ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ <nl> + / / Our implementations of im2col and col2im take both the input height / width as <nl> + / / well as a seemingly redundant output height / width . In principle , you could <nl> + / / compute the output height / width by using the convolution shape formulas . So , <nl> + / / what ' s up with that ? <nl> + / / <nl> + / / The trouble arises when one runs the backward of a transposed convolution <nl> + / / with output_padding > = stride . ( BTW , output_padding is known as adj inside <nl> + / / THNN . ) Let ' s consider a simple case where we have kernel = 2 , dilation = 2 , <nl> + / / stride = 1 , output_padding = 1 for a 4x4 input : <nl> + / / <nl> + / / Input : X <nl> + / / <nl> + / / Output : X . X . <nl> + / / . . . . <nl> + / / X . X . <nl> + / / . . . . <nl> + / / <nl> + / / If we compute backwards of output with a standard convolution on the output <nl> + / / with the same parameters , we would end up with a 2x2 grad_input ( because you <nl> + / / can slide the stencil over to the right once and down once ) . But that is all <nl> + / / out - of - bounds if you ' re computing backwards for a 1x1 input . <nl> + / / <nl> + / / " Now Edward , " you might say , " the real problem is that you set output_padding <nl> + / / > = stride , surely an error should have been raised in this case . " To <nl> + / / understand why it is useful to handle this case , we have to understand how we <nl> + / / compute the weight gradient of a convolution . Suppose we have a convolution <nl> + / / with kernel = 2 , stride = 2 on a 5x5 input . Let us see all the contributions of <nl> + / / weight [ 0 ] [ 0 ] ( which we have labeled w ) in the output : <nl> + / / <nl> + / / Input : a . b . . Weight : w . <nl> + / / . . . . . . . <nl> + / / c . d . . <nl> + / / . . . . . <nl> + / / . . . . . <nl> + / / <nl> + / / Output : [ aw + . . . bw + . . . ] <nl> + / / [ cw + . . . dw + . . . ] <nl> + / / <nl> + / / From this diagram , it easy to see that we can compute the weight gradient <nl> + / / by performing a * dilated * convolution between the input and the <nl> + / / output gradients with kernel = 2 , dilation = 2 , stride = 1 . But there ' s a rub : if <nl> + / / we do a dilated convolution directly , we ' ll end up with a 3x3 weight <nl> + / / gradient , when we clearly wanted a 2x2 . So how do we avoid going out <nl> + / / of bounds ? We could add a notion of ' output_padding ' for non - transposed <nl> + / / convolution , but another simple and effective fix is to just accept <nl> + / / the desired output size directly , and compute only within those bounds . 
<nl> + / / <nl> + / / <nl> + / / ALSO do vol2col <nl> + <nl> static void THNN_ ( im2col ) ( const real * data_im , const int channels , <nl> - const int height , const int width , const int kernel_h , const int kernel_w , <nl> + const int height , const int width , <nl> + const int output_height , const int output_width , <nl> + const int kernel_h , const int kernel_w , <nl> const int pad_h , const int pad_w , <nl> const int stride_h , const int stride_w , <nl> const int dilation_h , const int dilation_w , <nl> real * data_col ) { <nl> - const int height_col = ( height + 2 * pad_h - <nl> - ( dilation_h * ( kernel_h - 1 ) + 1 ) ) / stride_h + 1 ; <nl> - const int width_col = ( width + 2 * pad_w - <nl> - ( dilation_w * ( kernel_w - 1 ) + 1 ) ) / stride_w + 1 ; <nl> + const int height_col = output_height ; <nl> + const int width_col = output_width ; <nl> const int channels_col = channels * kernel_h * kernel_w ; <nl> for ( int c_col = 0 ; c_col < channels_col ; + + c_col ) { <nl> int w_offset = c_col % kernel_w ; <nl> mmm a / aten / src / THNN / generic / Im2Col . c <nl> ppp b / aten / src / THNN / generic / Im2Col . c <nl> void THNN_ ( Im2Col_updateOutput ) ( <nl> <nl> THNN_ ( im2col ) ( <nl> THTensor_ ( data ) ( input_n ) , <nl> - nInputPlane , inputHeight , inputWidth , kH , kW , padH , padW , sH , sW , <nl> + nInputPlane , <nl> + inputHeight , inputWidth , <nl> + outputHeight , outputWidth , <nl> + kH , kW , padH , padW , sH , sW , <nl> dH , dW , THTensor_ ( data ) ( output_n ) ) ; <nl> } <nl> <nl> mmm a / aten / src / THNN / generic / SpatialDilatedConvolution . c <nl> ppp b / aten / src / THNN / generic / SpatialDilatedConvolution . c <nl> void THNN_ ( SpatialDilatedConvolution_updateOutput ) ( <nl> / / Extract columns : <nl> THNN_ ( im2col ) ( <nl> THTensor_ ( data ) ( input_n ) , <nl> - nInputPlane , inputHeight , inputWidth , kH , kW , padH , padW , dH , dW , <nl> + nInputPlane , inputHeight , inputWidth , <nl> + outputHeight , outputWidth , <nl> + kH , kW , padH , padW , dH , dW , <nl> dilationH , dilationW , <nl> THTensor_ ( data ) ( columns ) <nl> ) ; <nl> void THNN_ ( SpatialDilatedConvolution_accGradParameters ) ( <nl> / / Extract columns : <nl> THNN_ ( im2col ) ( <nl> THTensor_ ( data ) ( input_n ) , <nl> - nInputPlane , inputHeight , inputWidth , kH , kW , padH , padW , dH , dW , <nl> + nInputPlane , inputHeight , inputWidth , <nl> + outputHeight , outputWidth , <nl> + kH , kW , padH , padW , dH , dW , <nl> dilationH , dilationW , <nl> THTensor_ ( data ) ( columns ) <nl> ) ; <nl> mmm a / aten / src / THNN / generic / SpatialFullDilatedConvolution . c <nl> ppp b / aten / src / THNN / generic / SpatialFullDilatedConvolution . 
c <nl> void THNN_ ( SpatialFullDilatedConvolution_updateGradInput ) ( <nl> / / Extract columns : <nl> THNN_ ( im2col ) ( <nl> THTensor_ ( data ) ( gradOutput_n ) , <nl> - nOutputPlane , outputHeight , outputWidth , kH , kW , padH , padW , dH , dW , <nl> + nOutputPlane , outputHeight , outputWidth , <nl> + inputHeight , inputWidth , <nl> + kH , kW , padH , padW , dH , dW , <nl> dilationH , dilationW , <nl> THTensor_ ( data ) ( gradColumns ) <nl> ) ; <nl> void THNN_ ( SpatialFullDilatedConvolution_accGradParameters ) ( <nl> / / Extract columns : <nl> THNN_ ( im2col ) ( <nl> THTensor_ ( data ) ( gradOutput_n ) , <nl> - nOutputPlane , outputHeight , outputWidth , kH , kW , padH , padW , dH , dW , <nl> + nOutputPlane , outputHeight , outputWidth , <nl> + inputHeight , inputWidth , <nl> + kH , kW , padH , padW , dH , dW , <nl> dilationH , dilationW , <nl> THTensor_ ( data ) ( columns ) <nl> ) ; <nl> mmm a / aten / src / THNN / generic / VolumetricDilatedConvolution . c <nl> ppp b / aten / src / THNN / generic / VolumetricDilatedConvolution . c <nl> void THNN_ ( VolumetricDilatedConvolution_updateOutput ) ( <nl> THNN_ ( vol2col ) ( <nl> THTensor_ ( data ) ( input_n ) , <nl> nInputPlane , inputDepth , inputHeight , inputWidth , <nl> + outputDepth , outputHeight , outputWidth , <nl> kT , kH , kW , padT , padH , padW , dT , dH , dW , <nl> dilationT , dilationH , dilationW , <nl> THTensor_ ( data ) ( columns ) <nl> void THNN_ ( VolumetricDilatedConvolution_accGradParameters ) ( <nl> THNN_ ( vol2col ) ( <nl> THTensor_ ( data ) ( input_n ) , <nl> nInputPlane , inputDepth , inputHeight , inputWidth , <nl> + outputDepth , outputHeight , outputWidth , <nl> kT , kH , kW , padT , padH , padW , dT , dH , dW , <nl> dilationT , dilationH , dilationW , <nl> THTensor_ ( data ) ( columns ) <nl> mmm a / aten / src / THNN / generic / VolumetricFullDilatedConvolution . c <nl> ppp b / aten / src / THNN / generic / VolumetricFullDilatedConvolution . c <nl> <nl> static void THNN_ ( vol2col ) ( <nl> const real * data_vol , const int channels , <nl> const int depth , const int height , const int width , <nl> + const int depth_col , const int height_col , const int width_col , <nl> const int kT , const int kH , const int kW , <nl> const int pT , const int pH , const int pW , <nl> const int dT , const int dH , const int dW , <nl> static void THNN_ ( vol2col ) ( <nl> real * data_col ) <nl> { <nl> int c , t , h , w ; <nl> - int depth_col = ( depth + 2 * pT - ( dilationT * ( kT - 1 ) + 1 ) ) / dT + 1 ; <nl> - int height_col = ( height + 2 * pH - ( dilationH * ( kH - 1 ) + 1 ) ) / dH + 1 ; <nl> - int width_col = ( width + 2 * pW - ( dilationW * ( kW - 1 ) + 1 ) ) / dW + 1 ; <nl> int channels_col = channels * kT * kH * kW ; <nl> for ( c = 0 ; c < channels_col ; + + c ) <nl> { <nl> void THNN_ ( VolumetricFullDilatedConvolution_updateGradInput ) ( <nl> THNN_ ( vol2col ) ( <nl> THTensor_ ( data ) ( gradOutput_n ) , <nl> nOutputPlane , outputDepth , outputHeight , outputWidth , <nl> + inputDepth , inputHeight , inputWidth , <nl> kT , kH , kW , <nl> pT , pH , pW , <nl> dT , dH , dW , <nl> void THNN_ ( VolumetricFullDilatedConvolution_accGradParameters ) ( <nl> THNN_ ( vol2col ) ( <nl> THTensor_ ( data ) ( gradOutput_n ) , nOutputPlane , <nl> outputDepth , outputHeight , outputWidth , <nl> + inputDepth , inputHeight , inputWidth , <nl> kT , kH , kW , <nl> pT , pH , pW , <nl> dT , dH , dW , <nl> | Fix memory corruption in im2col / vol2col based convolution kernels . 
( ) | pytorch/pytorch | 177b4509ceeb7a8ae2e176f52f4b41d9e71dc993 | 2018-02-12T19:13:42Z |
mmm a / lib / SILOptimizer / Utils / CastOptimizer . cpp <nl> ppp b / lib / SILOptimizer / Utils / CastOptimizer . cpp <nl> CastOptimizer : : optimizeBridgedObjCToSwiftCast ( SILDynamicCastInst dynamicCast ) { <nl> case CastConsumptionKind : : TakeAlways : <nl> Builder . createReleaseValue ( Loc , srcOp , Builder . getDefaultAtomicity ( ) ) ; <nl> break ; <nl> - case CastConsumptionKind : : TakeOnSuccess : <nl> + case CastConsumptionKind : : TakeOnSuccess : { <nl> / / Insert a release in the success BB . <nl> - Builder . setInsertionPoint ( SuccessBB - > begin ( ) ) ; <nl> - Builder . createReleaseValue ( Loc , srcOp , Builder . getDefaultAtomicity ( ) ) ; <nl> + SILBuilderWithScope SuccessBuilder ( SuccessBB - > begin ( ) ) ; <nl> + SuccessBuilder . emitDestroyValueOperation ( Loc , srcOp ) ; <nl> break ; <nl> + } <nl> case CastConsumptionKind : : BorrowAlways : <nl> llvm_unreachable ( " checked_cast_addr_br never has BorrowAlways " ) ; <nl> case CastConsumptionKind : : CopyOnSuccess : <nl> new file mode 100644 <nl> index 000000000000 . . 0e0f848db711 <nl> mmm / dev / null <nl> ppp b / test / SILOptimizer / constant_propagation_objc . sil <nl> <nl> + / / RUN : % target - sil - opt - enable - sil - verify - all % s - diagnostic - constant - propagation | % FileCheck % s <nl> + / / RUN : % target - sil - opt - enable - sil - verify - all % s - performance - constant - propagation | % FileCheck % s <nl> + <nl> + / / REQUIRES : objc_interop <nl> + <nl> + sil_stage canonical <nl> + <nl> + import Swift <nl> + import Foundation <nl> + import Builtin <nl> + <nl> + sil @ $ ss11AnyHashableVyABxcSHRzlufC : $ @ convention ( method ) < τ_0_0 where τ_0_0 : Hashable > ( @ in τ_0_0 , @ thin AnyHashable . Type ) - > @ out AnyHashable <nl> + <nl> + sil @ guaranteed_swift_array_user : $ @ convention ( thin ) < τ_0_0 > ( @ guaranteed Array < τ_0_0 > ) - > ( ) <nl> + <nl> + / / CHECK - LABEL : sil @ array_downcast_copyonsuccess : $ @ convention ( thin ) ( @ guaranteed NSArray ) - > ( ) { <nl> + / / CHECK : bb0 ( [ [ ARG : % . * ] ] : $ NSArray ) : <nl> + / / CHECK : [ [ INPUT : % . * ] ] = alloc_stack $ NSArray <nl> + / / CHECK : retain_value [ [ ARG ] ] <nl> + / / CHECK : store [ [ ARG ] ] to [ [ INPUT ] ] <nl> + / / CHECK : [ [ OUTPUT : % . * ] ] = alloc_stack $ Array < String > <nl> + / / CHECK : [ [ INPUT_VALUE : % . * ] ] = load [ [ INPUT ] ] <nl> + / / CHECK : br [ [ BRIDGE_BB : bb [ 0 - 9 ] + ] ] ( [ [ INPUT_VALUE ] ] : <nl> + / / <nl> + / / CHECK : [ [ SUCCESS_BB : bb [ 0 - 9 ] + ] ] : <nl> + / / CHECK : [ [ SUCCESS_VAL : % . * ] ] = load [ [ OUTPUT ] ] <nl> + / / CHECK : [ [ CAST_RESULT : % . * ] ] = apply { { % . * } } < String > ( [ [ SUCCESS_VAL ] ] ) <nl> + / / CHECK - NEXT : release_value [ [ SUCCESS_VAL ] ] <nl> + / / CHECK - NEXT : dealloc_stack [ [ OUTPUT ] ] <nl> + / / CHECK - NEXT : destroy_addr [ [ INPUT ] ] <nl> + / / CHECK - NEXT : dealloc_stack [ [ INPUT ] ] <nl> + / / CHECK - NEXT : br [ [ EXIT_BB : bb [ 0 - 9 ] + ] ] <nl> + / / <nl> + / / CHECK : [ [ FAIL_BB : bb [ 0 - 9 ] + ] ] : <nl> + / / CHECK - NEXT : dealloc_stack [ [ CAST_TMP : % . * ] ] <nl> + / / CHECK - NEXT : dealloc_stack [ [ OUTPUT ] ] <nl> + / / CHECK - NEXT : destroy_addr [ [ INPUT ] ] <nl> + / / CHECK - NEXT : dealloc_stack [ [ INPUT ] ] <nl> + / / CHECK - NEXT : br [ [ EXIT_BB ] ] <nl> + / / <nl> + / / CHECK : [ [ EXIT_BB ] ] : <nl> + / / CHECK : return <nl> + / / <nl> + / / CHECK : [ [ BRIDGE_BB ] ] ( [ [ INPUT_VALUE : % . * ] ] : $ NSArray ) : <nl> + / / CHECK : [ [ CAST_TMP : % . 
* ] ] = alloc_stack $ Optional < Array < String > > <nl> + / / CHECK : strong_retain [ [ INPUT_VALUE ] ] <nl> + / / CHECK : apply { { % . * } } < Array < String > > ( [ [ CAST_TMP ] ] , [ [ INPUT_VALUE ] ] , <nl> + / / CHECK : strong_release [ [ INPUT_VALUE ] ] <nl> + / / CHECK : switch_enum_addr [ [ CAST_TMP ] ] : $ * Optional < Array < String > > , case # Optional . none ! enumelt : [ [ FAIL_BB ] ] , default [ [ SUCCESS_TRAMPOLINE_BB : bb [ 0 - 9 ] + ] ] <nl> + / / <nl> + / / CHECK : [ [ SUCCESS_TRAMPOLINE_BB ] ] : <nl> + / / CHECK : [ [ PROJ_ENUM : % . * ] ] = unchecked_take_enum_data_addr [ [ CAST_TMP ] ] <nl> + / / CHECK : copy_addr [ take ] [ [ PROJ_ENUM ] ] to [ initialization ] [ [ OUTPUT ] ] <nl> + / / CHECK : dealloc_stack [ [ CAST_TMP ] ] <nl> + / / CHECK : br [ [ SUCCESS_BB ] ] <nl> + / / CHECK : } / / end sil function ' array_downcast_copyonsuccess ' <nl> + sil @ array_downcast_copyonsuccess : $ @ convention ( thin ) ( @ guaranteed NSArray ) - > ( ) { <nl> + bb0 ( % 0 : $ NSArray ) : <nl> + % 4 = alloc_stack $ NSArray <nl> + retain_value % 0 : $ NSArray <nl> + store % 0 to % 4 : $ * NSArray <nl> + % 7 = alloc_stack $ Array < String > <nl> + checked_cast_addr_br copy_on_success NSArray in % 4 : $ * NSArray to Array < String > in % 7 : $ * Array < String > , bb2 , bb3 <nl> + <nl> + bb2 : <nl> + % 9 = load % 7 : $ * Array < String > <nl> + % 10 = function_ref @ guaranteed_swift_array_user : $ @ convention ( thin ) < τ_0_0 > ( @ guaranteed Array < τ_0_0 > ) - > ( ) <nl> + apply % 10 < String > ( % 9 ) : $ @ convention ( thin ) < τ_0_0 > ( @ guaranteed Array < τ_0_0 > ) - > ( ) <nl> + release_value % 9 : $ Array < String > <nl> + dealloc_stack % 7 : $ * Array < String > <nl> + destroy_addr % 4 : $ * NSArray <nl> + dealloc_stack % 4 : $ * NSArray <nl> + br bb4 <nl> + <nl> + bb3 : <nl> + dealloc_stack % 7 : $ * Array < String > <nl> + destroy_addr % 4 : $ * NSArray <nl> + dealloc_stack % 4 : $ * NSArray <nl> + br bb4 <nl> + <nl> + bb4 : <nl> + % 9999 = tuple ( ) <nl> + return % 9999 : $ ( ) <nl> + } <nl> + <nl> + / / CHECK - LABEL : sil @ array_downcast_takeonsuccess : $ @ convention ( thin ) ( @ guaranteed NSArray ) - > ( ) { <nl> + / / CHECK : bb0 ( [ [ ARG : % . * ] ] : $ NSArray ) : <nl> + / / CHECK : [ [ INPUT : % . * ] ] = alloc_stack $ NSArray <nl> + / / CHECK : retain_value [ [ ARG ] ] <nl> + / / CHECK : store [ [ ARG ] ] to [ [ INPUT ] ] <nl> + / / CHECK : [ [ OUTPUT : % . * ] ] = alloc_stack $ Array < String > <nl> + / / CHECK : [ [ INPUT_VALUE : % . * ] ] = load [ [ INPUT ] ] <nl> + / / CHECK : br [ [ BRIDGE_BB : bb [ 0 - 9 ] + ] ] ( [ [ INPUT_VALUE ] ] : <nl> + / / <nl> + / / CHECK : [ [ SUCCESS_BB : bb [ 0 - 9 ] + ] ] : <nl> + / / CHECK : strong_release [ [ INPUT_VALUE : % . * ] ] : <nl> + / / CHECK : [ [ SUCCESS_VAL : % . * ] ] = load [ [ OUTPUT ] ] <nl> + / / CHECK : [ [ CAST_RESULT : % . * ] ] = apply { { % . * } } < String > ( [ [ SUCCESS_VAL ] ] ) <nl> + / / CHECK - NEXT : release_value [ [ SUCCESS_VAL ] ] <nl> + / / CHECK - NEXT : dealloc_stack [ [ OUTPUT ] ] <nl> + / / CHECK - NEXT : dealloc_stack [ [ INPUT ] ] <nl> + / / CHECK - NEXT : br [ [ EXIT_BB : bb [ 0 - 9 ] + ] ] <nl> + / / <nl> + / / CHECK : [ [ FAIL_BB : bb [ 0 - 9 ] + ] ] : <nl> + / / CHECK - NEXT : dealloc_stack [ [ CAST_TMP : % . 
* ] ] <nl> + / / CHECK - NEXT : dealloc_stack [ [ OUTPUT ] ] <nl> + / / CHECK - NEXT : destroy_addr [ [ INPUT ] ] <nl> + / / CHECK - NEXT : dealloc_stack [ [ INPUT ] ] <nl> + / / CHECK - NEXT : br [ [ EXIT_BB ] ] <nl> + / / <nl> + / / CHECK : [ [ EXIT_BB ] ] : <nl> + / / CHECK : return <nl> + / / <nl> + / / CHECK : [ [ BRIDGE_BB ] ] ( [ [ INPUT_VALUE ] ] : $ NSArray ) : <nl> + / / CHECK : [ [ CAST_TMP : % . * ] ] = alloc_stack $ Optional < Array < String > > <nl> + / / CHECK : strong_retain [ [ INPUT_VALUE ] ] <nl> + / / CHECK : apply { { % . * } } < Array < String > > ( [ [ CAST_TMP ] ] , [ [ INPUT_VALUE ] ] , <nl> + / / CHECK : strong_release [ [ INPUT_VALUE ] ] <nl> + / / NOTE : In contrast to with take_always , the release_value is above in SUCCESS_BLOCK <nl> + / / CHECK : switch_enum_addr [ [ CAST_TMP ] ] : $ * Optional < Array < String > > , case # Optional . none ! enumelt : [ [ FAIL_BB ] ] , default [ [ SUCCESS_TRAMPOLINE_BB : bb [ 0 - 9 ] + ] ] <nl> + / / <nl> + / / CHECK : [ [ SUCCESS_TRAMPOLINE_BB ] ] : <nl> + / / CHECK : [ [ PROJ_ENUM : % . * ] ] = unchecked_take_enum_data_addr [ [ CAST_TMP ] ] <nl> + / / CHECK : copy_addr [ take ] [ [ PROJ_ENUM ] ] to [ initialization ] [ [ OUTPUT ] ] <nl> + / / CHECK : dealloc_stack [ [ CAST_TMP ] ] <nl> + / / CHECK : br [ [ SUCCESS_BB ] ] <nl> + / / CHECK : } / / end sil function ' array_downcast_takeonsuccess ' <nl> + sil @ array_downcast_takeonsuccess : $ @ convention ( thin ) ( @ guaranteed NSArray ) - > ( ) { <nl> + bb0 ( % 0 : $ NSArray ) : <nl> + % 4 = alloc_stack $ NSArray <nl> + retain_value % 0 : $ NSArray <nl> + store % 0 to % 4 : $ * NSArray <nl> + % 7 = alloc_stack $ Array < String > <nl> + checked_cast_addr_br take_on_success NSArray in % 4 : $ * NSArray to Array < String > in % 7 : $ * Array < String > , bb2 , bb3 <nl> + <nl> + bb2 : <nl> + % 9 = load % 7 : $ * Array < String > <nl> + % 10 = function_ref @ guaranteed_swift_array_user : $ @ convention ( thin ) < τ_0_0 > ( @ guaranteed Array < τ_0_0 > ) - > ( ) <nl> + apply % 10 < String > ( % 9 ) : $ @ convention ( thin ) < τ_0_0 > ( @ guaranteed Array < τ_0_0 > ) - > ( ) <nl> + release_value % 9 : $ Array < String > <nl> + dealloc_stack % 7 : $ * Array < String > <nl> + dealloc_stack % 4 : $ * NSArray <nl> + br bb4 <nl> + <nl> + bb3 : <nl> + dealloc_stack % 7 : $ * Array < String > <nl> + destroy_addr % 4 : $ * NSArray <nl> + dealloc_stack % 4 : $ * NSArray <nl> + br bb4 <nl> + <nl> + bb4 : <nl> + % 9999 = tuple ( ) <nl> + return % 9999 : $ ( ) <nl> + } <nl> + <nl> + / / CHECK - LABEL : sil @ array_downcast_takealways : $ @ convention ( thin ) ( @ guaranteed NSArray ) - > ( ) { <nl> + / / CHECK : bb0 ( [ [ ARG : % . * ] ] : $ NSArray ) : <nl> + / / CHECK : [ [ INPUT : % . * ] ] = alloc_stack $ NSArray <nl> + / / CHECK : retain_value [ [ ARG ] ] <nl> + / / CHECK : store [ [ ARG ] ] to [ [ INPUT ] ] <nl> + / / CHECK : [ [ OUTPUT : % . * ] ] = alloc_stack $ Array < String > <nl> + / / CHECK : [ [ INPUT_VALUE : % . * ] ] = load [ [ INPUT ] ] <nl> + / / CHECK : br [ [ BRIDGE_BB : bb [ 0 - 9 ] + ] ] ( [ [ INPUT_VALUE ] ] : <nl> + / / <nl> + / / CHECK : [ [ SUCCESS_BB : bb [ 0 - 9 ] + ] ] : <nl> + / / CHECK : [ [ SUCCESS_VAL : % . * ] ] = load [ [ OUTPUT ] ] <nl> + / / CHECK : [ [ CAST_RESULT : % . * ] ] = apply { { % . 
* } } < String > ( [ [ SUCCESS_VAL ] ] ) <nl> + / / CHECK - NEXT : release_value [ [ SUCCESS_VAL ] ] <nl> + / / CHECK - NEXT : dealloc_stack [ [ OUTPUT ] ] <nl> + / / CHECK - NEXT : dealloc_stack [ [ INPUT ] ] <nl> + / / CHECK - NEXT : br [ [ EXIT_BB : bb [ 0 - 9 ] + ] ] <nl> + / / <nl> + / / CHECK : [ [ FAIL_BB : bb [ 0 - 9 ] + ] ] : <nl> + / / CHECK - NEXT : dealloc_stack [ [ CAST_TMP : % . * ] ] <nl> + / / CHECK - NEXT : dealloc_stack [ [ OUTPUT ] ] <nl> + / / CHECK - NEXT : dealloc_stack [ [ INPUT ] ] <nl> + / / CHECK - NEXT : br [ [ EXIT_BB ] ] <nl> + / / <nl> + / / CHECK : [ [ EXIT_BB ] ] : <nl> + / / CHECK : return <nl> + / / <nl> + / / CHECK : [ [ BRIDGE_BB ] ] ( [ [ INPUT_VALUE : % . * ] ] : $ NSArray ) : <nl> + / / CHECK : [ [ CAST_TMP : % . * ] ] = alloc_stack $ Optional < Array < String > > <nl> + / / CHECK : strong_retain [ [ INPUT_VALUE ] ] <nl> + / / CHECK : apply { { % . * } } < Array < String > > ( [ [ CAST_TMP ] ] , [ [ INPUT_VALUE ] ] , <nl> + / / CHECK : strong_release [ [ INPUT_VALUE ] ] <nl> + / / NOTE : When we perform take_always , this is the take of the cast . <nl> + / / CHECK : release_value [ [ INPUT_VALUE ] ] <nl> + / / CHECK : switch_enum_addr [ [ CAST_TMP ] ] : $ * Optional < Array < String > > , case # Optional . none ! enumelt : [ [ FAIL_BB ] ] , default [ [ SUCCESS_TRAMPOLINE_BB : bb [ 0 - 9 ] + ] ] <nl> + / / <nl> + / / CHECK : [ [ SUCCESS_TRAMPOLINE_BB ] ] : <nl> + / / CHECK : [ [ PROJ_ENUM : % . * ] ] = unchecked_take_enum_data_addr [ [ CAST_TMP ] ] <nl> + / / CHECK : copy_addr [ take ] [ [ PROJ_ENUM ] ] to [ initialization ] [ [ OUTPUT ] ] <nl> + / / CHECK : dealloc_stack [ [ CAST_TMP ] ] <nl> + / / CHECK : br [ [ SUCCESS_BB ] ] <nl> + / / CHECK : } / / end sil function ' array_downcast_takealways ' <nl> + sil @ array_downcast_takealways : $ @ convention ( thin ) ( @ guaranteed NSArray ) - > ( ) { <nl> + bb0 ( % 0 : $ NSArray ) : <nl> + % 4 = alloc_stack $ NSArray <nl> + retain_value % 0 : $ NSArray <nl> + store % 0 to % 4 : $ * NSArray <nl> + % 7 = alloc_stack $ Array < String > <nl> + checked_cast_addr_br take_always NSArray in % 4 : $ * NSArray to Array < String > in % 7 : $ * Array < String > , bb2 , bb3 <nl> + <nl> + bb2 : <nl> + % 9 = load % 7 : $ * Array < String > <nl> + % 10 = function_ref @ guaranteed_swift_array_user : $ @ convention ( thin ) < τ_0_0 > ( @ guaranteed Array < τ_0_0 > ) - > ( ) <nl> + apply % 10 < String > ( % 9 ) : $ @ convention ( thin ) < τ_0_0 > ( @ guaranteed Array < τ_0_0 > ) - > ( ) <nl> + release_value % 9 : $ Array < String > <nl> + dealloc_stack % 7 : $ * Array < String > <nl> + dealloc_stack % 4 : $ * NSArray <nl> + br bb4 <nl> + <nl> + bb3 : <nl> + dealloc_stack % 7 : $ * Array < String > <nl> + dealloc_stack % 4 : $ * NSArray <nl> + br bb4 <nl> + <nl> + bb4 : <nl> + % 9999 = tuple ( ) <nl> + return % 9999 : $ ( ) <nl> + } <nl> | Merge pull request from gottesmm / pr - d02a03999626f1d50a4bf0996dadb34e72d9f355 | apple/swift | a0421124f0de37d696e503c17c05afba19f74cf2 | 2019-05-25T03:48:10Z |
mmm a / src / resources / qml / ValueTabs . qml <nl> ppp b / src / resources / qml / ValueTabs . qml <nl> Repeater { <nl> <nl> itemDelegate : Item { <nl> Text { <nl> - anchors . verticalCenter : parent . verticalCenter <nl> + anchors . fill : parent <nl> color : styleData . textColor <nl> elide : styleData . elideMode <nl> text : styleData . value ? styleData . value + ( truncated ? ' . . . ' : ' ' ) : ( styleData . column = = 1 ) ? " [ not loaded from server ] " : " - - " <nl> - wrapMode : Text . WordWrap <nl> + wrapMode : Text . Wrap <nl> maximumLineCount : 1 <nl> } <nl> } <nl> | Fix issue : Overlap of column values | uglide/RedisDesktopManager | a58b0f5dd0910f52585cafb969b7bde2ee9edf4c | 2015-10-04T09:16:25Z |
mmm a / src / propertiesWidget . ui <nl> ppp b / src / propertiesWidget . ui <nl> <nl> < / item > <nl> < item > <nl> < layout class = " QHBoxLayout " name = " horizontalLayout " > <nl> + < property name = " spacing " > <nl> + < number > 9 < / number > <nl> + < / property > <nl> < property name = " leftMargin " > <nl> < number > 5 < / number > <nl> < / property > <nl> <nl> border : 1px solid rgb ( 85 , 81 , 91 ) ; <nl> border - radius : 3px ; <nl> padding : 2px ; <nl> - margin - left : 3px ; margin - right : 3px ; <nl> } < / string > <nl> < / property > <nl> < property name = " text " > <nl> margin - left : 3px ; margin - right : 3px ; <nl> border : 1px solid rgb ( 85 , 81 , 91 ) ; <nl> border - radius : 3px ; <nl> padding : 2px ; <nl> - margin - left : 3px ; margin - right : 3px ; <nl> } < / string > <nl> < / property > <nl> < property name = " text " > <nl> margin - left : 3px ; margin - right : 3px ; <nl> border : 1px solid rgb ( 85 , 81 , 91 ) ; <nl> border - radius : 3px ; <nl> padding : 2px ; <nl> - margin - left : 3px ; margin - right : 3px ; <nl> } < / string > <nl> < / property > <nl> < property name = " text " > <nl> margin - left : 3px ; margin - right : 3px ; <nl> border : 1px solid rgb ( 85 , 81 , 91 ) ; <nl> border - radius : 3px ; <nl> padding : 2px ; <nl> - margin - left : 3px ; margin - right : 3px ; <nl> } < / string > <nl> < / property > <nl> < property name = " text " > <nl> margin - left : 3px ; margin - right : 3px ; <nl> border : 1px solid rgb ( 85 , 81 , 91 ) ; <nl> border - radius : 3px ; <nl> padding : 2px ; <nl> - margin - left : 3px ; margin - right : 3px ; <nl> } < / string > <nl> < / property > <nl> < property name = " text " > <nl> mmm a / src / propertieswidget . cpp <nl> ppp b / src / propertieswidget . cpp <nl> <nl> # include " TorrentFilesModel . h " <nl> # include " peerlistwidget . h " <nl> <nl> - # define DEFAULT_BUTTON_CSS " QPushButton { border : 1px solid rgb ( 85 , 81 , 91 ) ; border - radius : 3px ; padding : 2px ; margin - left : 3px ; margin - right : 3px ; } " <nl> - # define SELECTED_BUTTON_CSS " QPushButton { border : 1px solid rgb ( 85 , 81 , 91 ) ; border - radius : 3px ; padding : 2px ; background - color : rgb ( 255 , 208 , 105 ) ; margin - left : 3px ; margin - right : 3px ; } " <nl> + # define DEFAULT_BUTTON_CSS " QPushButton { border : 1px solid rgb ( 85 , 81 , 91 ) ; border - radius : 3px ; padding : 2px ; } " <nl> + # define SELECTED_BUTTON_CSS " QPushButton { border : 1px solid rgb ( 85 , 81 , 91 ) ; border - radius : 3px ; padding : 2px ; background - color : rgb ( 255 , 208 , 105 ) ; } " <nl> <nl> PropertiesWidget : : PropertiesWidget ( QWidget * parent , TransferListWidget * transferList , bittorrent * BTSession ) : QWidget ( parent ) , transferList ( transferList ) , BTSession ( BTSession ) { <nl> setupUi ( this ) ; <nl> | - Attempt to fix property buttons layout on Mac OS | qbittorrent/qBittorrent | e30268cf3baf58281d2b155921e5fe58ff369084 | 2009-11-17T07:17:33Z |
mmm a / platform / windows / os_windows . cpp <nl> ppp b / platform / windows / os_windows . cpp <nl> void OS_Windows : : _drag_event ( float p_x , float p_y , int idx ) { <nl> if ( curr - > get ( ) = = Vector2 ( p_x , p_y ) ) <nl> return ; <nl> <nl> - curr - > get ( ) = Vector2 ( p_x , p_y ) ; <nl> - <nl> Ref < InputEventScreenDrag > event ; <nl> event . instance ( ) ; <nl> event - > set_index ( idx ) ; <nl> event - > set_position ( Vector2 ( p_x , p_y ) ) ; <nl> + event - > set_relative ( Vector2 ( p_x , p_y ) - curr - > get ( ) ) ; <nl> <nl> if ( main_loop ) <nl> input - > accumulate_input_event ( event ) ; <nl> + <nl> + curr - > get ( ) = Vector2 ( p_x , p_y ) ; <nl> } ; <nl> <nl> LRESULT OS_Windows : : WndProc ( HWND hWnd , UINT uMsg , WPARAM wParam , LPARAM lParam ) { <nl> | Merge pull request from jcs224 / windows - screendrag - relative | godotengine/godot | 4f0c05402f94e6055463c86bcdbb06b2b72b3d7d | 2019-10-27T08:47:12Z |
mmm a / tools / jenkins - scripts / ios - build . sh <nl> ppp b / tools / jenkins - scripts / ios - build . sh <nl> <nl> # put xctool . sh into your PATH <nl> DIR = " $ ( cd " $ ( dirname " $ { BASH_SOURCE [ 0 ] } " ) " & & pwd ) " <nl> COCOS2DX_ROOT = " $ DIR " / . . / . . <nl> - xcodebuild - project " $ COCOS2DX_ROOT " / build / cocos2d_tests . xcodeproj - target " build all tests iOS " - destination " platform = iOS Simulator , name = iPhone Retina ( 4 - inch ) " clean <nl> - xcodebuild - project " $ COCOS2DX_ROOT " / build / cocos2d_tests . xcodeproj - target " build all tests iOS " - destination " platform = iOS Simulator , name = iPhone Retina ( 4 - inch ) " build <nl> + xcodebuild - project " $ COCOS2DX_ROOT " / build / cocos2d_tests . xcodeproj - scheme " build all tests iOS " - destination " platform = iOS Simulator , name = iPhone Retina ( 4 - inch ) " clean <nl> + xcodebuild - project " $ COCOS2DX_ROOT " / build / cocos2d_tests . xcodeproj - scheme " build all tests iOS " - destination " platform = iOS Simulator , name = iPhone Retina ( 4 - inch ) " build <nl> | fix ios build code sign | cocos2d/cocos2d-x | 26cf4321d291d6cacc3f7d6ed4b5578be9b4868c | 2014-07-30T15:59:19Z |
mmm a / dbms / src / Interpreters / Cluster . cpp <nl> ppp b / dbms / src / Interpreters / Cluster . cpp <nl> Cluster : : Address : : Address ( const Poco : : Util : : AbstractConfiguration & config , cons <nl> default_database = config . getString ( config_prefix + " . default_database " , " " ) ; <nl> secure = config . getBool ( config_prefix + " . secure " , false ) ? Protocol : : Secure : : Enable : Protocol : : Secure : : Disable ; <nl> compression = config . getBool ( config_prefix + " . compression " , true ) ? Protocol : : Compression : : Enable : Protocol : : Compression : : Disable ; <nl> - is_local = isLocal ( config . getInt ( " tcp_port " , 0 ) ) ; <nl> + const char * port_type = secure = = Protocol : : Secure : : Enable ? " tcp_port_secure " : " tcp_port " ; <nl> + is_local = isLocal ( config . getInt ( port_type , 0 ) ) ; <nl> } <nl> <nl> <nl> mmm a / dbms / src / Interpreters / DDLWorker . cpp <nl> ppp b / dbms / src / Interpreters / DDLWorker . cpp <nl> void DDLWorker : : processTask ( DDLTask & task , const ZooKeeperPtr & zookeeper ) <nl> } <nl> catch ( . . . ) <nl> { <nl> - task . execution_status = ExecutionStatus : : fromCurrentException ( " An error occured before execution " ) ; <nl> + tryLogCurrentException ( log , " An error occurred before execution of DDL task : " ) ; <nl> + task . execution_status = ExecutionStatus : : fromCurrentException ( " An error occurred before execution " ) ; <nl> } <nl> <nl> / / / We need to distinguish ZK errors occured before and after query executing <nl> mmm a / dbms / src / Storages / Distributed / DirectoryMonitor . cpp <nl> ppp b / dbms / src / Storages / Distributed / DirectoryMonitor . cpp <nl> namespace <nl> <nl> StorageDistributedDirectoryMonitor : : StorageDistributedDirectoryMonitor ( <nl> StorageDistributed & storage_ , const std : : string & name_ , const ConnectionPoolPtr & pool_ , ActionBlocker & monitor_blocker_ ) <nl> - : storage ( storage_ ) , pool { pool_ } , path { storage . path + name_ + ' / ' } <nl> + : storage ( storage_ ) <nl> + , pool { pool_ } <nl> + , name { name_ } <nl> + , path { storage . path + name + ' / ' } <nl> , current_batch_file_path { path + " current_batch . txt " } <nl> , default_sleep_time { storage . global_context . getSettingsRef ( ) . distributed_directory_monitor_sleep_time_ms . totalMilliseconds ( ) } <nl> , sleep_time { default_sleep_time } <nl> std : : string StorageDistributedDirectoryMonitor : : getLoggerName ( ) const <nl> return storage . table_name + ' . ' + storage . getName ( ) + " . DirectoryMonitor " ; <nl> } <nl> <nl> + void StorageDistributedDirectoryMonitor : : updatePath ( ) <nl> + { <nl> + std : : lock_guard lock { mutex } ; <nl> + path = storage . path + name + ' / ' ; <nl> + current_batch_file_path = path + " current_batch . txt " ; <nl> + } <nl> + <nl> } <nl> mmm a / dbms / src / Storages / Distributed / DirectoryMonitor . h <nl> ppp b / dbms / src / Storages / Distributed / DirectoryMonitor . h <nl> class StorageDistributedDirectoryMonitor <nl> <nl> static ConnectionPoolPtr createPool ( const std : : string & name , const StorageDistributed & storage ) ; <nl> <nl> + void updatePath ( ) ; <nl> + <nl> void flushAllData ( ) ; <nl> <nl> void shutdownAndDropAllData ( ) ; <nl> class StorageDistributedDirectoryMonitor <nl> <nl> StorageDistributed & storage ; <nl> ConnectionPoolPtr pool ; <nl> + std : : string name ; <nl> std : : string path ; <nl> <nl> bool should_batch_inserts = false ; <nl> mmm a / dbms / src / Storages / StorageDistributed . 
cpp <nl> ppp b / dbms / src / Storages / StorageDistributed . cpp <nl> void StorageDistributed : : flushClusterNodesAllData ( ) <nl> it - > second . flushAllData ( ) ; <nl> } <nl> <nl> + void StorageDistributed : : rename ( const String & new_path_to_db , const String & new_database_name , const String & new_table_name , <nl> + TableStructureWriteLockHolder & ) <nl> + { <nl> + table_name = new_table_name ; <nl> + database_name = new_database_name ; <nl> + if ( ! path . empty ( ) ) <nl> + { <nl> + Poco : : File ( path ) . renameTo ( new_path_to_db + escapeForFileName ( new_table_name ) ) ; <nl> + path = new_path_to_db + escapeForFileName ( new_table_name ) + ' / ' ; <nl> + std : : lock_guard lock ( cluster_nodes_mutex ) ; <nl> + for ( auto & node : cluster_nodes_data ) <nl> + node . second . directory_monitor - > updatePath ( ) ; <nl> + } <nl> + } <nl> + <nl> <nl> void registerStorageDistributed ( StorageFactory & factory ) <nl> { <nl> mmm a / dbms / src / Storages / StorageDistributed . h <nl> ppp b / dbms / src / Storages / StorageDistributed . h <nl> class StorageDistributed : public ext : : shared_ptr_helper < StorageDistributed > , pu <nl> / / / Removes temporary data in local filesystem . <nl> void truncate ( const ASTPtr & , const Context & , TableStructureWriteLockHolder & ) override ; <nl> <nl> - void rename ( const String & / * new_path_to_db * / , const String & new_database_name , const String & new_table_name , TableStructureWriteLockHolder & ) override <nl> - { <nl> - table_name = new_table_name ; <nl> - database_name = new_database_name ; <nl> - } <nl> + void rename ( const String & new_path_to_db , const String & new_database_name , const String & new_table_name , TableStructureWriteLockHolder & ) override ; <nl> <nl> / / / in the sub - tables , you need to manually add and delete columns <nl> / / / the structure of the sub - table is not checked <nl> mmm a / dbms / tests / integration / test_distributed_ddl / cluster . py <nl> ppp b / dbms / tests / integration / test_distributed_ddl / cluster . py <nl> def insert_reliable ( instance , query_insert ) : <nl> if not ( s . find ( ' Unknown status , client must retry ' ) > = 0 or s . find ( ' zkutil : : KeeperException ' ) ) : <nl> raise e <nl> <nl> - raise last_exception <nl> \ No newline at end of file <nl> + raise last_exception <nl> mmm a / dbms / tests / integration / test_distributed_ddl / test . py <nl> ppp b / dbms / tests / integration / test_distributed_ddl / test . py <nl> def test_create_reserved ( test_cluster ) : <nl> test_cluster . ddl_check_query ( instance , " DROP TABLE IF EXISTS test_as_reserved ON CLUSTER cluster " ) <nl> <nl> <nl> + def test_rename ( test_cluster ) : <nl> + instance = test_cluster . instances [ ' ch1 ' ] <nl> + rules = test_cluster . pm_random_drops . pop_rules ( ) <nl> + test_cluster . ddl_check_query ( instance , " CREATE TABLE rename_shard ON CLUSTER cluster ( id Int64 , sid String DEFAULT concat ( ' old ' , toString ( id ) ) ) ENGINE = ReplicatedMergeTree ( ' / clickhouse / tables / { shard } / staging / test_shard ' , ' { replica } ' ) ORDER BY ( id ) " ) <nl> + test_cluster . ddl_check_query ( instance , " CREATE TABLE rename_new ON CLUSTER cluster AS rename_shard ENGINE = Distributed ( cluster , default , rename_shard , id % 2 ) " ) <nl> + test_cluster . ddl_check_query ( instance , " RENAME TABLE rename_new TO rename ON CLUSTER cluster ; " ) <nl> + <nl> + <nl> + for i in range ( 10 ) : <nl> + instance . query ( " insert into rename ( id ) values ( { } ) " . 
format ( i ) ) <nl> + <nl> + # FIXME ddl_check_query doesnt work for replicated DDDL if replace_hostnames_with_ips = True <nl> + # because replicas use wrong host name of leader ( and wrong path in zk ) to check if it has executed query <nl> + # so ddl query will always fail on some replicas even if query was actually executed by leader <nl> + # Also such inconsistency in cluster configuration may lead to query duplication if leader suddenly changed <nl> + # because path of lock in zk contains shard name , which is list of host names of replicas <nl> + instance . query ( " ALTER TABLE rename_shard ON CLUSTER cluster MODIFY COLUMN sid String DEFAULT concat ( ' new ' , toString ( id ) ) " , ignore_error = True ) <nl> + time . sleep ( 1 ) <nl> + <nl> + test_cluster . ddl_check_query ( instance , " CREATE TABLE rename_new ON CLUSTER cluster AS rename_shard ENGINE = Distributed ( cluster , default , rename_shard , id % 2 ) " ) <nl> + <nl> + instance . query ( " system stop distributed sends rename " ) <nl> + <nl> + for i in range ( 10 , 20 ) : <nl> + instance . query ( " insert into rename ( id ) values ( { } ) " . format ( i ) ) <nl> + <nl> + test_cluster . ddl_check_query ( instance , " RENAME TABLE rename TO rename_old , rename_new TO rename ON CLUSTER cluster " ) <nl> + <nl> + for i in range ( 20 , 30 ) : <nl> + instance . query ( " insert into rename ( id ) values ( { } ) " . format ( i ) ) <nl> + <nl> + instance . query ( " system flush distributed rename " ) <nl> + for name in [ ' ch1 ' , ' ch2 ' , ' ch3 ' , ' ch4 ' ] : <nl> + test_cluster . instances [ name ] . query ( " system sync replica rename_shard " ) <nl> + <nl> + # system stop distributed sends does not affect inserts into local shard , <nl> + # so some ids in range ( 10 , 20 ) will be inserted into rename_shard <nl> + assert instance . query ( " select count ( id ) , sum ( id ) from rename " ) . rstrip ( ) = = " 25 \ t360 " <nl> + # assert instance . query ( " select count ( id ) , sum ( id ) from rename " ) . rstrip ( ) = = " 20 \ t290 " <nl> + assert instance . query ( " select count ( id ) , sum ( id ) from rename where sid like ' old % ' " ) . rstrip ( ) = = " 15 \ t115 " <nl> + # assert instance . query ( " select count ( id ) , sum ( id ) from rename where sid like ' old % ' " ) . rstrip ( ) = = " 10 \ t45 " <nl> + assert instance . query ( " select count ( id ) , sum ( id ) from rename where sid like ' new % ' " ) . rstrip ( ) = = " 10 \ t245 " <nl> + test_cluster . pm_random_drops . push_rules ( rules ) <nl> + <nl> + <nl> if __name__ = = ' __main__ ' : <nl> with contextmanager ( test_cluster ) ( ) as ctx_cluster : <nl> for name , instance in ctx_cluster . instances . items ( ) : <nl> | fix rename distributed | ClickHouse/ClickHouse | d1568c16145a985f55e8304b01f0efe87e8ce3d0 | 2019-12-19T19:51:12Z |
similarity index 89 % <nl> rename from cocos2dx / platform / third_party / marmalade / libtiff / libtiff . mkf <nl> rename to cocos2dx / platform / third_party / marmalade / libtiff . mkf <nl> mmm a / cocos2dx / platform / third_party / marmalade / libtiff / libtiff . mkf <nl> ppp b / cocos2dx / platform / third_party / marmalade / libtiff . mkf <nl> <nl> - includepath headers <nl> + includepath <nl> + { <nl> + libtiff / headers <nl> + } <nl> <nl> files <nl> { <nl> - [ Source ] <nl> - ( source ) <nl> + [ source ] <nl> + ( libtiff / source ) <nl> tif_aux . c <nl> tif_close . c <nl> tif_codec . c <nl> files <nl> tif_write . c <nl> tif_zip . c <nl> <nl> - [ Headers ] <nl> - ( headers ) <nl> + [ headers ] <nl> + ( libtiff / headers ) <nl> t4 . h <nl> tiff . h <nl> tiffconf . h <nl> mmm a / cocos2dx / platform / third_party / marmalade / libxml2 . mkf <nl> ppp b / cocos2dx / platform / third_party / marmalade / libxml2 . mkf <nl> undefines <nl> <nl> files <nl> { <nl> - ( libxml2 / sources ) <nl> - elfgcchack . h <nl> - libxml . h <nl> - trio . h <nl> - triodef . h <nl> - trionan . h <nl> - triop . h <nl> - triostr . h <nl> - <nl> ( libxml2 / include / libxml ) <nl> [ " common / libxml " ] <nl> c14n . h <nl> files <nl> <nl> ( libxml2 / sources ) <nl> [ " sources " ] <nl> + elfgcchack . h <nl> + libxml . h <nl> + trio . h <nl> + triodef . h <nl> + trionan . h <nl> + triop . h <nl> + triostr . h <nl> + <nl> c14n . c <nl> catalog . c <nl> chvalid . c <nl> | reorganize third party libs | cocos2d/cocos2d-x | 29a2f273b146f9d1a36f481878a27b509e5f9a3f | 2012-12-29T22:56:14Z |
mmm a / stdlib / runtime / HeapObject . cpp <nl> ppp b / stdlib / runtime / HeapObject . cpp <nl> <nl> # include " Private . h " <nl> # include " Debug . h " <nl> # include < algorithm > <nl> - # include < malloc / malloc . h > <nl> # include < cassert > <nl> # include < cstring > <nl> # include < cstdio > <nl> mmm a / stdlib / runtime / Stubs . cpp <nl> ppp b / stdlib / runtime / Stubs . cpp <nl> <nl> / / <nl> / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> <nl> - # include < mach / mach_time . h > <nl> # include < sys / resource . h > <nl> # include < sys / errno . h > <nl> # include < unistd . h > <nl> | Remove unused includes | apple/swift | 7a53df0d372781898941c0320f2adc65cd7c539c | 2014-09-28T01:16:14Z |
mmm a / benchmark / single - source / StringBuilder . swift <nl> ppp b / benchmark / single - source / StringBuilder . swift <nl> public let StringBuilder = [ <nl> BenchmarkInfo ( <nl> name : " StringUTF16Builder " , <nl> runFunction : run_StringUTF16Builder , <nl> - tags : [ . validation , . api , . String ] ) , <nl> + tags : [ . validation , . api , . String ] , <nl> + legacyFactor : 10 ) , <nl> BenchmarkInfo ( <nl> name : " StringUTF16SubstringBuilder " , <nl> runFunction : run_StringUTF16SubstringBuilder , <nl> - tags : [ . validation , . api , . String ] ) , <nl> + tags : [ . validation , . api , . String ] , <nl> + legacyFactor : 10 ) , <nl> BenchmarkInfo ( <nl> name : " StringBuilderLong " , <nl> runFunction : run_StringBuilderLong , <nl> - tags : [ . validation , . api , . String ] ) , <nl> + tags : [ . validation , . api , . String ] , <nl> + legacyFactor : 10 ) , <nl> BenchmarkInfo ( <nl> name : " StringBuilderWithLongSubstring " , <nl> runFunction : run_StringBuilderWithLongSubstring , <nl> - tags : [ . validation , . api , . String ] ) , <nl> + tags : [ . validation , . api , . String ] , <nl> + legacyFactor : 10 ) , <nl> BenchmarkInfo ( <nl> name : " StringWordBuilder " , <nl> runFunction : run_StringWordBuilder , <nl> - tags : [ . validation , . api , . String ] ) , <nl> + tags : [ . validation , . api , . String ] , <nl> + legacyFactor : 10 ) , <nl> BenchmarkInfo ( <nl> name : " StringWordBuilderReservingCapacity " , <nl> runFunction : run_StringWordBuilderReservingCapacity , <nl> - tags : [ . validation , . api , . String ] ) , <nl> + tags : [ . validation , . api , . String ] , <nl> + legacyFactor : 10 ) , <nl> ] <nl> <nl> @ inline ( never ) <nl> func buildStringFromSmallSubstrings ( _ i : String ) - > String { <nl> <nl> @ inline ( never ) <nl> public func run_StringUTF16Builder ( _ N : Int ) { <nl> - for _ in 1 . . . 5000 * N { <nl> + for _ in 1 . . . 500 * N { <nl> blackHole ( buildStringUTF16 ( " a " ) ) <nl> } <nl> } <nl> <nl> @ inline ( never ) <nl> public func run_StringUTF16SubstringBuilder ( _ N : Int ) { <nl> - for _ in 1 . . . 5000 * N { <nl> + for _ in 1 . . . 500 * N { <nl> blackHole ( buildStringFromSmallSubstrings ( " a " ) ) <nl> } <nl> } <nl> func buildStringWithLongSubstring ( _ i : String ) - > String { <nl> <nl> @ inline ( never ) <nl> public func run_StringBuilderLong ( _ N : Int ) { <nl> - for _ in 1 . . . 5000 * N { <nl> + for _ in 1 . . . 500 * N { <nl> blackHole ( buildStringLong ( " 👻 " ) ) <nl> } <nl> } <nl> <nl> @ inline ( never ) <nl> public func run_StringBuilderWithLongSubstring ( _ N : Int ) { <nl> - for _ in 1 . . . 5000 * N { <nl> + for _ in 1 . . . 500 * N { <nl> blackHole ( buildStringWithLongSubstring ( " 👻 " ) ) <nl> } <nl> } <nl> func buildString ( <nl> <nl> @ inline ( never ) <nl> public func run_StringWordBuilder ( _ N : Int ) { <nl> - blackHole ( buildString ( <nl> - word : " bumfuzzle " , count : 50_000 * N , reservingCapacity : false ) ) <nl> + for _ in 1 . . . N { <nl> + blackHole ( buildString ( <nl> + word : " bumfuzzle " , count : 5_000 , reservingCapacity : false ) ) <nl> + } <nl> } <nl> <nl> @ inline ( never ) <nl> public func run_StringWordBuilderReservingCapacity ( _ N : Int ) { <nl> - blackHole ( buildString ( <nl> - word : " bumfuzzle " , count : 50_000 * N , reservingCapacity : true ) ) <nl> + for _ in 1 . . . 
N { <nl> + blackHole ( buildString ( <nl> + word : " bumfuzzle " , count : 5_000 , reservingCapacity : true ) ) <nl> + } <nl> } <nl> - <nl> | [ benchmark ] StringBuilder Legacy Factor | apple/swift | 7c8beb7b700e1afb5a0d5f8f8c1561423106e16b | 2018-12-18T19:43:07Z |
mmm a / example / speech - demo / io_func / feat_io . py <nl> ppp b / example / speech - demo / io_func / feat_io . py <nl> <nl> import time <nl> import re <nl> <nl> - from utils . utils import to_bool <nl> + from utils import to_bool <nl> from . feat_readers . common import * <nl> from . feat_readers import stats <nl> <nl> mmm a / example / speech - demo / train_lstm . py <nl> ppp b / example / speech - demo / train_lstm . py <nl> def parse_args ( ) : <nl> for name , _ in default_cfg . items ( sec ) : <nl> arg_name = ( ' % s_ % s ' % ( sec , name ) ) . replace ( ' - ' , ' _ ' ) <nl> if hasattr ( args , arg_name ) and getattr ( args , arg_name ) is not None : <nl> - print ( ' ! ! CMDLine overwriting % s . % s : ' % ( sec , name ) , file = sys . stderr ) <nl> - print ( " ' % s ' = > ' % s ' " % ( default_cfg . get ( sec , name ) , <nl> - getattr ( args , arg_name ) ) , file = sys . stderr ) <nl> + sys . stderr . write ( ' ! ! CMDLine overwriting % s . % s : \ n ' % ( sec , name ) ) <nl> + sys . stderr . write ( " ' % s ' = > ' % s ' \ n " % ( default_cfg . get ( sec , name ) , <nl> + getattr ( args , arg_name ) ) ) <nl> default_cfg . set ( sec , name , getattr ( args , arg_name ) ) <nl> <nl> args . config = default_cfg <nl> - print ( " = " * 80 , file = sys . stderr ) <nl> + sys . stderr . write ( " = " * 80 + " \ n " ) <nl> return args <nl> <nl> def prepare_data ( args ) : <nl> def do_training ( training_method , args , module , data_train , data_val ) : <nl> module . bind ( data_shapes = data_train . provide_data , <nl> label_shapes = data_train . provide_label , <nl> for_training = True ) <nl> - module . init_params ( initializer = mx . initializer . Uniform ( 0 . 01 ) ) <nl> + module . init_params ( initializer = mx . initializer . Uniform ( 0 . 1 ) ) <nl> module . init_optimizer ( kvstore = ' local ' , <nl> optimizer = args . config . get ( ' train ' , ' optimizer ' ) , <nl> optimizer_params = { ' lr_scheduler ' : lr_scheduler , <nl> deleted file mode 100644 <nl> index e69de29bb2d . . 00000000000 <nl> deleted file mode 100644 <nl> index aca760fbcd5 . . 00000000000 <nl> mmm a / example / speech - demo / utils / hidden_posterior_stats . py <nl> ppp / dev / null <nl> <nl> - " " " <nl> - x = hidden posterior <nl> - <nl> - 1 ) For each hidden layer <nl> - <nl> - stat_1 : <nl> - pi = 1 / ( 1 + exp ( - x ) <nl> - pi = pi / sum ( pj ) <nl> - sum over each file <nl> - <nl> - stat_2 : <nl> - no sigmoid involved <nl> - pass all the vectors through softmax <nl> - sum all the vectors per file <nl> - <nl> - stat_3 : <nl> - need pi vector concatenated with 1 - pi vector <nl> - ( result is a bigger vector ) <nl> - <nl> - II ) For the final layer <nl> - <nl> - stat_1 : <nl> - pass hidden posteriors through softmax <nl> - not the same as the final output of the network ( since there is no sigmoid ) <nl> - <nl> - Save format for DNN with 2 hidden layers : <nl> - $ { stats_dir } / <nl> - hidden_0 / <nl> - basename . stat_1 <nl> - basename . stat_2 <nl> - basename . stat_3 <nl> - hidden_1 / <nl> - basename . stat_1 <nl> - basename . stat_2 <nl> - basename . stat_3 <nl> - final / <nl> - basename . stat_1 <nl> - hidden_posteriors / < - only if save_hidden_posteriors = = true <nl> - basename . L0 <nl> - basename . L1 <nl> - basename . L2 < - final layer <nl> - . . . <nl> - <nl> - All the . stat_ files ( which each store a single vector ) <nl> - are stored in one big column of numbers instead of 1 row of numbers . <nl> - As before , the . L * hidden posterior files still contain one row of numbers per frame . 
<nl> - " " " <nl> - <nl> - import os <nl> - import logging <nl> - import numpy <nl> - import sys <nl> - <nl> - import utils <nl> - <nl> - def numpy_sigmoid ( X ) : <nl> - " " " <nl> - numpy . array - > numpy . array <nl> - compute sigmoid function : 1 / ( 1 + exp ( - X ) ) <nl> - Dunno why this comment is here : All elememnts should be in [ 0 , 1 ] <nl> - " " " <nl> - return 1 . / ( 1 . + numpy . exp ( - X ) ) <nl> - <nl> - def numpy_normalize_rows ( a ) : <nl> - row_sums = a . sum ( axis = 1 ) <nl> - new_matrix = a / row_sums [ : , numpy . newaxis ] <nl> - return new_matrix <nl> - <nl> - def numpy_softmax ( X ) : <nl> - " " " <nl> - http : / / nbviewer . ipython . org / github / dolaameng / tutorials / blob / master / ml - tutorials / COURSE_deep_learning . ipynb <nl> - numpy . array - > numpy . array <nl> - Compute softmax function : exp ( X ) / sum ( exp ( X , 1 ) ) <nl> - where each row of X is a vector output ( e . g . , different columns representing <nl> - outputs for different classes ) <nl> - The output of softmax is a matrix , with the sum of each row to be nearly 1 . 0 <nl> - as it is the probabilities that are calculated . <nl> - " " " <nl> - mx = numpy . max ( X ) <nl> - ex = numpy . exp ( X - mx ) # prefer zeros over stack overflow - but NOT always useful <nl> - return ex / numpy . sum ( ex , 1 ) . reshape ( - 1 , 1 ) <nl> - <nl> - class FinalStat1 : <nl> - def __init__ ( self , part_posterior_sum = None , part_answer = None ) : <nl> - self . part_posterior_sum = part_posterior_sum <nl> - self . part_answer = part_answer <nl> - <nl> - class HiddenPosteriorStats : <nl> - <nl> - def __init__ ( self , stats_dir , save_hidden_posteriors , num_hidden , n_outs ) : <nl> - self . logger = logging . getLogger ( __name__ ) <nl> - <nl> - self . stats_dir = stats_dir <nl> - self . save_hidden_posteriors = save_hidden_posteriors <nl> - self . num_hidden = num_hidden <nl> - self . n_outs = n_outs <nl> - <nl> - # create directory structure <nl> - if self . stats_dir is not None : <nl> - for i in xrange ( self . num_hidden ) : <nl> - folder = " % s / hidden_ % d " % ( self . stats_dir , i ) <nl> - utils . makedirs ( folder ) <nl> - folder = " % s / final " % ( self . stats_dir , ) <nl> - utils . makedirs ( folder ) <nl> - <nl> - if save_hidden_posteriors : <nl> - self . logger . info ( " saving hidden posteriors " ) <nl> - self . hidden_posteriors_dir = " % s / hidden_posteriors " % ( self . stats_dir , ) <nl> - utils . makedirs ( self . hidden_posteriors_dir ) <nl> - <nl> - # used for asserts <nl> - self . state = 0 <nl> - <nl> - # some stats stuff <nl> - self . total_feats = 0 <nl> - self . diction = { str ( x ) : [ 0 , 0 ] for x in xrange ( self . n_outs ) } <nl> - <nl> - def partition_start ( self , name ) : <nl> - assert ( self . state = = 0 ) <nl> - self . state = 1 <nl> - <nl> - self . part_feats = 0 <nl> - <nl> - self . name = name # save the name for other functions <nl> - <nl> - if self . stats_dir is not None : <nl> - self . hidden_file1 = [ ] <nl> - self . hidden_file2 = [ ] <nl> - self . hidden_file3 = [ ] <nl> - for i in xrange ( self . num_hidden ) : <nl> - folder = " % s / hidden_ % d " % ( self . stats_dir , i ) <nl> - basename = " % s / % s " % ( folder , name ) <nl> - self . hidden_file1 + = [ file ( basename + " . stat_1 " , ' w ' ) ] <nl> - self . hidden_file2 + = [ file ( basename + " . stat_2 " , ' w ' ) ] <nl> - self . hidden_file3 + = [ file ( basename + " . stat_3 " , ' w ' ) ] <nl> - folder = " % s / final " % ( self . 
stats_dir , ) <nl> - basename = " % s / % s " % ( folder , name ) <nl> - self . final_file1 = file ( basename + " . stat_1 " , ' w ' ) <nl> - <nl> - if self . save_hidden_posteriors : <nl> - basename = " % s / % s " % ( self . hidden_posteriors_dir , name ) <nl> - self . f_handle = [ ] <nl> - for i in xrange ( self . num_hidden + 1 ) : # + 1 for hidden layer <nl> - self . f_handle + = [ file ( basename + " . L " + str ( i ) , ' w ' ) ] <nl> - <nl> - def partition_update ( self , hidden_posteriors , post , ans ) : <nl> - " " " <nl> - for each hidden layer : <nl> - update hidden_stat1 [ i ] <nl> - update hidden_stat2 [ i ] <nl> - update hidden_stat3 [ i ] <nl> - update final_stat1 <nl> - append to save posterior dir . . . <nl> - " " " <nl> - assert ( self . state = = 1 or self . state = = 2 ) <nl> - <nl> - if self . stats_dir is not None : <nl> - hidden_layer_sigmoids = [ ] <nl> - hidden_layer_softmaxs = [ ] <nl> - for i in xrange ( self . num_hidden ) : <nl> - hidden_layer_sigmoids + = [ numpy_sigmoid ( hidden_posteriors [ i ] ) ] <nl> - hidden_layer_softmaxs + = [ numpy_softmax ( hidden_posteriors [ i ] ) ] <nl> - <nl> - hidden_stat1_update = [ numpy_normalize_rows ( x ) . sum ( axis = 0 ) for x in hidden_layer_sigmoids ] <nl> - hidden_stat2_update = [ x . sum ( axis = 0 ) for x in hidden_layer_softmaxs ] <nl> - hidden_stat3_update = [ numpy . append ( numpy . sum ( x , axis = 0 ) , numpy . sum ( 1 . - x , axis = 0 ) ) for x in hidden_layer_sigmoids ] <nl> - <nl> - # TODO : should we be summing the log posteriors or the posteriors <nl> - # if log posteriors , uncomment 2 lines below <nl> - # post = numpy . log ( post ) <nl> - # post = numpy . nan_to_num ( post ) <nl> - <nl> - post = post . sum ( axis = 0 ) <nl> - <nl> - if self . state = = 1 : <nl> - self . state = 2 <nl> - # first update of partition <nl> - if self . stats_dir is not None : <nl> - self . hidden_stat1 = hidden_stat1_update <nl> - self . hidden_stat2 = hidden_stat2_update <nl> - self . hidden_stat3 = hidden_stat3_update <nl> - self . final_stat1 = FinalStat1 ( part_posterior_sum = post , part_answer = ans [ 0 ] ) <nl> - else : <nl> - # not the first update of partition <nl> - if self . stats_dir is not None : <nl> - for i in xrange ( self . num_hidden ) : <nl> - self . hidden_stat1 [ i ] + = hidden_stat1_update [ i ] <nl> - self . hidden_stat2 [ i ] + = hidden_stat2_update [ i ] <nl> - self . hidden_stat3 [ i ] + = hidden_stat3_update [ i ] <nl> - self . final_stat1 . part_posterior_sum + = post <nl> - assert ( self . final_stat1 . part_answer = = ans [ 0 ] ) <nl> - <nl> - self . part_feats + = len ( ans ) <nl> - self . total_feats + = len ( ans ) <nl> - <nl> - # append hidden posteriors <nl> - if self . save_hidden_posteriors : <nl> - for i in xrange ( len ( hidden_posteriors ) ) : <nl> - numpy . savetxt ( self . f_handle [ i ] , hidden_posteriors [ i ] ) <nl> - <nl> - def partition_end ( self ) : <nl> - " " " <nl> - if partition not empty : <nl> - for each hidden layer : <nl> - write hidden_file1 [ i ] <nl> - write hidden_file2 [ i ] <nl> - write hidden_file3 [ i ] <nl> - write final_stat1 <nl> - print final_stat1 : right / wrong , update diction , print diction <nl> - close all files <nl> - " " " <nl> - assert ( self . state ! = 0 ) <nl> - <nl> - if self . state = = 2 : <nl> - # write hidden_files <nl> - if self . stats_dir is not None : <nl> - for i in xrange ( self . num_hidden ) : <nl> - numpy . savetxt ( self . hidden_file1 [ i ] , self . hidden_stat1 [ i ] ) <nl> - numpy . savetxt ( self . 
hidden_file2 [ i ] , self . hidden_stat2 [ i ] ) <nl> - numpy . savetxt ( self . hidden_file3 [ i ] , self . hidden_stat3 [ i ] ) <nl> - # write final_file1 <nl> - numpy . savetxt ( self . final_file1 , self . final_stat1 . part_posterior_sum ) <nl> - <nl> - # print final_stat1 : right / wrong , update diction , print diction <nl> - <nl> - wrong = self . final_stat1 . part_posterior_sum . argmax ( ) ! = self . final_stat1 . part_answer <nl> - if wrong : <nl> - self . diction [ str ( self . final_stat1 . part_answer ) ] [ 0 ] + = 1 <nl> - self . diction [ str ( self . final_stat1 . part_answer ) ] [ 1 ] + = 1 <nl> - <nl> - out = " total_feats % s feats_in_part % s % s % s " % ( self . total_feats , self . part_feats , self . final_stat1 . part_posterior_sum , self . final_stat1 . part_answer ) <nl> - if wrong : <nl> - out + = " WRONG " <nl> - else : <nl> - out + = " CORRECT " <nl> - self . logger . debug ( self . name + " > > > > > " + out ) <nl> - <nl> - # close all files <nl> - if self . stats_dir is not None : <nl> - for i in xrange ( self . num_hidden ) : <nl> - self . hidden_file1 [ i ] . close ( ) <nl> - self . hidden_file2 [ i ] . close ( ) <nl> - self . hidden_file3 [ i ] . close ( ) <nl> - self . final_file1 . close ( ) <nl> - <nl> - if self . save_hidden_posteriors : <nl> - for i in xrange ( len ( self . f_handle ) ) : <nl> - if self . f_handle [ i ] is not None : <nl> - self . f_handle [ i ] . close ( ) <nl> - self . f_handle [ i ] = None <nl> - <nl> - self . state = 0 <nl> \ No newline at end of file <nl> deleted file mode 100644 <nl> index 64bb77d4973 . . 00000000000 <nl> mmm a / example / speech - demo / utils / info . py <nl> ppp / dev / null <nl> <nl> - import os <nl> - <nl> - _mydir = os . path . dirname ( __file__ ) or ' . ' <nl> - <nl> - ROOT = os . path . abspath ( os . path . join ( _mydir , " . . / . . " ) ) <nl> - CONFIGS = os . path . join ( ROOT , " configs " ) <nl> deleted file mode 100644 <nl> index 321d68e38a8 . . 00000000000 <nl> mmm a / example / speech - demo / utils / layerwise_trainer . py <nl> ppp / dev / null <nl> <nl> - import glob <nl> - import os <nl> - import sys <nl> - import utils <nl> - import logging <nl> - import shutil <nl> - import StringIO <nl> - <nl> - import utils <nl> - <nl> - import numpy <nl> - <nl> - from io_func . model_io import load , save <nl> - <nl> - class LayerwiseTrainer : <nl> - SCHEMA = { <nl> - " type " : " object " , <nl> - " properties " : { <nl> - " resume " : { " type " : [ " string " , " integer " , " boolean " ] , " required " : False } , <nl> - " wdir " : { " type " : " string " } , <nl> - " output_file " : { " type " : " string " } , <nl> - " max_iters " : { " type " : [ " string " , " integer " ] , " required " : False } , <nl> - " first_layer_to_train " : { " type " : [ " string " , " integer " ] , " required " : False } , <nl> - " last_layer_to_train " : { " type " : [ " string " , " integer " ] , " required " : False } <nl> - } <nl> - } <nl> - <nl> - def __init__ ( self , arguments , model , train_sets ) : <nl> - self . logger = logging . getLogger ( __name__ ) <nl> - <nl> - self . model = model <nl> - self . train_sets = train_sets <nl> - <nl> - # # # # # # # # # # # # # # # # # # # # parse configs # # # # # # # # # # # # # # # # # # # # <nl> - <nl> - self . resume = False <nl> - if " resume " in arguments : <nl> - self . resume = utils . to_bool ( arguments [ " resume " ] ) <nl> - <nl> - self . wdir = arguments [ " wdir " ] <nl> - self . output_file = arguments [ " output_file " ] <nl> - <nl> - self . 
max_iters = 20 <nl> - if " max_iters " in arguments : <nl> - self . max_iters = int ( arguments [ " max_iters " ] ) <nl> - <nl> - # self . max_iters_without_impr = 3 <nl> - # if " max_iters_without_impr " in arguments : <nl> - # self . max_iters_without_impr = int ( arguments [ " max_iters_without_impr " ] ) <nl> - <nl> - self . first_layer_to_train = 0 <nl> - if " first_layer_to_train " in arguments : <nl> - self . first_layer_to_train = int ( arguments [ " first_layer_to_train " ] ) <nl> - <nl> - self . last_layer_to_train = model . n_layers - 1 # number hidden layers - 1 <nl> - if " last_layer_to_train " in arguments : <nl> - self . last_layer_to_train = int ( arguments [ " last_layer_to_train " ] ) <nl> - <nl> - # other stuff <nl> - if self . resume : <nl> - if not os . path . exists ( self . wdir ) : <nl> - raise Exception ( " wdir must exist if resume = True " ) <nl> - else : <nl> - if not os . path . exists ( self . wdir ) : <nl> - os . makedirs ( self . wdir ) <nl> - else : <nl> - self . logger . info ( " Directory already exists . . . " ) <nl> - <nl> - out = StringIO . StringIO ( ) <nl> - print > > out , " \ n * * * * * * * * * * LayerwiseTrainer * * * * * * * * * * " <nl> - print > > out , " resume " , self . resume <nl> - print > > out , " wdir " , self . wdir <nl> - print > > out , " output_file " , self . output_file <nl> - print > > out , " max_iters " , self . max_iters <nl> - print > > out , " first_layer_to_train " , self . first_layer_to_train <nl> - print > > out , " last_layer_to_train " , self . last_layer_to_train <nl> - self . logger . info ( out . getvalue ( ) ) <nl> - <nl> - self . mlp_init = self . wdir + " / mlp_init " <nl> - if not self . resume : # brand new <nl> - save ( self . model , self . mlp_init ) <nl> - <nl> - # runtime state <nl> - self . layer_index = self . first_layer_to_train <nl> - self . iter = 0 <nl> - self . loss = sys . float_info . max <nl> - self . mlp_best = self . mlp_init <nl> - self . mlp_crrnt = self . mlp_init <nl> - self . iters_without_impr = 0 <nl> - <nl> - if self . resume : <nl> - if os . path . isfile ( self . wdir + " / layerwisetrainer_state " ) : <nl> - self . _load_state ( ) <nl> - <nl> - def _load_state ( self ) : <nl> - obj = utils . pickle_load ( self . wdir + " / layerwisetrainer_state " ) <nl> - self . layer_index = obj [ " layer_index " ] <nl> - self . iter = obj [ " iter " ] <nl> - self . loss = obj [ " loss " ] <nl> - self . mlp_best = obj [ " mlp_best " ] <nl> - self . mlp_crrnt = obj [ " mlp_crrnt " ] <nl> - # self . iters_without_impr = obj [ " iters_without_impr " ] <nl> - self . train_sets . set_state ( obj [ " train_sets " ] ) <nl> - <nl> - out = StringIO . StringIO ( ) <nl> - print > > out , " \ n * * * * * * * * * * Resuming from * * * * * * * * * * " <nl> - print > > out , " layer_index " , self . layer_index <nl> - print > > out , " iter " , self . iter <nl> - print > > out , " loss " , self . loss <nl> - print > > out , " mlp_best " , self . mlp_best <nl> - print > > out , " mlp_crrnt " , self . mlp_crrnt <nl> - self . logger . info ( out . getvalue ( ) ) <nl> - <nl> - load ( self . model , self . mlp_crrnt , gradients = True ) <nl> - <nl> - def _save_state ( self ) : <nl> - obj = { } <nl> - obj [ " layer_index " ] = self . layer_index <nl> - obj [ " iter " ] = self . iter <nl> - obj [ " loss " ] = self . loss <nl> - obj [ " mlp_best " ] = self . mlp_best <nl> - obj [ " mlp_crrnt " ] = self . mlp_crrnt <nl> - # obj [ " iters_without_impr " ] = self . iters_without_impr <nl> - obj [ " train_sets " ] = self . 
train_sets . get_state ( ) <nl> - <nl> - utils . pickle_save ( obj , self . wdir + " / layerwisetrainer_state " ) <nl> - <nl> - def train ( self , func ) : <nl> - for layer_index in xrange ( self . layer_index , self . last_layer_to_train + 1 ) : <nl> - self . loss = sys . float_info . max <nl> - <nl> - for epoch in xrange ( self . iter , self . max_iters ) : <nl> - filename , loss = func ( layer_index , epoch ) <nl> - filename = os . path . join ( self . wdir , filename ) <nl> - save ( self . model , filename ) <nl> - if loss < self . loss : <nl> - self . loss = loss <nl> - self . mlp_best = filename <nl> - self . mlp_crrnt = filename <nl> - <nl> - # self . logger . info ( " L % d ITER % 02d : % s " % ( layer_index , epoch , str ( info ) ) ) <nl> - self . logger . info ( " L % d ITER % 02d : best - of - this - layer : % s " % ( layer_index , epoch , self . mlp_best ) ) <nl> - <nl> - if epoch ! = self . max_iters - 1 : <nl> - self . layer_index = layer_index <nl> - self . iter = epoch + 1 <nl> - self . _save_state ( ) <nl> - <nl> - self . iter = 0 <nl> - self . layer_index = layer_index + 1 <nl> - self . _save_state ( ) <nl> - <nl> - # select the best network <nl> - if self . mlp_best = = self . mlp_init : <nl> - self . logger . critical ( " Error training neural network . . . " ) <nl> - # sys . exit ( 1 ) <nl> - <nl> - output_file = os . path . join ( self . wdir , self . output_file ) <nl> - shutil . copy2 ( self . mlp_best , output_file ) <nl> - self . logger . info ( " Succeeded training : " + output_file ) <nl> \ No newline at end of file <nl> deleted file mode 100644 <nl> index 28688383cf7 . . 00000000000 <nl> mmm a / example / speech - demo / utils / main_runner . py <nl> ppp / dev / null <nl> <nl> - import sys <nl> - import json <nl> - import pprint <nl> - import logging <nl> - import os <nl> - import validictory <nl> - <nl> - import utils <nl> - <nl> - def run_main ( main_func , schema , args ) : <nl> - if len ( args ) < 2 : <nl> - print " Usage : " + args [ 0 ] + " < json config file > " <nl> - print " For possible configs : " + args [ 0 ] + " help " <nl> - sys . exit ( 1 ) <nl> - if args [ 1 ] . lower ( ) in [ " - - h " , " - h " , " help " , " - help " , " - - help " ] : <nl> - # pprint . pprint ( schema ) <nl> - print json . dumps ( schema , indent = 2 ) <nl> - sys . exit ( 0 ) <nl> - <nl> - arguments = { } <nl> - for i in xrange ( 1 , len ( args ) ) : <nl> - try : <nl> - config = args [ i ] <nl> - print > > sys . stderr , " Merging % s into configuration " % ( config , ) <nl> - arguments . update ( json . load ( open ( config ) ) ) <nl> - except Exception , e : <nl> - print e <nl> - sys . exit ( 1 ) <nl> - <nl> - validictory . validate ( arguments , schema ) <nl> - logging_ini = None <nl> - if " logging_ini " in arguments : <nl> - logging_ini = arguments [ " logging_ini " ] <nl> - utils . setup_logger ( logging_ini ) <nl> - main_func ( arguments ) <nl> \ No newline at end of file <nl> deleted file mode 100644 <nl> index f80eb5c65b3 . . 00000000000 <nl> mmm a / example / speech - demo / utils / nnet_pdf_prior . py <nl> ppp / dev / null <nl> <nl> - " " " <nl> - Port of kaldi - head / src / nnet / nnet - pdf - prior . cc <nl> - " " " <nl> - <nl> - import logging <nl> - import sys <nl> - import numpy <nl> - <nl> - import theano <nl> - import theano . tensor as T <nl> - <nl> - class PdfPrior : <nl> - <nl> - def __init__ ( self , class_frame_counts = " " , prior_scale = 1 . 0 , prior_cutoff = 1e - 10 ) : <nl> - self . logger = logging . getLogger ( __name__ ) <nl> - self . 
class_frame_counts = class_frame_counts <nl> - self . prior_scale = prior_scale <nl> - self . prior_cutoff = prior_cutoff <nl> - <nl> - finfo = numpy . finfo ( ' float32 ' ) <nl> - <nl> - self . logger . info ( " Computing pdf - priors from : % s " % ( self . class_frame_counts , ) ) <nl> - <nl> - text_file = open ( self . class_frame_counts , " r " ) <nl> - content = text_file . read ( ) <nl> - tmp_priors = numpy . array ( content . strip ( ) . strip ( ' [ ] ' ) . strip ( ) . split ( ' ' ) , dtype = ' float32 ' ) <nl> - text_file . close ( ) <nl> - <nl> - prior_dim = len ( tmp_priors ) <nl> - tmp_mask = numpy . zeros ( prior_dim ) <nl> - num_cutoff = 0 <nl> - for i in xrange ( prior_dim ) : <nl> - if tmp_priors [ i ] < self . prior_cutoff : <nl> - tmp_priors [ i ] = self . prior_cutoff <nl> - tmp_mask [ i ] = finfo . max / 2 <nl> - num_cutoff + = 1 <nl> - <nl> - if num_cutoff > 0 : <nl> - self . logger . info ( " warning : % s out of % s classes have counts lower than % s " <nl> - % ( num_cutoff , prior_dim , self . prior_cutoff ) ) <nl> - <nl> - total = numpy . sum ( tmp_priors ) <nl> - tmp_priors = tmp_priors / total <nl> - tmp_priors = numpy . log ( tmp_priors ) <nl> - for i in xrange ( prior_dim ) : <nl> - if not finfo . min < tmp_priors [ i ] < finfo . max : <nl> - raise Exception ( ) <nl> - <nl> - tmp_priors_f = tmp_priors [ : ] <nl> - tmp_priors_f + = tmp_mask <nl> - <nl> - self . log_priors_ = tmp_priors_f <nl> - print self . log_priors_ <nl> - self . log_priors = theano . shared ( value = self . prior_scale * self . log_priors_ , borrow = True ) <nl> - <nl> - # llk is matrix <nl> - def SubtractOnLogpost ( self , llk ) : <nl> - # llk rows + = - self . prior_scale * self . log_priors <nl> - return llk - self . log_priors <nl> - <nl> - if __name__ = = " __main__ " : <nl> - # a = PdfPrior ( class_frame_counts = " / data / sls / scratch / leoliu / experiments / timit / exp1 / exp / dnn_512 / ali_train_pdf . counts " ) <nl> - a = PdfPrior ( class_frame_counts = " / usr / users / leoliu / src / v3 - sls - pdnn / configs / sample_frame_counts . txt " ) <nl> - print a . log_priors_ <nl> deleted file mode 100644 <nl> index 5f3d0b23ba6 . . 00000000000 <nl> mmm a / example / speech - demo / utils / norm . py <nl> ppp / dev / null <nl> <nl> - import sys <nl> - import numpy <nl> - import math <nl> - import logging <nl> - import theano <nl> - import theano . tensor as T <nl> - <nl> - class NormParamEstimator ( object ) : <nl> - def __init__ ( self , batch_size ) : <nl> - self . batch_size = batch_size <nl> - self . x = T . matrix ( ' x ' ) <nl> - self . logger = logging . getLogger ( __name__ ) <nl> - <nl> - def zeroth_order_stats ( self , x ) : <nl> - return x . shape [ 0 ] <nl> - <nl> - def first_order_stats ( self , x ) : <nl> - return T . sum ( x , axis = 0 ) <nl> - <nl> - def second_order_stats ( self , x ) : <nl> - return T . sum ( x * * 2 , axis = 0 ) <nl> - <nl> - def build_norm_estimation_functions ( self , data_sets ) : <nl> - ( corpus_feats , _ ) = data_sets . get_shared ( ) <nl> - <nl> - start_idx = T . lscalar ( ' start_idx ' ) # index to a [ mini ] batch <nl> - end_idx = T . lscalar ( ' end_idx ' ) # index to a [ mini ] batch <nl> - <nl> - # gives a vector of 0 ' s and 1 ' s where the 0 ' s are correct hypotheses <nl> - norm_func = theano . function ( inputs = [ start_idx , end_idx ] , <nl> - outputs = [ self . zeroth_order_stats ( self . x ) , <nl> - self . first_order_stats ( self . x ) , <nl> - self . second_order_stats ( self . x ) ] , <nl> - givens = { self . 
x : corpus_feats [ start_idx : end_idx ] } ) <nl> - <nl> - <nl> - return norm_func <nl> - <nl> - def estimate_norm_params ( self , data_sets ) : <nl> - gpu_norm_estimation_fn = self . build_norm_estimation_functions ( data_sets ) <nl> - <nl> - zeroth = 0 <nl> - first = None <nl> - second = None <nl> - <nl> - total_feats = 0 <nl> - data_sets . initialize_read ( ) <nl> - while True : <nl> - num_feats = data_sets . load_next_block ( ) <nl> - if num_feats < 0 : <nl> - break <nl> - num_batches = int ( math . ceil ( float ( num_feats ) / self . batch_size ) ) <nl> - for batch_index in xrange ( num_batches ) : <nl> - start_idx = batch_index * self . batch_size <nl> - end_idx = min ( start_idx + self . batch_size , num_feats ) <nl> - <nl> - stats = gpu_norm_estimation_fn ( start_idx , end_idx ) <nl> - <nl> - zeroth + = stats [ 0 ] <nl> - if first is None : <nl> - first = stats [ 1 ] <nl> - second = stats [ 2 ] <nl> - else : <nl> - first + = stats [ 1 ] <nl> - second + = stats [ 2 ] <nl> - <nl> - " " " <nl> - if first [ 1 ] / zeroth > 1000000 : <nl> - sys . exit ( 1 ) <nl> - print start_idx , end_idx <nl> - arr = data_sets . shared_x . get_value ( ) [ start_idx : end_idx ] <nl> - print " max " , arr . max ( ) <nl> - for i in xrange ( arr . shape [ 0 ] ) : <nl> - if arr [ i ] [ 1 ] > 100 : <nl> - print " > > > > > > > > > > > > > > > > > > > > > > > > " <nl> - print i , arr [ i ] [ 1 ] <nl> - sys . exit ( 1 ) <nl> - " " " <nl> - <nl> - total_feats + = num_feats <nl> - self . logger . debug ( " feats : % d - mean [ 1 ] % f - first [ 0 ] % f - second [ 0 ] % f " % ( total_feats , first [ 1 ] / zeroth , first [ 0 ] , second [ 0 ] ) ) <nl> - <nl> - mean = first / zeroth <nl> - var = second / zeroth - mean * * 2 <nl> - inv_std = 1 . 0 / numpy . sqrt ( var ) <nl> - <nl> - return [ mean , inv_std ] <nl> - <nl> deleted file mode 100644 <nl> index 886cd59e4e3 . . 00000000000 <nl> mmm a / example / speech - demo / utils / trainer . py <nl> ppp / dev / null <nl> <nl> - import glob <nl> - import os <nl> - import sys <nl> - import logging <nl> - import shutil <nl> - import math <nl> - import StringIO <nl> - <nl> - import utils <nl> - <nl> - import numpy <nl> - <nl> - from io_func . model_io import load , save <nl> - <nl> - " " " <nl> - Original pdnn setup : <nl> - <nl> - ' D : 0 . 08 : 0 . 5 : 0 . 05 , 0 . 05 : 15 ' <nl> - <nl> - start_rate 0 . 08 <nl> - <nl> - start decay <nl> - min_derror_decay_start 0 . 05 <nl> - AND <nl> - min_epoch_decay_start 15 <nl> - <nl> - scale_by 0 . 5 <nl> - <nl> - stop decay <nl> - min_derror_stop 0 . 05 <nl> - <nl> - self . learn_rate = 0 . 1 <nl> - self . halving_factor = 0 . 5 <nl> - self . max_iters = 20 <nl> - self . min_iters = 0 <nl> - self . keep_lr_iters = 15 <nl> - self . start_halving_impr = 0 . 01 <nl> - self . end_halving_impr = 0 . 001 <nl> - " " " <nl> - <nl> - def _isnum ( n ) : <nl> - return ( not math . isnan ( n ) and not math . isinf ( n ) ) <nl> - <nl> - class Trainer : <nl> - SCHEMA = { } <nl> - <nl> - def __init__ ( self , arguments , model , train_fn , valid_fn , <nl> - train_sets , valid_sets ) : <nl> - self . logger = logging . getLogger ( __name__ ) <nl> - <nl> - self . model = model <nl> - self . train_fn = train_fn <nl> - self . valid_fn = valid_fn <nl> - self . train_sets = train_sets <nl> - self . valid_sets = valid_sets <nl> - <nl> - # # # # # # # # # # # # # # # # # # # # parse configs # # # # # # # # # # # # # # # # # # # # <nl> - <nl> - self . resume = False <nl> - if " resume " in arguments : <nl> - self . resume = utils . 
to_bool ( arguments [ " resume " ] ) <nl> - <nl> - self . wdir = arguments [ " wdir " ] <nl> - self . output_file = arguments [ " output_file " ] <nl> - <nl> - self . learn_rate = 0 . 1 <nl> - if " learn_rate " in arguments : <nl> - self . learn_rate = float ( arguments [ " learn_rate " ] ) <nl> - <nl> - self . halving_factor = 0 . 5 <nl> - if " halving_factor " in arguments : <nl> - self . halving_factor = float ( arguments [ " halving_factor " ] ) <nl> - self . max_iters = 20 <nl> - if " max_iters " in arguments : <nl> - self . max_iters = int ( arguments [ " max_iters " ] ) <nl> - self . min_iters = 0 <nl> - if " min_iters " in arguments : <nl> - self . min_iters = int ( arguments [ " min_iters " ] ) <nl> - self . keep_lr_iters = 15 <nl> - if " keep_lr_iters " in arguments : <nl> - self . keep_lr_iters = int ( arguments [ " keep_lr_iters " ] ) <nl> - self . start_halving_impr = 0 . 01 <nl> - if " start_halving_impr " in arguments : <nl> - self . start_halving_impr = float ( arguments [ " start_halving_impr " ] ) <nl> - self . end_halving_impr = 0 . 001 <nl> - if " end_halving_impr " in arguments : <nl> - self . end_halving_impr = float ( arguments [ " end_halving_impr " ] ) <nl> - <nl> - self . continue_with_rate = False <nl> - if " continue_with_rate " in arguments : <nl> - self . continue_with_rate = utils . to_bool ( arguments [ " continue_with_rate " ] ) <nl> - <nl> - self . halving_criteria = " loss " <nl> - if " halving_criteria " in arguments : <nl> - self . halving_criteria = arguments [ " halving_criteria " ] <nl> - criteria_list = [ " loss " , " frame_err " ] <nl> - if self . halving_criteria not in criteria_list : <nl> - raise Exception ( " invalid halving criteria . must be one of " + str ( criteria_list ) ) <nl> - <nl> - # batch_size and momentum <nl> - self . batch_size = 256 <nl> - if arguments . has_key ( ' batch_size ' ) : <nl> - self . batch_size = int ( arguments [ ' batch_size ' ] ) <nl> - <nl> - self . momentum = 0 . 5 <nl> - self . momentum_start = 1 <nl> - if arguments . has_key ( ' momentum ' ) : <nl> - self . momentum = float ( arguments [ ' momentum ' ] ) <nl> - if ' momentum_start ' in arguments : <nl> - self . momentum_start = int ( arguments [ ' momentum_start ' ] ) <nl> - <nl> - # other stuff <nl> - if self . resume : <nl> - if not os . path . exists ( self . wdir ) : <nl> - raise Exception ( " wdir must exist if resume = True " ) <nl> - else : <nl> - if not os . path . exists ( self . wdir ) : <nl> - os . makedirs ( self . wdir ) <nl> - else : <nl> - self . logger . info ( " Directory already exists . . . " ) <nl> - <nl> - out = StringIO . StringIO ( ) <nl> - print > > out , " \ n * * * * * * * * * * Trainer * * * * * * * * * * " <nl> - print > > out , " resume " , self . resume <nl> - print > > out , " wdir " , self . wdir <nl> - print > > out , " output_file " , self . output_file <nl> - print > > out , " learn_rate " , self . learn_rate <nl> - print > > out , " halving_factor " , self . halving_factor <nl> - print > > out , " max_iters " , self . max_iters <nl> - print > > out , " min_iters " , self . min_iters <nl> - print > > out , " keep_lr_iters " , self . keep_lr_iters <nl> - print > > out , " start_halving_impr " , self . start_halving_impr <nl> - print > > out , " end_halving_impr " , self . end_halving_impr <nl> - print > > out , " continue_with_rate " , self . continue_with_rate <nl> - print > > out , " halving_criteria " , self . halving_criteria <nl> - print > > out , " batch_size " , self . 
batch_size <nl> - print > > out , " momentum " , self . momentum <nl> - print > > out , " momentum_start " , self . momentum_start <nl> - self . logger . info ( out . getvalue ( ) ) <nl> - <nl> - self . mlp_init = self . wdir + " / mlp_init " <nl> - if not self . resume : # brand new <nl> - save ( self . model , self . mlp_init ) <nl> - <nl> - # runtime state <nl> - self . iter = 0 <nl> - self . done = False <nl> - self . loss = sys . float_info . max <nl> - self . rate = self . learn_rate <nl> - self . mlp_best = self . mlp_init <nl> - self . halving = False <nl> - self . wasAccepted = True <nl> - <nl> - if self . resume : <nl> - if os . path . isfile ( self . wdir + " / trainer_state " ) : <nl> - self . _load_state ( ) <nl> - <nl> - def _load_state ( self ) : <nl> - obj = utils . pickle_load ( self . wdir + " / trainer_state " ) <nl> - self . iter = obj [ " iter " ] <nl> - self . done = obj [ " done " ] <nl> - self . loss = obj [ " loss " ] <nl> - self . rate = obj [ " rate " ] <nl> - self . mlp_best = obj [ " mlp_best " ] <nl> - self . halving = obj [ " halving " ] <nl> - self . wasAccepted = obj [ " wasAccepted " ] <nl> - self . train_sets . set_state ( obj [ " train_sets " ] ) <nl> - self . valid_sets . set_state ( obj [ " valid_sets " ] ) <nl> - <nl> - out = StringIO . StringIO ( ) <nl> - print > > out , " \ n * * * * * * * * * * Resuming from * * * * * * * * * * " <nl> - print > > out , " iter " , self . iter <nl> - print > > out , " done " , self . done <nl> - print > > out , " loss " , self . loss <nl> - print > > out , " rate " , self . rate <nl> - print > > out , " mlp_best " , self . mlp_best <nl> - print > > out , " halving " , self . halving <nl> - print > > out , " wasAccepted " , self . wasAccepted <nl> - self . logger . info ( out . getvalue ( ) ) <nl> - <nl> - def _save_state ( self ) : <nl> - obj = { } <nl> - obj [ " iter " ] = self . iter <nl> - obj [ " done " ] = self . done <nl> - obj [ " loss " ] = self . loss <nl> - obj [ " rate " ] = self . rate <nl> - obj [ " mlp_best " ] = self . mlp_best <nl> - obj [ " halving " ] = self . halving <nl> - obj [ " wasAccepted " ] = self . wasAccepted <nl> - obj [ " train_sets " ] = self . train_sets . get_state ( ) <nl> - obj [ " valid_sets " ] = self . valid_sets . get_state ( ) <nl> - <nl> - utils . pickle_save ( obj , self . wdir + " / trainer_state " ) <nl> - <nl> - def finished ( self ) : <nl> - done = self . done or ( self . iter > = self . max_iters ) <nl> - self . iter + = 1 <nl> - return done <nl> - <nl> - def accepted ( self , loss_new , mlp_next ) : <nl> - # accept or reject new parameters ( based on objective function ) <nl> - self . loss_prev = self . loss <nl> - if loss_new < self . loss or self . iter < = self . keep_lr_iters : <nl> - self . loss = loss_new <nl> - self . mlp_best = mlp_next <nl> - self . wasAccepted = True <nl> - return True <nl> - else : <nl> - self . wasAccepted = False <nl> - return False <nl> - <nl> - def _finalize_helper ( self , loss_new ) : <nl> - # no learn - rate halving yet , if keep_lr_iters set accordingly <nl> - if self . iter < = self . keep_lr_iters : <nl> - return <nl> - <nl> - # stopping criterion <nl> - # bug fix ( problem in Kaldi too ) <nl> - # if start_halving_impr = = 0 and we reject a net <nl> - # then we change self . loss_prev w / o changing self . loss <nl> - # which means rel_impr is 0 , instead of a negative number <nl> - # This will half when start_halving_impr = 0 . 01 <nl> - # but not when it equals 0 . 0 ( border case ) <nl> - <nl> - rel_impr = ( self . 
loss_prev - loss_new ) / self . loss_prev <nl> - if self . halving and rel_impr < self . end_halving_impr : <nl> - if self . min_iters > self . iter : <nl> - self . logger . info ( " we were supposed to finish , but we continue as min_iters : " + str ( self . min_iters ) ) <nl> - return <nl> - self . logger . info ( " finished , too small rel . improvement " + str ( rel_impr ) ) <nl> - self . done = True <nl> - <nl> - # start annealing when improvement is low <nl> - self . logger . info ( " * * * * * rel_impr vs start_halving % f % f " % ( rel_impr , self . start_halving_impr ) ) <nl> - if rel_impr < self . start_halving_impr : # and not self . wasAccepted : <nl> - self . halving = True <nl> - <nl> - keep_rate = self . continue_with_rate and self . wasAccepted <nl> - if not keep_rate : <nl> - if self . halving : <nl> - self . rate = self . rate * self . halving_factor <nl> - <nl> - def finalize ( self , loss_new ) : <nl> - self . _finalize_helper ( loss_new ) <nl> - self . _save_state ( ) <nl> - <nl> - def _validate ( self ) : <nl> - # print " CVCVCVCVCVCVCVCVCVCVCVCVCVCVCVCVCVCVCVCVCVCVCVCVCVCVCVCVCVCVCVCVCVCVCVCVCVCVCVCVCVCV " <nl> - <nl> - errors = [ ] <nl> - losses = [ ] <nl> - total_feats = 0 <nl> - self . valid_sets . initialize_read ( ) <nl> - while True : <nl> - num_feats = self . valid_sets . load_next_block ( ) <nl> - if num_feats < 0 : <nl> - break <nl> - num_batches = int ( math . ceil ( float ( num_feats ) / self . batch_size ) ) <nl> - for batch_index in xrange ( num_batches ) : <nl> - start_idx = batch_index * self . batch_size <nl> - end_idx = min ( start_idx + self . batch_size , num_feats ) <nl> - batch_feats = end_idx - start_idx <nl> - <nl> - # print " * * * * * * * * * * * * * * * * * * * " , start_idx , end_idx <nl> - if False : <nl> - error , loss = 0 , 0 <nl> - else : <nl> - error , loss = self . valid_fn ( start_idx , end_idx ) <nl> - assert ( _isnum ( loss ) ) <nl> - errors . append ( error ) <nl> - losses . append ( loss * batch_feats ) <nl> - <nl> - # print " * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * " <nl> - # for i in xrange ( predictions . shape [ 0 ] ) : <nl> - # print ( predictions [ i ] , answers [ i ] ) , <nl> - # print " \ n " <nl> - # sys . exit ( 1 ) <nl> - <nl> - if False and batch_index > = 0 : <nl> - print " * * * batch_index " , batch_index , " > > > > > " , error , loss <nl> - print " mmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - " <nl> - numpy . set_printoptions ( threshold = ' nan ' ) <nl> - print self . model . sigmoid_layers [ 0 ] . W . get_value ( ) [ 0 : 5 , 0 : 5 ] <nl> - print " * " <nl> - print self . model . sigmoid_layers [ 0 ] . b . get_value ( ) [ 0 : 5 ] <nl> - print " * " <nl> - print self . model . sigmoid_layers [ 1 ] . W . get_value ( ) [ 0 : 5 , 0 : 5 ] <nl> - print " * " <nl> - print self . model . sigmoid_layers [ 1 ] . b . get_value ( ) [ 0 : 5 ] <nl> - print " mmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - " <nl> - <nl> - <nl> - # var = raw_input ( " Please enter something : " ) <nl> - # print " you entered " , var <nl> - <nl> - total_feats + = num_feats <nl> - self . logger . debug ( " ITER % 02d : valid feats : % d error % 4f loss % 4f rate % 4f " % ( self . iter , total_feats , 100 * float ( numpy . sum ( errors ) ) / total_feats , numpy . sum ( losses ) / total_feats , self . rate ) ) <nl> - return float ( numpy . sum ( errors ) ) / total_feats , numpy . 
sum ( losses ) / total_feats <nl> - <nl> - def _train ( self , learning_rate , momentum ) : <nl> - # print " TRAINTRAINTRAINTRAINTRAINTRAINTRAINTRAINTRAINTRAINTRAINTRAINTRAINTRAINTRAINTRAIN " <nl> - errors = [ ] <nl> - losses = [ ] <nl> - total_feats = 0 <nl> - self . train_sets . initialize_read ( ) <nl> - while True : <nl> - num_feats = self . train_sets . load_next_block ( ) <nl> - if num_feats < 0 : <nl> - break <nl> - num_batches = int ( math . ceil ( float ( num_feats ) / self . batch_size ) ) <nl> - <nl> - if False and True : <nl> - print " WARNING TAKE THIS OUT " <nl> - # num_batches = 40 # 40 <nl> - <nl> - print " mmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - " <nl> - # numpy . set_printoptions ( threshold = ' nan ' ) <nl> - print self . model . sigmoid_layers [ 0 ] . W . get_value ( ) [ 0 : 5 , 0 : 5 ] <nl> - print " * " <nl> - print self . model . sigmoid_layers [ 0 ] . b . get_value ( ) [ 0 : 5 ] <nl> - print " * " <nl> - print self . model . sigmoid_layers [ 1 ] . W . get_value ( ) [ 0 : 5 , 0 : 5 ] <nl> - print " * " <nl> - print self . model . sigmoid_layers [ 1 ] . b . get_value ( ) [ 0 : 5 ] <nl> - print " mmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - " <nl> - print " ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ " <nl> - print self . model . sigmoid_layers [ 0 ] . delta_W . get_value ( ) [ 0 : 5 , 0 : 5 ] <nl> - print " * " <nl> - print self . model . sigmoid_layers [ 0 ] . delta_b . get_value ( ) [ 0 : 5 ] <nl> - print " * " <nl> - print self . model . sigmoid_layers [ 1 ] . delta_W . get_value ( ) [ 0 : 5 , 0 : 5 ] <nl> - print " * " <nl> - print self . model . sigmoid_layers [ 1 ] . delta_b . get_value ( ) [ 0 : 5 ] <nl> - print " $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ " <nl> - print " $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ " <nl> - print " $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ " <nl> - print " $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ " <nl> - print " $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ $ " <nl> - <nl> - for batch_index in xrange ( num_batches ) : <nl> - start_idx = batch_index * self . batch_size <nl> - end_idx = min ( start_idx + self . batch_size , num_feats ) <nl> - batch_feats = end_idx - start_idx <nl> - <nl> - rate = learning_rate * float ( batch_feats ) / self . batch_size <nl> - if False : <nl> - error , loss = 0 , 0 <nl> - else : <nl> - error , loss = self . train_fn ( start_idx , end_idx , learning_rate = rate , momentum = momentum ) <nl> - assert ( _isnum ( loss ) ) <nl> - errors . append ( error ) <nl> - losses . append ( loss * batch_feats ) <nl> - <nl> - # self . logger . debug ( " % d err = % f loss = % f " % ( batch_index , error , loss * batch_feats ) ) <nl> - # if batch_index = = 10 : <nl> - # sys . exit ( 1 ) <nl> - <nl> - if False and batch_index > = 0 : <nl> - # print " * * * batch_index " , batch_index , " > > > > > " , error , loss <nl> - print " mmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - " <nl> - # numpy . set_printoptions ( threshold = ' nan ' ) <nl> - print self . model . sigmoid_layers [ 0 ] . W . get_value ( ) [ 0 : 5 , 0 : 5 ] <nl> - print " * " <nl> - print self . model . sigmoid_layers [ 0 ] . b . get_value ( ) [ 0 : 5 ] <nl> - print " * " <nl> - print self . model . 
sigmoid_layers [ 1 ] . W . get_value ( ) [ 0 : 5 , 0 : 5 ] <nl> - print " * " <nl> - print self . model . sigmoid_layers [ 1 ] . b . get_value ( ) [ 0 : 5 ] <nl> - print " mmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - " <nl> - <nl> - # import sys <nl> - # sys . exit ( 1 ) <nl> - <nl> - total_feats + = num_feats <nl> - self . logger . debug ( " ITER % 02d : train feats : % d error % 4f loss % 4f rate % 4f " % ( self . iter , total_feats , 100 * float ( numpy . sum ( errors ) ) / total_feats , numpy . sum ( losses ) / total_feats , self . rate ) ) <nl> - # sys . exit ( 1 ) <nl> - # print " ERRORS : " , errors <nl> - return float ( numpy . sum ( errors ) ) / total_feats , numpy . sum ( losses ) / total_feats <nl> - <nl> - def train ( self ) : <nl> - if not self . resume : <nl> - # cross - validation on original network <nl> - if True : <nl> - valid_error , loss = self . _validate ( ) <nl> - assert ( _isnum ( loss ) ) <nl> - else : <nl> - valid_error , loss = ( 9999999 , 9999999 ) <nl> - self . logger . info ( " ITER % 02d : CROSSVAL PRERUN ERROR % . 4f AVG . LOSS % . 4f " % ( self . iter , 100 * valid_error , loss ) ) <nl> - self . loss = loss <nl> - <nl> - while not self . finished ( ) : <nl> - load ( self . model , filename = self . mlp_best , gradients = self . wasAccepted ) <nl> - <nl> - if self . iter > = self . momentum_start : <nl> - moment = self . momentum <nl> - else : <nl> - moment = 0 <nl> - if False : <nl> - tr_error , tr_loss = 0 , 0 <nl> - else : <nl> - tr_error , tr_loss = self . _train ( learning_rate = self . rate , momentum = moment ) <nl> - assert ( _isnum ( tr_loss ) ) <nl> - self . logger . info ( " ITER % 02d : TRAIN ERROR % 03 . 4f AVG . LOSS % . 4f lrate % . 6g " % ( self . iter , 100 * tr_error , tr_loss , self . rate ) ) <nl> - <nl> - # cross - validation <nl> - valid_error , valid_loss = self . _validate ( ) <nl> - self . logger . info ( " ITER % 02d : CROSSVAL ERROR % . 4f AVG . LOSS % . 4f lrate % . 6f " % ( self . iter , 100 * valid_error , valid_loss , self . rate ) ) <nl> - <nl> - mlp_next = " % s / it % 02drate % 4f_Terr % . 4f_Tloss % 4f_CVerr % 4f_CVloss % 4f " % ( self . wdir , self . iter , self . rate , 100 * tr_error , tr_loss , 100 * valid_error , valid_loss ) <nl> - <nl> - save ( self . model , filename = mlp_next + " . tmp " ) <nl> - <nl> - if self . halving_criteria = = " loss " : <nl> - loss_new = valid_loss <nl> - elif self . halving_criteria = = " frame_err " : <nl> - loss_new = valid_error <nl> - else : <nl> - raise Exception ( " bad halving_criteria " ) <nl> - <nl> - if self . accepted ( loss_new , mlp_next ) : <nl> - os . rename ( mlp_next + " . tmp " , mlp_next ) <nl> - self . logger . info ( " accepted " ) <nl> - else : <nl> - os . rename ( mlp_next + " . tmp " , mlp_next + " _rejected " ) <nl> - self . logger . info ( " rejected " ) <nl> - <nl> - self . finalize ( loss_new ) <nl> - <nl> - # select the best network <nl> - if self . mlp_best = = self . mlp_init : <nl> - self . logger . critical ( " Error training neural network . . . " ) <nl> - # sys . exit ( 1 ) <nl> - <nl> - output_file = os . path . join ( self . wdir , self . output_file ) <nl> - shutil . copy2 ( self . mlp_best , output_file ) <nl> - self . logger . info ( " Succeeded training : " + output_file ) <nl> \ No newline at end of file <nl> deleted file mode 100644 <nl> index 513261ea6f4 . . 00000000000 <nl> mmm a / example / speech - demo / utils / utils . py <nl> ppp / dev / null <nl> <nl> - import sys , subprocess , pickle , os , json , logging , socket <nl> - import logging . 
config <nl> - import datetime <nl> - <nl> - from . import info <nl> - <nl> - def getRunDir ( ) : <nl> - return os . path . dirname ( os . path . realpath ( sys . argv [ 0 ] ) ) <nl> - <nl> - def setup_logger ( logging_ini ) : <nl> - if logging_ini is not None : <nl> - print ( " Using custom logger " ) <nl> - else : <nl> - logging_ini = os . path . join ( info . CONFIGS , ' logging . ini ' ) <nl> - <nl> - logging . config . fileConfig ( logging_ini ) <nl> - logger = logging . getLogger ( __name__ ) <nl> - logger . info ( " * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * " ) <nl> - logger . info ( datetime . datetime . now ( ) . strftime ( " % Y - % m - % d % H : % M " ) ) <nl> - logger . info ( " Host : " + str ( socket . gethostname ( ) ) ) <nl> - logger . info ( " Screen : " + os . getenv ( " STY " , " unknown " ) ) <nl> - logger . info ( " PWD : " + os . getenv ( " PWD " , " unknown " ) ) <nl> - logger . info ( " Cmd : " + str ( sys . argv ) ) <nl> - logger . info ( " * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * " ) <nl> - <nl> - def to_bool ( obj ) : <nl> - if str ( obj ) . lower ( ) in [ " true " , " 1 " ] : <nl> - return True <nl> - elif str ( obj ) . lower ( ) in [ " false " , " 0 " ] : <nl> - return False <nl> - else : <nl> - raise Exception ( " to_bool : cannot convert to bool " ) <nl> - <nl> - def line_with_arg ( line ) : <nl> - line = line . strip ( ) <nl> - return line is not " " and not line . startswith ( " # " ) <nl> - <nl> - def parse_conv_spec ( conv_spec , batch_size ) : <nl> - # " 1x29x29 : 100 , 5x5 , p2x2 : 200 , 4x4 , p2x2 , f " <nl> - conv_spec = conv_spec . replace ( ' X ' , ' x ' ) <nl> - structure = conv_spec . split ( ' : ' ) <nl> - conv_layer_configs = [ ] <nl> - for i in range ( 1 , len ( structure ) ) : <nl> - config = { } <nl> - elements = structure [ i ] . split ( ' , ' ) <nl> - if i = = 1 : <nl> - input_dims = structure [ i - 1 ] . split ( ' x ' ) <nl> - prev_map_number = int ( input_dims [ 0 ] ) <nl> - prev_feat_dim_x = int ( input_dims [ 1 ] ) <nl> - prev_feat_dim_y = int ( input_dims [ 2 ] ) <nl> - else : <nl> - prev_map_number = conv_layer_configs [ - 1 ] [ ' output_shape ' ] [ 1 ] <nl> - prev_feat_dim_x = conv_layer_configs [ - 1 ] [ ' output_shape ' ] [ 2 ] <nl> - prev_feat_dim_y = conv_layer_configs [ - 1 ] [ ' output_shape ' ] [ 3 ] <nl> - <nl> - current_map_number = int ( elements [ 0 ] ) <nl> - filter_xy = elements [ 1 ] . split ( ' x ' ) <nl> - filter_size_x = int ( filter_xy [ 0 ] ) <nl> - filter_size_y = int ( filter_xy [ 1 ] ) <nl> - pool_xy = elements [ 2 ] . replace ( ' p ' , ' ' ) . replace ( ' P ' , ' ' ) . split ( ' x ' ) <nl> - pool_size_x = int ( pool_xy [ 0 ] ) <nl> - pool_size_y = int ( pool_xy [ 1 ] ) <nl> - output_dim_x = ( prev_feat_dim_x - filter_size_x + 1 ) / pool_size_x <nl> - output_dim_y = ( prev_feat_dim_y - filter_size_y + 1 ) / pool_size_y <nl> - <nl> - config [ ' input_shape ' ] = ( batch_size , prev_map_number , prev_feat_dim_x , prev_feat_dim_y ) <nl> - config [ ' filter_shape ' ] = ( current_map_number , prev_map_number , filter_size_x , filter_size_y ) <nl> - config [ ' poolsize ' ] = ( pool_size_x , pool_size_y ) <nl> - config [ ' output_shape ' ] = ( batch_size , current_map_number , output_dim_x , output_dim_y ) <nl> - if len ( elements ) = = 4 and elements [ 3 ] = = ' f ' : <nl> - config [ ' flatten ' ] = True <nl> - else : <nl> - config [ ' flatten ' ] = False <nl> - <nl> - conv_layer_configs . 
append ( config ) <nl> - return conv_layer_configs <nl> - <nl> - def _relu ( x ) : <nl> - return x * ( x > 0 ) <nl> - <nl> - def _capped_relu ( x ) : <nl> - return T . minimum ( x * ( x > 0 ) , 6 ) <nl> - <nl> - def _linear ( x ) : <nl> - return x * 1 . 0 <nl> - <nl> - def parse_activation ( act_str ) : <nl> - print ( " * * * " , act_str ) <nl> - if act_str = = ' sigmoid ' : <nl> - return T . nnet . sigmoid <nl> - elif act_str = = ' tanh ' : <nl> - return T . tanh <nl> - elif act_str = = ' relu ' : <nl> - return _relu <nl> - elif act_str = = ' capped_relu ' : <nl> - return _capped_relu <nl> - elif act_str = = ' linear ' : <nl> - return _linear <nl> - return T . nnet . sigmoid <nl> - <nl> - def activation_to_txt ( act_func ) : <nl> - if act_func = = T . nnet . sigmoid : <nl> - return ' sigmoid ' <nl> - if act_func = = T . tanh : <nl> - return ' tanh ' <nl> - <nl> - def parse_two_integers ( argument_str ) : <nl> - elements = argument_str . split ( " : " ) <nl> - int_strs = elements [ 1 ] . split ( " , " ) <nl> - return int ( int_strs [ 0 ] ) , int ( int_strs [ 1 ] ) <nl> - <nl> - " " " <nl> - Usage : <nl> - command = ' mysqladmin create test - uroot - pmysqladmin12 ' <nl> - for line in run_command ( command ) : <nl> - print ( line ) <nl> - " " " <nl> - def run_command ( command ) : <nl> - fnull = open ( os . devnull , ' w ' ) <nl> - p = subprocess . Popen ( command , <nl> - stdout = subprocess . PIPE , <nl> - stderr = fnull , <nl> - shell = True ) <nl> - return p , iter ( p . stdout . readline , b ' ' ) <nl> - <nl> - def pickle_load ( filename ) : <nl> - f = open ( filename , " rb " ) <nl> - try : <nl> - obj = pickle . load ( f ) <nl> - except Exception : <nl> - f . close ( ) <nl> - f = open ( filename , " rb " ) <nl> - print ( " Not a pickled file . . . try to load as text format : " + filename ) <nl> - obj = json . load ( f ) <nl> - f . close ( ) <nl> - return obj <nl> - <nl> - def pickle_save ( obj , filename ) : <nl> - f = open ( filename + " . new " , " wb " ) <nl> - pickle . dump ( obj , f ) <nl> - f . close ( ) <nl> - os . rename ( filename + " . new " , filename ) <nl> - <nl> - def makedirs ( path ) : <nl> - if not os . path . exists ( path ) : <nl> - os . makedirs ( path ) <nl> - <nl> - def kahan_add ( total , carry , inc ) : <nl> - cs = T . add_no_assoc ( carry , inc ) <nl> - s = T . add_no_assoc ( total , cs ) <nl> - update_carry = T . sub ( cs , T . sub ( s , total ) ) <nl> - update_total = s <nl> - return update_total , update_carry <nl> | Merge remote - tracking branch ' yzhang87 / master ' into truncated - bptt | apache/incubator-mxnet | fb0cc15ac00f642154bf49f94a8574ad0f99e8d1 | 2016-05-04T00:39:37Z |
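The trainer.py deleted in the record above implements a classic "newbob"-style learning-rate schedule: the rate is held fixed for the first keep_lr_iters epochs, halved by halving_factor once the relative improvement of the validation loss falls below start_halving_impr, and training stops once the improvement drops below end_halving_impr (unless min_iters has not been reached yet). The sketch below distills just that scheduling logic for reference. It is an illustrative reconstruction of the deleted Python code, not part of MXNet; it omits the continue_with_rate / accepted-epoch refinement, and all names (NewbobSchedule, update, ...) are invented for the example.

```cpp
// Distilled sketch of the "newbob" learning-rate schedule from the deleted
// trainer.py. Illustrative only -- not MXNet code.
class NewbobSchedule {
 public:
  NewbobSchedule(double start_rate = 0.1, double halving_factor = 0.5,
                 int keep_lr_iters = 15, int min_iters = 0,
                 double start_halving_impr = 0.01,
                 double end_halving_impr = 0.001)
      : rate_(start_rate), halving_factor_(halving_factor),
        keep_lr_iters_(keep_lr_iters), min_iters_(min_iters),
        start_halving_impr_(start_halving_impr),
        end_halving_impr_(end_halving_impr) {}

  // Call once per epoch with the previous and new validation loss.
  // Returns false when training should stop.
  bool update(int iter, double loss_prev, double loss_new) {
    if (iter <= keep_lr_iters_) {
      return true;                      // no halving during the warm-up epochs
    }
    double rel_impr = (loss_prev - loss_new) / loss_prev;
    if (halving_ && rel_impr < end_halving_impr_) {
      if (iter < min_iters_) {
        return true;                    // forced to continue until min_iters
      }
      return false;                     // improvement too small: finished
    }
    if (rel_impr < start_halving_impr_) {
      halving_ = true;                  // start annealing once gains are low
    }
    if (halving_) {
      rate_ *= halving_factor_;         // anneal the learning rate
    }
    return true;
  }

  double rate() const { return rate_; }

 private:
  double rate_, halving_factor_;
  int keep_lr_iters_, min_iters_;
  double start_halving_impr_, end_halving_impr_;
  bool halving_ = false;
};
```

With the defaults above, a plateauing validation loss triggers repeated halving until the relative improvement drops below 0.1%, which is the stopping condition the deleted trainer used.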
mmm a / src / core / support / cpu_posix . c <nl> ppp b / src / core / support / cpu_posix . c <nl> <nl> <nl> # ifdef GPR_CPU_POSIX <nl> <nl> - # include " src / core / support / cpu . h " <nl> - <nl> # include < errno . h > <nl> # include < unistd . h > <nl> # include < string . h > <nl> | Do not need a Mac - specific CPU header yet as there are no CPU - specific features | grpc/grpc | 20d6c1aa2ac95a1415620398266199c85fd21e90 | 2015-02-19T06:01:22Z |
new file mode 100644 <nl> index 000000000 . . 032494cc6 <nl> mmm / dev / null <nl> ppp b / trunk / conf / console . conf <nl> <nl> + # main config for srs . <nl> + # @ see full . conf for detail config . <nl> + <nl> + listen 1935 ; <nl> + daemon off ; <nl> + srs_log_tank console ; <nl> + vhost __defaultVhost__ { <nl> + } <nl> mmm a / trunk / src / app / srs_app_config . cpp <nl> ppp b / trunk / src / app / srs_app_config . cpp <nl> bool SrsConfig : : get_deamon ( ) <nl> { <nl> srs_assert ( root ) ; <nl> <nl> - SrsConfDirective * conf = root - > get ( " deamon " ) ; <nl> + SrsConfDirective * conf = root - > get ( " daemon " ) ; <nl> if ( conf & & conf - > arg0 ( ) = = " off " ) { <nl> return false ; <nl> } <nl>
 | fix get_deamon ( ) : look up the ' daemon ' config directive instead of the misspelled ' deamon ' | ossrs/srs | 76290a5a12dad69b94231c98475cbf8dd6002aed | 2014-03-23T06:47:09Z |
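The one-character change above is easy to underestimate: get_deamon() looked the directive up under the misspelled key "deamon", so a `daemon off;` line such as the one in the new console.conf never matched, the returned directive stayed null, and the function silently fell back to its "daemon on" default. The toy program below reproduces that failure mode with a plain std::map standing in for SrsConfDirective; it is only an illustration of why string-keyed config lookups fail silently on typos, not srs code.

```cpp
#include <iostream>
#include <map>
#include <string>

// Stand-in for a directive tree: maps directive name -> first argument.
// (srs uses SrsConfDirective; this toy map is only for illustration.)
static const std::map<std::string, std::string> kConf = {
    {"listen", "1935"},
    {"daemon", "off"},          // what console.conf actually says
    {"srs_log_tank", "console"},
};

static bool get_daemon(const std::string& key) {
    auto it = kConf.find(key);
    // Mirrors the original logic: only an explicit "off" disables daemon mode.
    if (it != kConf.end() && it->second == "off") {
        return false;
    }
    return true;  // missing key (including a misspelled one) means "on"
}

int main() {
    std::cout << "lookup \"deamon\": daemon=" << get_daemon("deamon") << "\n";  // 1: typo ignores the config
    std::cout << "lookup \"daemon\": daemon=" << get_daemon("daemon") << "\n";  // 0: fixed key honours `daemon off;`
    return 0;
}
```

Because a missing key degrades to the default instead of failing loudly, the typo only shows up as surprising runtime behaviour: the server daemonizes even though the config says `daemon off;`.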
mmm a / UnitTests / Makefile . unittests <nl> ppp b / UnitTests / Makefile . unittests <nl> SHELL_SERVER_ONLY = \ <nl> @ top_srcdir @ / js / server / tests / shell - compaction - noncluster - timecritical . js \ <nl> @ top_srcdir @ / js / server / tests / shell - shaped - noncluster . js \ <nl> @ top_srcdir @ / js / server / tests / shell - transactions - noncluster . js \ <nl> - @ top_srcdir @ / js / server / tests / shell - routing . js \ <nl> @ top_srcdir @ / js / server / tests / shell - any - noncluster . js \ <nl> @ top_srcdir @ / js / server / tests / shell - database - noncluster . js \ <nl> @ top_srcdir @ / js / server / tests / shell - foxx . js \ <nl> | Executed shell routing tests . Internals tested there have been changed dramatically | arangodb/arangodb | 7c3b7b02f6a4551e4bcfa89e2332598c5575780d | 2015-02-02T15:54:25Z |
mmm a / docker / caffe2 / jenkins / common / install_python . sh <nl> ppp b / docker / caffe2 / jenkins / common / install_python . sh <nl> pip install - - no - cache - dir \ <nl> scikit - image \ <nl> tabulate \ <nl> virtualenv \ <nl> + mock \ <nl> typing \ <nl> typing - extensions <nl> <nl> | Add mock python module for testing ( ) | pytorch/pytorch | 73a23b492c0ac1f6c88e0311790728ff23f50f8a | 2018-04-09T16:12:10Z |
mmm a / xbmc / pvr / windows / GUIWindowPVRGuide . cpp <nl> ppp b / xbmc / pvr / windows / GUIWindowPVRGuide . cpp <nl> using namespace PVR ; <nl> <nl> CGUIWindowPVRGuideBase : : CGUIWindowPVRGuideBase ( bool bRadio , int id , const std : : string & xmlFile ) : <nl> CGUIWindowPVRBase ( bRadio , id , xmlFile ) , <nl> - m_bChannelSelectionRestored ( false ) <nl> + m_bChannelSelectionRestored ( false ) , <nl> + m_bFirstOpen ( true ) <nl> { <nl> m_bRefreshTimelineItems = false ; <nl> m_bSyncRefreshTimelineItems = false ; <nl> bool CGUIWindowPVRGuideBase : : RefreshTimelineItems ( ) <nl> <nl> std : : unique_ptr < CFileItemList > timeline ( new CFileItemList ) ; <nl> <nl> - / / can be very expensive . never call with lock acquired . <nl> - group - > GetEPGAll ( * timeline , true ) ; <nl> + if ( m_bFirstOpen ) <nl> + { <nl> + m_bFirstOpen = false ; <nl> + <nl> + / / very first open of the window . come up with some data very fast . . . <nl> + const std : : vector < PVRChannelGroupMember > groupMembers = group - > GetMembers ( ) ; <nl> + for ( const auto & groupMember : groupMembers ) <nl> + { <nl> + / / fake a channel without epg <nl> + const std : : shared_ptr < CPVREpgInfoTag > gapTag = std : : make_shared < CPVREpgInfoTag > ( groupMember . channel ) ; <nl> + timeline - > Add ( std : : make_shared < CFileItem > ( gapTag ) ) ; <nl> + } <nl> + <nl> + / / next , fetch actual data . <nl> + m_bRefreshTimelineItems = true ; <nl> + } <nl> + else <nl> + { <nl> + / / can be very expensive . never call with lock acquired . <nl> + group - > GetEPGAll ( * timeline , true ) ; <nl> + } <nl> <nl> CDateTime startDate ( group - > GetFirstEPGDate ( ) ) ; <nl> CDateTime endDate ( group - > GetLastEPGDate ( ) ) ; <nl> mmm a / xbmc / pvr / windows / GUIWindowPVRGuide . h <nl> ppp b / xbmc / pvr / windows / GUIWindowPVRGuide . h <nl> namespace PVR <nl> std : : unique_ptr < CFileItemList > m_newTimeline ; <nl> <nl> bool m_bChannelSelectionRestored ; <nl> + std : : atomic_bool m_bFirstOpen ; <nl> } ; <nl> <nl> class CGUIWindowPVRTVGuide : public CGUIWindowPVRGuideBase <nl> | [ PVR ] Speedup first open of Guide window . | xbmc/xbmc | 6c187f8aadc1606253327a686e91ddb17bb33654 | 2019-03-09T23:03:44Z |
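The PVR change above trades completeness for latency on the very first open of the guide: instead of running the expensive GetEPGAll() call immediately, it fabricates one empty "gap" EPG tag per channel so the timeline can be drawn right away, and flips m_bRefreshTimelineItems so the real data is fetched on the next refresh pass. The sketch below shows that "placeholder first, full load later" shape in isolation; the class and member names are invented for the example and are not Kodi's actual types.

```cpp
#include <atomic>
#include <string>
#include <utility>
#include <vector>

// Illustrative only: a window that renders cheap placeholder rows on its
// first open and defers the expensive query to the next refresh cycle.
struct Row { std::string channel; bool placeholder; };

class TimelineWindow {
 public:
  explicit TimelineWindow(std::vector<std::string> channels)
      : channels_(std::move(channels)) {}

  // Called by the refresh machinery; returns the rows to display.
  std::vector<Row> BuildRows() {
    std::vector<Row> rows;
    if (firstOpen_.exchange(false)) {
      // Very first open: come up with something to draw immediately.
      for (const auto& ch : channels_) {
        rows.push_back({ch, /*placeholder=*/true});
      }
      needsFullRefresh_ = true;   // fetch the real data on the next pass
      return rows;
    }
    // Subsequent passes: do the expensive load (stubbed as non-placeholder rows).
    for (const auto& ch : channels_) {
      rows.push_back({ch, /*placeholder=*/false});
    }
    needsFullRefresh_ = false;
    return rows;
  }

  bool NeedsFullRefresh() const { return needsFullRefresh_; }

 private:
  std::vector<std::string> channels_;
  std::atomic<bool> firstOpen_{true};
  std::atomic<bool> needsFullRefresh_{false};
};
```

The atomic first-open flag mirrors the patch's std::atomic_bool m_bFirstOpen, so a concurrent refresh cannot build the placeholder list twice.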
mmm a / xbmc / network / TCPServer . cpp <nl> ppp b / xbmc / network / TCPServer . cpp <nl> <nl> # include " utils / Variant . h " <nl> # include " threads / SingleLock . h " <nl> <nl> + static const char bt_service_name [ ] = " XBMC JSON - RPC " ; <nl> + static const char bt_service_desc [ ] = " Interface for XBMC remote control over bluetooth " ; <nl> + static const char bt_service_prov [ ] = " XBMC JSON - RPC Provider " ; <nl> + static const uint32_t bt_service_guid [ ] = { 0x65AE4CC0 , 0x775D11E0 , 0xBE16CE28 , 0x4824019B } ; <nl> + <nl> <nl> using namespace JSONRPC ; <nl> using namespace ANNOUNCEMENT ; <nl> void CTCPServer : : Process ( ) <nl> { <nl> CLog : : Log ( LOGDEBUG , " JSONRPC Server : New connection detected " ) ; <nl> CTCPClient newconnection ; <nl> - newconnection . m_socket = accept ( * it , & newconnection . m_cliaddr , & newconnection . m_addrlen ) ; <nl> + newconnection . m_socket = accept ( * it , ( sockaddr * ) & newconnection . m_cliaddr , & newconnection . m_addrlen ) ; <nl> <nl> if ( newconnection . m_socket = = INVALID_SOCKET ) <nl> CLog : : Log ( LOGERROR , " JSONRPC Server : Accept of new connection failed " ) ; <nl> bool CTCPServer : : Initialize ( ) <nl> <nl> bool CTCPServer : : InitializeBlue ( ) <nl> { <nl> + if ( ! m_nonlocal ) <nl> + return false ; <nl> + <nl> + # ifdef _WIN32 <nl> + <nl> + SOCKET fd = socket ( AF_BTH , SOCK_STREAM , BTHPROTO_RFCOMM ) ; <nl> + if ( fd = = INVALID_SOCKET ) <nl> + { <nl> + CLog : : Log ( LOGINFO , " JSONRPC Server : Unable to get bluetooth socket " ) ; <nl> + return false ; <nl> + } <nl> + SOCKADDR_BTH sa = { } ; <nl> + sa . addressFamily = AF_BTH ; <nl> + sa . port = BT_PORT_ANY ; <nl> + <nl> + if ( bind ( fd , ( SOCKADDR * ) & sa , sizeof ( sa ) ) < 0 ) <nl> + { <nl> + CLog : : Log ( LOGINFO , " JSONRPC Server : Unable to bind to bluetooth socket " ) ; <nl> + closesocket ( fd ) ; <nl> + return false ; <nl> + } <nl> + <nl> + ULONG optval = TRUE ; <nl> + if ( setsockopt ( fd , SOL_RFCOMM , SO_BTH_AUTHENTICATE , ( const char * ) & optval , sizeof ( optval ) ) = = SOCKET_ERROR ) <nl> + { <nl> + CLog : : Log ( LOGERROR , " JSONRPC Server : Failed to force authentication for bluetooth socket " ) ; <nl> + closesocket ( fd ) ; <nl> + return false ; <nl> + } <nl> + <nl> + int len = sizeof ( sa ) ; <nl> + if ( getsockname ( fd , ( SOCKADDR * ) & sa , & len ) < 0 ) <nl> + CLog : : Log ( LOGERROR , " JSONRPC Server : Failed to get bluetooth port " ) ; <nl> + <nl> + if ( listen ( fd , 10 ) < 0 ) <nl> + { <nl> + CLog : : Log ( LOGERROR , " JSONRPC Server : Failed to listen to bluetooth port " ) ; <nl> + closesocket ( fd ) ; <nl> + return false ; <nl> + } <nl> + <nl> + m_servers . push_back ( fd ) ; <nl> + <nl> + CSADDR_INFO addrinfo ; <nl> + addrinfo . iProtocol = BTHPROTO_RFCOMM ; <nl> + addrinfo . iSocketType = SOCK_STREAM ; <nl> + addrinfo . LocalAddr . lpSockaddr = ( SOCKADDR * ) & sa ; <nl> + addrinfo . LocalAddr . iSockaddrLength = sizeof ( sa ) ; <nl> + addrinfo . RemoteAddr . lpSockaddr = ( SOCKADDR * ) & sa ; <nl> + addrinfo . RemoteAddr . iSockaddrLength = sizeof ( sa ) ; <nl> + <nl> + WSAQUERYSET service = { } ; <nl> + service . dwSize = sizeof ( service ) ; <nl> + service . lpszServiceInstanceName = ( LPSTR ) bt_service_name ; <nl> + service . lpServiceClassId = ( LPGUID ) & bt_service_guid ; <nl> + service . lpszComment = ( LPSTR ) bt_service_desc ; <nl> + service . dwNameSpace = NS_BTH ; <nl> + service . lpNSProviderId = NULL ; / * RFCOMM ? * / <nl> + service . lpcsaBuffer = & addrinfo ; <nl> + service . 
dwNumberOfCsAddrs = 1 ; <nl> + <nl> + if ( WSASetService ( & service , RNRSERVICE_REGISTER , 0 ) = = SOCKET_ERROR ) <nl> + CLog : : Log ( LOGERROR , " JSONRPC Server : failed to register bluetooth service error % d " , WSAGetLastError ( ) ) ; <nl> + <nl> + return true ; <nl> + # endif <nl> return false ; <nl> } <nl> <nl> CTCPServer : : CTCPClient : : CTCPClient ( ) <nl> m_beginChar = 0 ; <nl> m_endChar = 0 ; <nl> <nl> - m_addrlen = sizeof ( struct sockaddr ) ; <nl> + m_addrlen = sizeof ( m_cliaddr ) ; <nl> } <nl> <nl> CTCPServer : : CTCPClient : : CTCPClient ( const CTCPClient & client ) <nl> mmm a / xbmc / network / TCPServer . h <nl> ppp b / xbmc / network / TCPServer . h <nl> namespace JSONRPC <nl> void PushBuffer ( CTCPServer * host , const char * buffer , int length ) ; <nl> void Disconnect ( ) ; <nl> <nl> - int m_socket ; <nl> - struct sockaddr m_cliaddr ; <nl> - socklen_t m_addrlen ; <nl> + SOCKET m_socket ; <nl> + sockaddr_storage m_cliaddr ; <nl> + socklen_t m_addrlen ; <nl> CCriticalSection m_critSection ; <nl> <nl> private : <nl> | changed : publish an rfcomm like service over bluetooth for JSON - RPC api | xbmc/xbmc | 25cecfcdb5e494b011ea32967e3915f89087f147 | 2011-05-10T17:17:28Z |
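Besides registering the RFCOMM service, the patch above widens the stored client address from struct sockaddr to sockaddr_storage and passes sizeof(m_cliaddr) as the length. That matters because accept() copies whatever address family the listening socket uses into the caller's buffer, and a Bluetooth SOCKADDR_BTH is larger than a bare sockaddr; sockaddr_storage is specified to be large enough and suitably aligned for any supported family. Below is a minimal sketch of that accept pattern using plain BSD socket calls (POSIX headers here; on Windows it would be winsock2.h and SOCKET), written as an illustration rather than Kodi's actual wrapper code.

```cpp
#include <sys/socket.h>   // accept(), socklen_t, sockaddr_storage
#include <cstring>

// Accept one client on a listening socket of *any* address family
// (IPv4, IPv6, Bluetooth, ...). sockaddr_storage is guaranteed large
// enough to hold the peer address, unlike a bare struct sockaddr.
int accept_any(int listen_fd, sockaddr_storage* peer) {
    std::memset(peer, 0, sizeof(*peer));
    socklen_t len = sizeof(*peer);        // pass the full storage size
    return accept(listen_fd, reinterpret_cast<sockaddr*>(peer), &len);
}
```

After a successful accept, the ss_family field tells you which concrete sockaddr type to cast to: AF_INET, AF_INET6 or, as in this patch, the Bluetooth family (AF_BTH on Windows).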
mmm a / build / cocos2d - js - win32 . vc2013 . sln <nl> ppp b / build / cocos2d - js - win32 . vc2013 . sln <nl> Project ( " { 8BC9CEB8 - 8B4A - 11D0 - 8D11 - 00A0C91BC942 } " ) = " libjscocos2d " , " . . \ cocos \ sc <nl> EndProject <nl> Project ( " { 8BC9CEB8 - 8B4A - 11D0 - 8D11 - 00A0C91BC942 } " ) = " js - tests " , " . . \ tests \ js - tests \ project \ proj . win32 \ js - tests . vcxproj " , " { D0F06A44 - A245 - 4D13 - A498 - 0120C203B539 } " <nl> EndProject <nl> + Project ( " { 8BC9CEB8 - 8B4A - 11D0 - 8D11 - 00A0C91BC942 } " ) = " libbullet " , " . . \ external \ bullet \ proj . win32 \ libbullet . vcxproj " , " { 012DFF48 - A13F - 4F52 - B07B - F8B9D21CE95B } " <nl> + EndProject <nl> Global <nl> GlobalSection ( SolutionConfigurationPlatforms ) = preSolution <nl> Debug | ARM = Debug | ARM <nl> Global <nl> { D0F06A44 - A245 - 4D13 - A498 - 0120C203B539 } . Release | ARM . ActiveCfg = Release | Win32 <nl> { D0F06A44 - A245 - 4D13 - A498 - 0120C203B539 } . Release | Win32 . ActiveCfg = Release | Win32 <nl> { D0F06A44 - A245 - 4D13 - A498 - 0120C203B539 } . Release | Win32 . Build . 0 = Release | Win32 <nl> + { 012DFF48 - A13F - 4F52 - B07B - F8B9D21CE95B } . Debug | ARM . ActiveCfg = Debug | Win32 <nl> + { 012DFF48 - A13F - 4F52 - B07B - F8B9D21CE95B } . Debug | Win32 . ActiveCfg = Debug | Win32 <nl> + { 012DFF48 - A13F - 4F52 - B07B - F8B9D21CE95B } . Debug | Win32 . Build . 0 = Debug | Win32 <nl> + { 012DFF48 - A13F - 4F52 - B07B - F8B9D21CE95B } . Release | ARM . ActiveCfg = Release | Win32 <nl> + { 012DFF48 - A13F - 4F52 - B07B - F8B9D21CE95B } . Release | Win32 . ActiveCfg = Release | Win32 <nl> + { 012DFF48 - A13F - 4F52 - B07B - F8B9D21CE95B } . Release | Win32 . Build . 0 = Release | Win32 <nl> EndGlobalSection <nl> GlobalSection ( SolutionProperties ) = preSolution <nl> HideSolutionNode = FALSE <nl> Global <nl> GlobalSection ( NestedProjects ) = preSolution <nl> { B7C2A162 - DEC9 - 4418 - 972E - 240AB3CBFCAE } = { 92D54E36 - 7916 - 48EF - A951 - 224DD3B25442 } <nl> { 929480E7 - 23C0 - 4DF6 - 8456 - 096D71547116 } = { 92D54E36 - 7916 - 48EF - A951 - 224DD3B25442 } <nl> + { 012DFF48 - A13F - 4F52 - B07B - F8B9D21CE95B } = { 92D54E36 - 7916 - 48EF - A951 - 224DD3B25442 } <nl> EndGlobalSection <nl> GlobalSection ( DPCodeReviewSolutionGUID ) = preSolution <nl> DPCodeReviewSolutionGUID = { 00000000 - 0000 - 0000 - 0000 - 000000000000 } <nl> mmm a / cocos / 2d / libcocos2d . vcxproj <nl> ppp b / cocos / 2d / libcocos2d . vcxproj <nl> xcopy / Y / Q " $ ( ProjectDir ) . . \ . . \ external \ chipmunk \ prebuilt \ win32 \ release - lib \ * . * <nl> < ProjectReference Include = " . . \ . . \ external \ Box2D \ proj . win32 \ libbox2d . vcxproj " > <nl> < Project > { 929480e7 - 23c0 - 4df6 - 8456 - 096d71547116 } < / Project > <nl> < / ProjectReference > <nl> + < ProjectReference Include = " . . \ . . \ external \ bullet \ proj . win32 \ libbullet . vcxproj " > <nl> + < Project > { 012dff48 - a13f - 4f52 - b07b - f8b9d21ce95b } < / Project > <nl> + < / ProjectReference > <nl> < ProjectReference Include = " . . \ editor - support \ spine \ proj . win32 \ libSpine . vcxproj " > <nl> < Project > { b7c2a162 - dec9 - 4418 - 972e - 240ab3cbfcae } < / Project > <nl> < / ProjectReference > <nl> mmm a / cocos / scripting / js - bindings / CMakeLists . txt <nl> ppp b / cocos / scripting / js - bindings / CMakeLists . txt <nl> set ( JSBINDING_SRC <nl> auto / jsb_cocos2dx_ui_auto . cpp <nl> auto / jsb_cocos2dx_3d_auto . cpp <nl> auto / jsb_cocos2dx_3d_extension_auto . 
cpp <nl> + auto / jsb_cocos2dx_experimental . cpp <nl> manual / ScriptingCore . cpp <nl> manual / cocos2d_specifics . cpp <nl> manual / js_manual_conversions . cpp <nl> set ( JSBINDING_SRC <nl> manual / spine / jsb_cocos2dx_spine_manual . cpp <nl> manual / ui / jsb_cocos2dx_ui_manual . cpp <nl> manual / 3d / jsb_cocos2dx_3d_manual . cpp <nl> + manual / experimental / jsb_cocos2dx_experimental_manual . cpp <nl> $ { cocos_root } / cocos / storage / local - storage / LocalStorage . cpp <nl> ) <nl> <nl> mmm a / cocos / scripting / js - bindings / proj . android / Android . mk <nl> ppp b / cocos / scripting / js - bindings / proj . android / Android . mk <nl> LOCAL_SRC_FILES : = . . / auto / jsb_cocos2dx_3d_auto . cpp \ <nl> . . / auto / jsb_cocos2dx_studio_auto . cpp \ <nl> . . / auto / jsb_cocos2dx_builder_auto . cpp \ <nl> . . / auto / jsb_cocos2dx_ui_auto . cpp \ <nl> + . . / auto / jsb_cocos2dx_experimental . cpp \ <nl> . . / manual / ScriptingCore . cpp \ \ <nl> . . / manual / cocos2d_specifics . cpp \ <nl> . . / manual / js_manual_conversions . cpp \ <nl> LOCAL_SRC_FILES : = . . / auto / jsb_cocos2dx_3d_auto . cpp \ <nl> . . / manual / network / jsb_websocket . cpp \ <nl> . . / manual / network / XMLHTTPRequest . cpp \ <nl> . . / manual / spine / jsb_cocos2dx_spine_manual . cpp \ <nl> - . . / manual / ui / jsb_cocos2dx_ui_manual . cpp <nl> + . . / manual / ui / jsb_cocos2dx_ui_manual . cpp \ <nl> + . . / manual / experimental / jsb_cocos2dx_experimental_manual . cpp <nl> <nl> <nl> LOCAL_CFLAGS : = - DCOCOS2D_JAVASCRIPT <nl> mmm a / cocos / scripting / js - bindings / proj . win32 / libjscocos2d . vcxproj <nl> ppp b / cocos / scripting / js - bindings / proj . win32 / libjscocos2d . vcxproj <nl> <nl> < ClCompile Include = " . . \ auto \ jsb_cocos2dx_3d_extension_auto . cpp " / > <nl> < ClCompile Include = " . . \ auto \ jsb_cocos2dx_auto . cpp " / > <nl> < ClCompile Include = " . . \ auto \ jsb_cocos2dx_builder_auto . cpp " / > <nl> + < ClCompile Include = " . . \ auto \ jsb_cocos2dx_experimental . cpp " / > <nl> < ClCompile Include = " . . \ auto \ jsb_cocos2dx_extension_auto . cpp " / > <nl> < ClCompile Include = " . . \ auto \ jsb_cocos2dx_spine_auto . cpp " / > <nl> < ClCompile Include = " . . \ auto \ jsb_cocos2dx_studio_auto . cpp " / > <nl> <nl> < ClCompile Include = " . . \ manual \ cocosbuilder \ js_bindings_ccbreader . cpp " / > <nl> < ClCompile Include = " . . \ manual \ cocostudio \ jsb_cocos2dx_studio_conversions . cpp " / > <nl> < ClCompile Include = " . . \ manual \ cocostudio \ jsb_cocos2dx_studio_manual . cpp " / > <nl> + < ClCompile Include = " . . \ manual \ experimental \ jsb_cocos2dx_experimental_manual . cpp " / > <nl> < ClCompile Include = " . . \ manual \ extension \ jsb_cocos2dx_extension_manual . cpp " / > <nl> < ClCompile Include = " . . \ manual \ jsb_event_dispatcher_manual . cpp " / > <nl> < ClCompile Include = " . . \ manual \ jsb_opengl_functions . cpp " / > <nl> <nl> < ClInclude Include = " . . \ auto \ jsb_cocos2dx_3d_extension_auto . hpp " / > <nl> < ClInclude Include = " . . \ auto \ jsb_cocos2dx_auto . hpp " / > <nl> < ClInclude Include = " . . \ auto \ jsb_cocos2dx_builder_auto . hpp " / > <nl> + < ClInclude Include = " . . \ auto \ jsb_cocos2dx_experimental . hpp " / > <nl> < ClInclude Include = " . . \ auto \ jsb_cocos2dx_extension_auto . hpp " / > <nl> < ClInclude Include = " . . \ auto \ jsb_cocos2dx_spine_auto . hpp " / > <nl> < ClInclude Include = " . . \ auto \ jsb_cocos2dx_studio_auto . hpp " / > <nl> <nl> < ClInclude Include = " . . 
\ manual \ cocosbuilder \ js_bindings_ccbreader . h " / > <nl> < ClInclude Include = " . . \ manual \ cocostudio \ jsb_cocos2dx_studio_conversions . h " / > <nl> < ClInclude Include = " . . \ manual \ cocostudio \ jsb_cocos2dx_studio_manual . h " / > <nl> + < ClInclude Include = " . . \ manual \ experimental \ jsb_cocos2dx_experimental_manual . h " / > <nl> < ClInclude Include = " . . \ manual \ extension \ jsb_cocos2dx_extension_manual . h " / > <nl> < ClInclude Include = " . . \ manual \ jsb_event_dispatcher_manual . h " / > <nl> < ClInclude Include = " . . \ manual \ jsb_helper . h " / > <nl> mmm a / cocos / scripting / js - bindings / proj . win32 / libjscocos2d . vcxproj . filters <nl> ppp b / cocos / scripting / js - bindings / proj . win32 / libjscocos2d . vcxproj . filters <nl> <nl> < Filter Include = " manual \ 3d " > <nl> < UniqueIdentifier > { 3d1a97d2 - 47a9 - 4fc2 - 8458 - a58c4f1fc111 } < / UniqueIdentifier > <nl> < / Filter > <nl> + < Filter Include = " manual \ experimental " > <nl> + < UniqueIdentifier > { 9f37ec66 - 1e00 - 4015 - baa6 - dabdf755f0ff } < / UniqueIdentifier > <nl> + < / Filter > <nl> < / ItemGroup > <nl> < ItemGroup > <nl> < ClCompile Include = " . . \ auto \ jsb_cocos2dx_auto . cpp " > <nl> <nl> < ClCompile Include = " . . \ manual \ 3d \ jsb_cocos2dx_3d_manual . cpp " > <nl> < Filter > manual \ 3d < / Filter > <nl> < / ClCompile > <nl> + < ClCompile Include = " . . \ auto \ jsb_cocos2dx_experimental . cpp " > <nl> + < Filter > auto < / Filter > <nl> + < / ClCompile > <nl> + < ClCompile Include = " . . \ manual \ experimental \ jsb_cocos2dx_experimental_manual . cpp " > <nl> + < Filter > manual \ experimental < / Filter > <nl> + < / ClCompile > <nl> < / ItemGroup > <nl> < ItemGroup > <nl> < ClInclude Include = " . . \ auto \ jsb_cocos2dx_auto . hpp " > <nl> <nl> < ClInclude Include = " . . \ manual \ 3d \ jsb_cocos2dx_3d_manual . h " > <nl> < Filter > manual \ 3d < / Filter > <nl> < / ClInclude > <nl> + < ClInclude Include = " . . \ auto \ jsb_cocos2dx_experimental . hpp " > <nl> + < Filter > auto < / Filter > <nl> + < / ClInclude > <nl> + < ClInclude Include = " . . \ manual \ experimental \ jsb_cocos2dx_experimental_manual . h " > <nl> + < Filter > manual \ experimental < / Filter > <nl> + < / ClInclude > <nl> < / ItemGroup > <nl> < / Project > <nl> \ No newline at end of file <nl> mmm a / cocos / scripting / js - bindings / proj . win8 . 1 - universal / libjscocos2d / libjscocos2d . Shared / libjscocos2d . Shared . vcxitems <nl> ppp b / cocos / scripting / js - bindings / proj . win8 . 1 - universal / libjscocos2d / libjscocos2d . Shared / libjscocos2d . Shared . vcxitems <nl> <nl> < ClInclude Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ auto \ jsb_cocos2dx_3d_extension_auto . hpp " / > <nl> < ClInclude Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ auto \ jsb_cocos2dx_auto . hpp " / > <nl> < ClInclude Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ auto \ jsb_cocos2dx_builder_auto . hpp " / > <nl> + < ClInclude Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ auto \ jsb_cocos2dx_experimental . hpp " / > <nl> < ClInclude Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ auto \ jsb_cocos2dx_extension_auto . hpp " / > <nl> < ClInclude Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ auto \ jsb_cocos2dx_spine_auto . hpp " / > <nl> < ClInclude Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ auto \ jsb_cocos2dx_studio_auto . 
hpp " / > <nl> <nl> < ClInclude Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ manual \ cocosbuilder \ js_bindings_ccbreader . h " / > <nl> < ClInclude Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ manual \ cocostudio \ jsb_cocos2dx_studio_conversions . h " / > <nl> < ClInclude Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ manual \ cocostudio \ jsb_cocos2dx_studio_manual . h " / > <nl> + < ClInclude Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ manual \ experimental \ jsb_cocos2dx_experimental_manual . h " / > <nl> < ClInclude Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ manual \ extension \ jsb_cocos2dx_extension_manual . h " / > <nl> < ClInclude Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ manual \ jsb_event_dispatcher_manual . h " / > <nl> < ClInclude Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ manual \ jsb_helper . h " / > <nl> <nl> < ClCompile Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ auto \ jsb_cocos2dx_3d_extension_auto . cpp " / > <nl> < ClCompile Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ auto \ jsb_cocos2dx_auto . cpp " / > <nl> < ClCompile Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ auto \ jsb_cocos2dx_builder_auto . cpp " / > <nl> + < ClCompile Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ auto \ jsb_cocos2dx_experimental . cpp " / > <nl> < ClCompile Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ auto \ jsb_cocos2dx_extension_auto . cpp " / > <nl> < ClCompile Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ auto \ jsb_cocos2dx_spine_auto . cpp " / > <nl> < ClCompile Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ auto \ jsb_cocos2dx_studio_auto . cpp " / > <nl> <nl> < ClCompile Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ manual \ cocosbuilder \ js_bindings_ccbreader . cpp " / > <nl> < ClCompile Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ manual \ cocostudio \ jsb_cocos2dx_studio_conversions . cpp " / > <nl> < ClCompile Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ manual \ cocostudio \ jsb_cocos2dx_studio_manual . cpp " / > <nl> + < ClCompile Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ manual \ experimental \ jsb_cocos2dx_experimental_manual . cpp " / > <nl> < ClCompile Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ manual \ extension \ jsb_cocos2dx_extension_manual . cpp " / > <nl> < ClCompile Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ manual \ jsb_event_dispatcher_manual . cpp " / > <nl> < ClCompile Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ manual \ jsb_opengl_functions . cpp " / > <nl> <nl> < None Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ auto \ api \ jsb_cocos2dx_studio_auto_api . js " / > <nl> < None Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ auto \ api \ jsb_cocos2dx_ui_auto_api . js " / > <nl> < / ItemGroup > <nl> - < / Project > <nl> + < / Project > <nl> \ No newline at end of file <nl> mmm a / cocos / scripting / js - bindings / proj . win8 . 1 - universal / libjscocos2d / libjscocos2d . Shared / libjscocos2d . Shared . vcxitems . filters <nl> ppp b / cocos / scripting / js - bindings / proj . win8 . 1 - universal / libjscocos2d / libjscocos2d . Shared / libjscocos2d . Shared . vcxitems . filters <nl> <nl> < ClInclude Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . 
\ auto \ jsb_cocos2dx_3d_extension_auto . hpp " > <nl> < Filter > auto < / Filter > <nl> < / ClInclude > <nl> + < ClInclude Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ auto \ jsb_cocos2dx_experimental . hpp " > <nl> + < Filter > auto < / Filter > <nl> + < / ClInclude > <nl> + < ClInclude Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ manual \ experimental \ jsb_cocos2dx_experimental_manual . h " > <nl> + < Filter > manual \ experimental < / Filter > <nl> + < / ClInclude > <nl> < / ItemGroup > <nl> < ItemGroup > <nl> < ClCompile Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ auto \ jsb_cocos2dx_3d_auto . cpp " > <nl> <nl> < ClCompile Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ auto \ jsb_cocos2dx_3d_extension_auto . cpp " > <nl> < Filter > auto < / Filter > <nl> < / ClCompile > <nl> + < ClCompile Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ auto \ jsb_cocos2dx_experimental . cpp " > <nl> + < Filter > auto < / Filter > <nl> + < / ClCompile > <nl> + < ClCompile Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ manual \ experimental \ jsb_cocos2dx_experimental_manual . cpp " > <nl> + < Filter > manual \ experimental < / Filter > <nl> + < / ClCompile > <nl> < / ItemGroup > <nl> < ItemGroup > <nl> < Filter Include = " auto " > <nl> <nl> < Filter Include = " manual \ ui " > <nl> < UniqueIdentifier > { d5fa188f - db09 - 4c4b - a366 - 682bf76fcea2 } < / UniqueIdentifier > <nl> < / Filter > <nl> + < Filter Include = " manual \ experimental " > <nl> + < UniqueIdentifier > { ddd3651e - c6e7 - 4dea - 80c1 - a0a3dc51a740 } < / UniqueIdentifier > <nl> + < / Filter > <nl> < / ItemGroup > <nl> < ItemGroup > <nl> < None Include = " $ ( MSBuildThisFileDirectory ) . . \ . . \ . . \ auto \ api \ jsb_cocos2dx_3d_auto_api . js " > <nl> mmm a / tools / tojs / cocos2dx . ini <nl> ppp b / tools / tojs / cocos2dx . ini <nl> skip = Node : : [ ^ setPosition $ setGLServerState description getUserObject . * UserDat <nl> Component : : [ serialize ] , <nl> EventListenerCustom : : [ init ] , <nl> EventListener : : [ init ] , <nl> - Scene : : [ getCameras getLights initWithPhysics createWithPhysics getPhysicsWorld getPhysics3DWorld ] , <nl> + Scene : : [ getCameras getLights initWithPhysics createWithPhysics getPhysicsWorld getPhysics3DWorld setPhysics3DDebugCamera ] , <nl> Animate3D : : [ * ] , <nl> Sprite3D : : [ * ] , <nl> AttachNode : : [ * ] , <nl> | add new files to project | cocos2d/cocos2d-x | 7a4ca3fe6a4b655e47600dd48195456a136e0887 | 2015-05-12T02:40:51Z |
mmm a / make_unique . h <nl> ppp b / make_unique . h <nl> template < typename T , typename . . . Args > <nl> std : : unique_ptr < T > make_unique ( Args & & . . . args ) { <nl> return std : : unique_ptr < T > ( new T ( std : : forward < Args > ( args ) . . . ) ) ; <nl> } <nl> + <nl> + template < typename T , typename Deleter , typename . . . Args > <nl> + std : : unique_ptr < T , Deleter > make_unique ( Args & & . . . args ) { <nl> + return std : : unique_ptr < T , Deleter > ( new T ( std : : forward < Args > ( args ) . . . ) ) ; <nl> + } <nl> } <nl> mmm a / thirdparty / libart / src / art - inl . h <nl> ppp b / thirdparty / libart / src / art - inl . h <nl> <nl> - # include " watchman_log . h " <nl> + # include " make_unique . h " <nl> <nl> # ifdef __SSE__ <nl> # include < emmintrin . h > <nl> art_tree < ValueType > : : Node : : Node ( Node_type type , const Node & other ) <nl> template < typename ValueType > <nl> art_tree < ValueType > : : Node4 : : Node4 ( ) : Node ( NODE4 ) { <nl> memset ( keys , 0 , sizeof ( keys ) ) ; <nl> - children . fill ( nullptr ) ; <nl> } <nl> <nl> template < typename ValueType > <nl> art_tree < ValueType > : : Node4 : : Node4 ( Node16 & & n16 ) : Node ( NODE4 , n16 ) { <nl> memset ( keys , 0 , sizeof ( keys ) ) ; <nl> - children . fill ( nullptr ) ; <nl> memcpy ( keys , n16 . keys , n16 . num_children * sizeof ( keys [ 0 ] ) ) ; <nl> std : : move ( <nl> n16 . children . begin ( ) , <nl> art_tree < ValueType > : : Node4 : : Node4 ( Node16 & & n16 ) : Node ( NODE4 , n16 ) { <nl> n16 . num_children = 0 ; <nl> } <nl> <nl> - template < typename ValueType > <nl> - art_tree < ValueType > : : Node4 : : ~ Node4 ( ) { <nl> - int i ; <nl> - for ( i = 0 ; i < this - > num_children ; i + + ) { <nl> - Deleter ( ) ( children [ i ] ) ; <nl> - } <nl> - } <nl> - <nl> template < typename ValueType > <nl> void art_tree < ValueType > : : Node4 : : addChild ( <nl> - Node * * ref , <nl> + NodePtr & ref , <nl> unsigned char c , <nl> - Node * child ) { <nl> + NodePtr & & child ) { <nl> if ( this - > num_children < 4 ) { <nl> int idx ; <nl> for ( idx = 0 ; idx < this - > num_children ; idx + + ) { <nl> void art_tree < ValueType > : : Node4 : : addChild ( <nl> <nl> / / Insert element <nl> keys [ idx ] = c ; <nl> - children [ idx ] = child ; <nl> + children [ idx ] = std : : move ( child ) ; <nl> this - > num_children + + ; <nl> <nl> } else { <nl> - auto new_node = new Node16 ( std : : move ( * this ) ) ; <nl> - * ref = new_node ; <nl> - delete this ; <nl> - new_node - > addChild ( ref , c , child ) ; <nl> + ref = watchman : : make_unique < Node16 , Deleter > ( std : : move ( * this ) ) ; <nl> + ref - > addChild ( ref , c , std : : move ( child ) ) ; <nl> } <nl> } <nl> <nl> template < typename ValueType > <nl> - typename art_tree < ValueType > : : Node * * art_tree < ValueType > : : Node4 : : findChild ( <nl> + typename art_tree < ValueType > : : NodePtr * art_tree < ValueType > : : Node4 : : findChild ( <nl> unsigned char c ) { <nl> int i ; <nl> for ( i = 0 ; i < this - > num_children ; i + + ) { <nl> typename art_tree < ValueType > : : Node * * art_tree < ValueType > : : Node4 : : findChild ( <nl> } <nl> <nl> template < typename ValueType > <nl> - void art_tree < ValueType > : : Node4 : : removeChild ( <nl> - Node * * ref , <nl> + typename art_tree < ValueType > : : NodePtr art_tree < ValueType > : : Node4 : : removeChild ( <nl> + NodePtr & ref , <nl> unsigned char , <nl> - Node * * l ) { <nl> + NodePtr * l ) { <nl> auto pos = l - children . 
data ( ) ; <nl> memmove ( keys + pos , keys + pos + 1 , this - > num_children - 1 - pos ) ; <nl> <nl> + NodePtr result = std : : move ( children [ pos ] ) ; <nl> + <nl> std : : move ( <nl> children . begin ( ) + pos + 1 , <nl> children . begin ( ) + this - > num_children , <nl> void art_tree < ValueType > : : Node4 : : removeChild ( <nl> <nl> / / Remove nodes with only a single child <nl> if ( this - > num_children = = 1 ) { <nl> - auto child = children [ 0 ] ; <nl> + auto child = children [ 0 ] . get ( ) ; <nl> + <nl> if ( ! IS_LEAF ( child ) ) { <nl> / / Concatenate the prefixes <nl> auto prefix = this - > partial_len ; <nl> void art_tree < ValueType > : : Node4 : : removeChild ( <nl> child - > partial , this - > partial , std : : min ( prefix , ART_MAX_PREFIX_LEN ) ) ; <nl> child - > partial_len + = this - > partial_len + 1 ; <nl> } <nl> - * ref = child ; <nl> - this - > num_children = 0 ; <nl> - delete this ; <nl> + <nl> + ref = std : : move ( children [ 0 ] ) ; <nl> } <nl> + <nl> + return result ; <nl> } <nl> <nl> / / mmmmmmmmmmmmmmmmmmmmm Node16 <nl> void art_tree < ValueType > : : Node4 : : removeChild ( <nl> template < typename ValueType > <nl> art_tree < ValueType > : : Node16 : : Node16 ( ) : Node ( NODE16 ) { <nl> memset ( keys , 0 , sizeof ( keys ) ) ; <nl> - children . fill ( nullptr ) ; <nl> } <nl> <nl> template < typename ValueType > <nl> art_tree < ValueType > : : Node16 : : Node16 ( Node4 & & n4 ) : Node ( NODE16 , n4 ) { <nl> - children . fill ( nullptr ) ; <nl> memset ( keys , 0 , sizeof ( keys ) ) ; <nl> <nl> std : : move ( <nl> template < typename ValueType > <nl> art_tree < ValueType > : : Node16 : : Node16 ( Node48 & & n48 ) : Node ( NODE16 , n48 ) { <nl> int i , child = 0 ; <nl> memset ( keys , 0 , sizeof ( keys ) ) ; <nl> - children . fill ( nullptr ) ; <nl> <nl> for ( i = 0 ; i < 256 ; i + + ) { <nl> auto pos = n48 . keys [ i ] ; <nl> if ( pos ) { <nl> keys [ child ] = i ; <nl> - children [ child ] = n48 . children [ pos - 1 ] ; <nl> + children [ child ] = std : : move ( n48 . children [ pos - 1 ] ) ; <nl> child + + ; <nl> } <nl> } <nl> art_tree < ValueType > : : Node16 : : Node16 ( Node48 & & n48 ) : Node ( NODE16 , n48 ) { <nl> n48 . 
num_children = 0 ; <nl> } <nl> <nl> - template < typename ValueType > <nl> - art_tree < ValueType > : : Node16 : : ~ Node16 ( ) { <nl> - int i ; <nl> - for ( i = 0 ; i < this - > num_children ; i + + ) { <nl> - Deleter ( ) ( children [ i ] ) ; <nl> - } <nl> - } <nl> - <nl> template < typename ValueType > <nl> void art_tree < ValueType > : : Node16 : : addChild ( <nl> - Node * * ref , <nl> + NodePtr & ref , <nl> unsigned char c , <nl> - Node * child ) { <nl> + NodePtr & & child ) { <nl> if ( this - > num_children < 16 ) { <nl> unsigned idx ; <nl> # ifdef __SSE__ <nl> void art_tree < ValueType > : : Node16 : : addChild ( <nl> <nl> / / Set the child <nl> keys [ idx ] = c ; <nl> - children [ idx ] = child ; <nl> + children [ idx ] = std : : move ( child ) ; <nl> this - > num_children + + ; <nl> <nl> } else { <nl> - auto new_node = new Node48 ( std : : move ( * this ) ) ; <nl> - * ref = new_node ; <nl> - delete this ; <nl> - new_node - > addChild ( ref , c , child ) ; <nl> + ref = watchman : : make_unique < Node48 , Deleter > ( std : : move ( * this ) ) ; <nl> + ref - > addChild ( ref , c , std : : move ( child ) ) ; <nl> } <nl> } <nl> <nl> template < typename ValueType > <nl> - typename art_tree < ValueType > : : Node * * art_tree < ValueType > : : Node16 : : findChild ( <nl> + typename art_tree < ValueType > : : NodePtr * art_tree < ValueType > : : Node16 : : findChild ( <nl> unsigned char c ) { <nl> # ifdef __SSE__ <nl> __m128i cmp ; <nl> typename art_tree < ValueType > : : Node * * art_tree < ValueType > : : Node16 : : findChild ( <nl> } <nl> <nl> template < typename ValueType > <nl> - void art_tree < ValueType > : : Node16 : : removeChild ( <nl> - Node * * ref , <nl> + typename art_tree < ValueType > : : NodePtr art_tree < ValueType > : : Node16 : : removeChild ( <nl> + NodePtr & ref , <nl> unsigned char , <nl> - Node * * l ) { <nl> + NodePtr * l ) { <nl> auto pos = l - children . data ( ) ; <nl> memmove ( keys + pos , keys + pos + 1 , this - > num_children - 1 - pos ) ; <nl> <nl> + NodePtr result = std : : move ( children [ pos ] ) ; <nl> + <nl> std : : move ( <nl> children . begin ( ) + pos + 1 , <nl> children . begin ( ) + this - > num_children , <nl> void art_tree < ValueType > : : Node16 : : removeChild ( <nl> this - > num_children - - ; <nl> <nl> if ( this - > num_children = = 3 ) { <nl> - auto new_node = new Node4 ( std : : move ( * this ) ) ; <nl> - * ref = new_node ; <nl> - delete this ; <nl> + ref = watchman : : make_unique < Node4 , Deleter > ( std : : move ( * this ) ) ; <nl> } <nl> + <nl> + return result ; <nl> } <nl> <nl> / / mmmmmmmmmmmmmmmmmmmmm Node48 <nl> void art_tree < ValueType > : : Node16 : : removeChild ( <nl> template < typename ValueType > <nl> art_tree < ValueType > : : Node48 : : Node48 ( ) : Node ( NODE48 ) { <nl> memset ( keys , 0 , sizeof ( keys ) ) ; <nl> - children . fill ( nullptr ) ; <nl> } <nl> <nl> template < typename ValueType > <nl> art_tree < ValueType > : : Node48 : : Node48 ( Node16 & & n16 ) : Node ( NODE48 , n16 ) { <nl> int i ; <nl> memset ( keys , 0 , sizeof ( keys ) ) ; <nl> - children . fill ( nullptr ) ; <nl> <nl> std : : move ( <nl> n16 . children . begin ( ) , <nl> art_tree < ValueType > : : Node48 : : Node48 ( Node256 & & n256 ) <nl> : art_tree : : Node ( NODE48 , n256 ) { <nl> int i , pos = 0 ; <nl> memset ( keys , 0 , sizeof ( keys ) ) ; <nl> - children . fill ( nullptr ) ; <nl> <nl> for ( i = 0 ; i < 256 ; i + + ) { <nl> if ( n256 . children [ i ] ) { <nl> - children [ pos ] = n256 . 
children [ i ] ; <nl> + children [ pos ] = std : : move ( n256 . children [ i ] ) ; <nl> keys [ i ] = pos + 1 ; <nl> pos + + ; <nl> } <nl> art_tree < ValueType > : : Node48 : : Node48 ( Node256 & & n256 ) <nl> n256 . num_children = 0 ; <nl> } <nl> <nl> - template < typename ValueType > <nl> - art_tree < ValueType > : : Node48 : : ~ Node48 ( ) { <nl> - int i ; <nl> - for ( i = 0 ; i < this - > num_children ; i + + ) { <nl> - Deleter ( ) ( children [ i ] ) ; <nl> - } <nl> - } <nl> - <nl> template < typename ValueType > <nl> void art_tree < ValueType > : : Node48 : : addChild ( <nl> - Node * * ref , <nl> + NodePtr & ref , <nl> unsigned char c , <nl> - Node * child ) { <nl> + NodePtr & & child ) { <nl> if ( this - > num_children < 48 ) { <nl> int pos = 0 ; <nl> while ( children [ pos ] ) { <nl> pos + + ; <nl> } <nl> - children [ pos ] = child ; <nl> + children [ pos ] = std : : move ( child ) ; <nl> keys [ c ] = pos + 1 ; <nl> this - > num_children + + ; <nl> } else { <nl> - auto new_node = new Node256 ( std : : move ( * this ) ) ; <nl> - * ref = new_node ; <nl> - delete this ; <nl> - new_node - > addChild ( ref , c , child ) ; <nl> + ref = watchman : : make_unique < Node256 , Deleter > ( std : : move ( * this ) ) ; <nl> + ref - > addChild ( ref , c , std : : move ( child ) ) ; <nl> } <nl> } <nl> <nl> template < typename ValueType > <nl> - typename art_tree < ValueType > : : Node * * art_tree < ValueType > : : Node48 : : findChild ( <nl> + typename art_tree < ValueType > : : NodePtr * art_tree < ValueType > : : Node48 : : findChild ( <nl> unsigned char c ) { <nl> auto i = keys [ c ] ; <nl> if ( i ) { <nl> typename art_tree < ValueType > : : Node * * art_tree < ValueType > : : Node48 : : findChild ( <nl> } <nl> <nl> template < typename ValueType > <nl> - void art_tree < ValueType > : : Node48 : : removeChild ( <nl> - Node * * ref , <nl> + typename art_tree < ValueType > : : NodePtr art_tree < ValueType > : : Node48 : : removeChild ( <nl> + NodePtr & ref , <nl> unsigned char c , <nl> - Node * * ) { <nl> + NodePtr * ) { <nl> int pos = keys [ c ] ; <nl> keys [ c ] = 0 ; <nl> - children [ pos - 1 ] = nullptr ; <nl> + <nl> + NodePtr result = std : : move ( children [ pos - 1 ] ) ; <nl> this - > num_children - - ; <nl> <nl> if ( this - > num_children = = 12 ) { <nl> - auto new_node = new Node16 ( std : : move ( * this ) ) ; <nl> - * ref = new_node ; <nl> - delete this ; <nl> + ref = watchman : : make_unique < Node16 , Deleter > ( std : : move ( * this ) ) ; <nl> } <nl> + <nl> + return result ; <nl> } <nl> <nl> / / mmmmmmmmmmmmmmmmmmmmm Node256 <nl> <nl> - template < typename ValueType > <nl> - art_tree < ValueType > : : Node256 : : Node256 ( ) : Node ( NODE256 ) { <nl> - children . fill ( nullptr ) ; <nl> - } <nl> - <nl> template < typename ValueType > <nl> art_tree < ValueType > : : Node256 : : Node256 ( Node48 & & n48 ) : Node ( NODE256 , n48 ) { <nl> int i ; <nl> - children . fill ( nullptr ) ; <nl> for ( i = 0 ; i < 256 ; i + + ) { <nl> if ( n48 . keys [ i ] ) { <nl> - children [ i ] = n48 . children [ n48 . keys [ i ] - 1 ] ; <nl> + children [ i ] = std : : move ( n48 . children [ n48 . keys [ i ] - 1 ] ) ; <nl> } <nl> } <nl> <nl> n48 . 
num_children = 0 ; <nl> } <nl> <nl> - template < typename ValueType > <nl> - art_tree < ValueType > : : Node256 : : ~ Node256 ( ) { <nl> - int i ; <nl> - for ( i = 0 ; this - > num_children > 0 & & i < 256 ; i + + ) { <nl> - if ( children [ i ] ) { <nl> - Deleter ( ) ( children [ i ] ) ; <nl> - } <nl> - } <nl> - } <nl> - <nl> template < typename ValueType > <nl> void art_tree < ValueType > : : Node256 : : addChild ( <nl> - Node * * , <nl> + NodePtr & , <nl> unsigned char c , <nl> - Node * child ) { <nl> + NodePtr & & child ) { <nl> this - > num_children + + ; <nl> - children [ c ] = child ; <nl> + children [ c ] = std : : move ( child ) ; <nl> } <nl> <nl> template < typename ValueType > <nl> - typename art_tree < ValueType > : : Node * * art_tree < ValueType > : : Node256 : : findChild ( <nl> + typename art_tree < ValueType > : : NodePtr * art_tree < ValueType > : : Node256 : : findChild ( <nl> unsigned char c ) { <nl> if ( children [ c ] ) { <nl> return & children [ c ] ; <nl> typename art_tree < ValueType > : : Node * * art_tree < ValueType > : : Node256 : : findChild ( <nl> } <nl> <nl> template < typename ValueType > <nl> - void art_tree < ValueType > : : Node256 : : removeChild ( <nl> - Node * * ref , <nl> + typename art_tree < ValueType > : : NodePtr art_tree < ValueType > : : Node256 : : removeChild ( <nl> + NodePtr & ref , <nl> unsigned char c , <nl> - Node * * ) { <nl> - children [ c ] = NULL ; <nl> + NodePtr * ) { <nl> + NodePtr result = std : : move ( children [ c ] ) ; <nl> this - > num_children - - ; <nl> <nl> / / Resize to a node48 on underflow , not immediately to prevent <nl> / / trashing if we sit on the 48 / 49 boundary <nl> if ( this - > num_children = = 37 ) { <nl> - auto new_node = new Node48 ( std : : move ( * this ) ) ; <nl> - * ref = new_node ; <nl> - delete this ; <nl> + ref = watchman : : make_unique < Node48 , Deleter > ( std : : move ( * this ) ) ; <nl> } <nl> + <nl> + return result ; <nl> } <nl> <nl> / * * <nl> art_tree < ValueType > : : ~ art_tree ( ) { <nl> <nl> template < typename ValueType > <nl> void art_tree < ValueType > : : clear ( ) { <nl> - Deleter ( ) ( root_ ) ; <nl> - root_ = nullptr ; <nl> + root_ . reset ( ) ; <nl> size_ = 0 ; <nl> } <nl> <nl> template < typename ValueType > <nl> ValueType * art_tree < ValueType > : : search ( <nl> const unsigned char * key , <nl> uint32_t key_len ) const { <nl> - auto n = root_ ; <nl> + auto n = root_ . get ( ) ; <nl> uint32_t depth = 0 ; <nl> while ( n ) { <nl> / / Might be a leaf <nl> ValueType * art_tree < ValueType > : : search ( <nl> <nl> / / Recursively search <nl> auto child = n - > findChild ( keyAt ( key , key_len , depth ) ) ; <nl> - n = ( child ) ? * child : NULL ; <nl> + n = child ? child - > get ( ) : nullptr ; <nl> depth + + ; <nl> } <nl> return nullptr ; <nl> template < typename ValueType > <nl> typename art_tree < ValueType > : : Leaf * art_tree < ValueType > : : longestMatch ( <nl> const unsigned char * key , <nl> uint32_t key_len ) const { <nl> - auto n = root_ ; <nl> + auto n = root_ . get ( ) ; <nl> uint32_t depth = 0 ; <nl> while ( n ) { <nl> / / Might be a leaf <nl> typename art_tree < ValueType > : : Leaf * art_tree < ValueType > : : longestMatch ( <nl> <nl> / / Recursively search <nl> auto child = n - > findChild ( keyAt ( key , key_len , depth ) ) ; <nl> - n = ( child ) ? * child : nullptr ; <nl> + n = child ? 
child - > get ( ) : nullptr ; <nl> depth + + ; <nl> } <nl> return nullptr ; <nl> typename art_tree < ValueType > : : Leaf * art_tree < ValueType > : : Node : : minimum ( ) const { <nl> <nl> switch ( p . n - > type ) { <nl> case NODE4 : <nl> - p . n = p . n4 - > children [ 0 ] ; <nl> + p . n = p . n4 - > children [ 0 ] . get ( ) ; <nl> break ; <nl> case NODE16 : <nl> - p . n = p . n16 - > children [ 0 ] ; <nl> + p . n = p . n16 - > children [ 0 ] . get ( ) ; <nl> break ; <nl> case NODE48 : <nl> idx = 0 ; <nl> typename art_tree < ValueType > : : Leaf * art_tree < ValueType > : : Node : : minimum ( ) const { <nl> idx + + ; <nl> } <nl> idx = p . n48 - > keys [ idx ] - 1 ; <nl> - p . n = p . n48 - > children [ idx ] ; <nl> + p . n = p . n48 - > children [ idx ] . get ( ) ; <nl> break ; <nl> case NODE256 : <nl> idx = 0 ; <nl> while ( ! p . n256 - > children [ idx ] ) { <nl> idx + + ; <nl> } <nl> - p . n = p . n256 - > children [ idx ] ; <nl> + p . n = p . n256 - > children [ idx ] . get ( ) ; <nl> break ; <nl> default : <nl> abort ( ) ; <nl> typename art_tree < ValueType > : : Leaf * art_tree < ValueType > : : Node : : maximum ( ) const { <nl> <nl> switch ( p . n - > type ) { <nl> case NODE4 : <nl> - p . n = p . n4 - > children [ p . n - > num_children - 1 ] ; <nl> + p . n = p . n4 - > children [ p . n - > num_children - 1 ] . get ( ) ; <nl> break ; <nl> case NODE16 : <nl> - p . n = p . n16 - > children [ p . n - > num_children - 1 ] ; <nl> + p . n = p . n16 - > children [ p . n - > num_children - 1 ] . get ( ) ; <nl> break ; <nl> case NODE48 : <nl> idx = 255 ; <nl> typename art_tree < ValueType > : : Leaf * art_tree < ValueType > : : Node : : maximum ( ) const { <nl> idx - - ; <nl> } <nl> idx = p . n48 - > keys [ idx ] - 1 ; <nl> - p . n = p . n48 - > children [ idx ] ; <nl> + p . n = p . n48 - > children [ idx ] . get ( ) ; <nl> break ; <nl> case NODE256 : <nl> idx = 255 ; <nl> while ( ! p . n256 - > children [ idx ] ) { <nl> idx - - ; <nl> } <nl> - p . n = p . n256 - > children [ idx ] ; <nl> + p . n = p . n256 - > children [ idx ] . get ( ) ; <nl> break ; <nl> default : <nl> abort ( ) ; <nl> typename art_tree < ValueType > : : Leaf * art_tree < ValueType > : : maximum ( ) const { <nl> <nl> / / Constructs a new leaf using the provided key . <nl> template < typename ValueType > <nl> - typename art_tree < ValueType > : : Leaf * art_tree < ValueType > : : Leaf : : make ( <nl> + typename art_tree < ValueType > : : LeafPtr art_tree < ValueType > : : Leaf : : make ( <nl> const unsigned char * key , <nl> uint32_t key_len , <nl> const ValueType & value ) { <nl> typename art_tree < ValueType > : : Leaf * art_tree < ValueType > : : Leaf : : make ( <nl> l - > value = value ; <nl> l - > key_len = key_len ; <nl> memcpy ( l - > key , key , key_len ) ; <nl> - return l ; <nl> + return LeafPtr ( l ) ; <nl> } <nl> <nl> template < typename ValueType > <nl> uint32_t art_tree < ValueType > : : Node : : prefixMismatch ( <nl> <nl> template < typename ValueType > <nl> void art_tree < ValueType > : : recursiveInsert ( <nl> - Node * n , <nl> - Node * * ref , <nl> + NodePtr & ref , <nl> const unsigned char * key , <nl> uint32_t key_len , <nl> const ValueType & value , <nl> uint32_t depth , <nl> int * old ) { <nl> / / If we are at a NULL node , inject a leaf <nl> - if ( ! n ) { <nl> - * ref = SET_LEAF ( art_tree < ValueType > : : Leaf : : make ( key , key_len , value ) ) ; <nl> + if ( ! 
ref ) { <nl> + ref = LeafToNode ( Leaf : : make ( key , key_len , value ) ) ; <nl> return ; <nl> } <nl> <nl> / / If we are at a leaf , we need to replace it with a node <nl> - if ( IS_LEAF ( n ) ) { <nl> - Node4 * new_node ; <nl> - Leaf * l2 ; <nl> - <nl> - auto l = LEAF_RAW ( n ) ; <nl> + if ( IS_LEAF ( ref . get ( ) ) ) { <nl> + auto l = LEAF_RAW ( ref . get ( ) ) ; <nl> <nl> / / Check if we are updating an existing value <nl> if ( l - > matches ( key , key_len ) ) { <nl> void art_tree < ValueType > : : recursiveInsert ( <nl> } <nl> <nl> / / New value , we must split the leaf into a node4 <nl> - new_node = new Node4 ; <nl> + NodePtr new_node = watchman : : make_unique < Node4 , Deleter > ( ) ; <nl> <nl> / / Create a new leaf <nl> - l2 = Leaf : : make ( key , key_len , value ) ; <nl> + auto l2 = Leaf : : make ( key , key_len , value ) ; <nl> <nl> / / Determine longest prefix <nl> - auto longest_prefix = l - > longestCommonPrefix ( l2 , depth ) ; <nl> + auto longest_prefix = l - > longestCommonPrefix ( l2 . get ( ) , depth ) ; <nl> new_node - > partial_len = longest_prefix ; <nl> memcpy ( <nl> new_node - > partial , <nl> l2 - > key + depth , <nl> std : : min ( ART_MAX_PREFIX_LEN , longest_prefix ) ) ; <nl> + <nl> / / Add the leafs to the new node4 <nl> - * ref = new_node ; <nl> - new_node - > addChild ( ref , l - > keyAt ( depth + longest_prefix ) , SET_LEAF ( l ) ) ; <nl> - new_node - > addChild ( ref , l2 - > keyAt ( depth + longest_prefix ) , SET_LEAF ( l2 ) ) ; <nl> + new_node - > addChild ( <nl> + new_node , l - > keyAt ( depth + longest_prefix ) , std : : move ( ref ) ) ; <nl> + <nl> + auto leaf2Weak = l2 . get ( ) ; <nl> + new_node - > addChild ( <nl> + new_node , <nl> + leaf2Weak - > keyAt ( depth + longest_prefix ) , <nl> + LeafToNode ( std : : move ( l2 ) ) ) ; <nl> + <nl> + ref = std : : move ( new_node ) ; <nl> return ; <nl> } <nl> <nl> / / Check if given node has a prefix <nl> - if ( n - > partial_len ) { <nl> + if ( ref - > partial_len ) { <nl> / / Determine if the prefixes differ , since we need to split <nl> - auto prefix_diff = n - > prefixMismatch ( key , key_len , depth ) ; <nl> - if ( prefix_diff > = n - > partial_len ) { <nl> - depth + = n - > partial_len ; <nl> + auto prefix_diff = ref - > prefixMismatch ( key , key_len , depth ) ; <nl> + if ( prefix_diff > = ref - > partial_len ) { <nl> + depth + = ref - > partial_len ; <nl> goto RECURSE_SEARCH ; <nl> } <nl> <nl> + / / Weak ref to current node <nl> + auto origNode = ref . 
get ( ) ; <nl> + <nl> / / Create a new node <nl> - auto new_node = new art_tree < ValueType > : : Node4 ; <nl> - * ref = new_node ; <nl> + NodePtr new_node = watchman : : make_unique < Node4 , Deleter > ( ) ; <nl> new_node - > partial_len = prefix_diff ; <nl> memcpy ( <nl> new_node - > partial , <nl> - n - > partial , <nl> + origNode - > partial , <nl> std : : min ( ART_MAX_PREFIX_LEN , prefix_diff ) ) ; <nl> <nl> / / Adjust the prefix of the old node <nl> - if ( n - > partial_len < = ART_MAX_PREFIX_LEN ) { <nl> - new_node - > addChild ( ref , n - > partial [ prefix_diff ] , n ) ; <nl> - n - > partial_len - = ( prefix_diff + 1 ) ; <nl> + if ( origNode - > partial_len < = ART_MAX_PREFIX_LEN ) { <nl> + new_node - > addChild ( <nl> + new_node , origNode - > partial [ prefix_diff ] , std : : move ( ref ) ) ; <nl> + origNode - > partial_len - = ( prefix_diff + 1 ) ; <nl> memmove ( <nl> - n - > partial , <nl> - n - > partial + prefix_diff + 1 , <nl> - std : : min ( ART_MAX_PREFIX_LEN , n - > partial_len ) ) ; <nl> + origNode - > partial , <nl> + origNode - > partial + prefix_diff + 1 , <nl> + std : : min ( ART_MAX_PREFIX_LEN , origNode - > partial_len ) ) ; <nl> } else { <nl> - n - > partial_len - = ( prefix_diff + 1 ) ; <nl> - auto minLeaf = n - > minimum ( ) ; <nl> - new_node - > addChild ( ref , minLeaf - > keyAt ( depth + prefix_diff ) , n ) ; <nl> + origNode - > partial_len - = ( prefix_diff + 1 ) ; <nl> + auto minLeaf = origNode - > minimum ( ) ; <nl> + new_node - > addChild ( <nl> + new_node , minLeaf - > keyAt ( depth + prefix_diff ) , std : : move ( ref ) ) ; <nl> memcpy ( <nl> - n - > partial , <nl> + origNode - > partial , <nl> minLeaf - > key + depth + prefix_diff + 1 , <nl> - std : : min ( ART_MAX_PREFIX_LEN , n - > partial_len ) ) ; <nl> + std : : min ( ART_MAX_PREFIX_LEN , origNode - > partial_len ) ) ; <nl> } <nl> <nl> / / Insert the new leaf <nl> auto l = Leaf : : make ( key , key_len , value ) ; <nl> - new_node - > addChild ( ref , l - > keyAt ( depth + prefix_diff ) , SET_LEAF ( l ) ) ; <nl> + auto leafWeak = l . get ( ) ; <nl> + new_node - > addChild ( <nl> + new_node , <nl> + leafWeak - > keyAt ( depth + prefix_diff ) , <nl> + LeafToNode ( std : : move ( l ) ) ) ; <nl> + <nl> + ref = std : : move ( new_node ) ; <nl> return ; <nl> } <nl> <nl> RECURSE_SEARCH : ; <nl> { <nl> / / Find a child to recurse to <nl> - auto child = n - > findChild ( keyAt ( key , key_len , depth ) ) ; <nl> + auto child = ref - > findChild ( keyAt ( key , key_len , depth ) ) ; <nl> if ( child ) { <nl> - recursiveInsert ( * child , child , key , key_len , value , depth + 1 , old ) ; <nl> + recursiveInsert ( * child , key , key_len , value , depth + 1 , old ) ; <nl> return ; <nl> } <nl> <nl> / / No child , node goes within us <nl> auto l = Leaf : : make ( key , key_len , value ) ; <nl> - n - > addChild ( ref , l - > keyAt ( depth ) , SET_LEAF ( l ) ) ; <nl> + auto leafWeak = l . get ( ) ; <nl> + ref - > addChild ( ref , leafWeak - > keyAt ( depth ) , LeafToNode ( std : : move ( l ) ) ) ; <nl> } <nl> } <nl> <nl> void art_tree < ValueType > : : insert ( <nl> uint32_t key_len , <nl> const ValueType & value ) { <nl> int old_val = 0 ; <nl> - recursiveInsert ( root_ , & root_ , key , key_len , value , 0 , & old_val ) ; <nl> + recursiveInsert ( root_ , key , key_len , value , 0 , & old_val ) ; <nl> if ( ! 
old_val ) { <nl> size_ + + ; <nl> } <nl> } <nl> <nl> template < typename ValueType > <nl> - typename art_tree < ValueType > : : Leaf * art_tree < ValueType > : : recursiveDelete ( <nl> - Node * n , <nl> - Node * * ref , <nl> + typename art_tree < ValueType > : : NodePtr art_tree < ValueType > : : recursiveDelete ( <nl> + NodePtr & ref , <nl> const unsigned char * key , <nl> uint32_t key_len , <nl> uint32_t depth ) { <nl> / / Search terminated <nl> - if ( ! n ) { <nl> + if ( ! ref ) { <nl> return nullptr ; <nl> } <nl> <nl> / / Handle hitting a leaf node <nl> - if ( IS_LEAF ( n ) ) { <nl> - auto l = LEAF_RAW ( n ) ; <nl> + if ( IS_LEAF ( ref . get ( ) ) ) { <nl> + auto l = LEAF_RAW ( ref . get ( ) ) ; <nl> if ( l - > matches ( key , key_len ) ) { <nl> - * ref = nullptr ; <nl> - return l ; <nl> + NodePtr result ; <nl> + std : : swap ( result , ref ) ; <nl> + return result ; <nl> } <nl> return nullptr ; <nl> } <nl> <nl> / / Bail if the prefix does not match <nl> - if ( n - > partial_len ) { <nl> - auto prefix_len = n - > checkPrefix ( key , key_len , depth ) ; <nl> - if ( prefix_len ! = std : : min ( ART_MAX_PREFIX_LEN , n - > partial_len ) ) { <nl> + if ( ref - > partial_len ) { <nl> + auto prefix_len = ref - > checkPrefix ( key , key_len , depth ) ; <nl> + if ( prefix_len ! = std : : min ( ART_MAX_PREFIX_LEN , ref - > partial_len ) ) { <nl> return nullptr ; <nl> } <nl> - depth = depth + n - > partial_len ; <nl> + depth = depth + ref - > partial_len ; <nl> } <nl> <nl> / / Find child node <nl> - auto child = n - > findChild ( keyAt ( key , key_len , depth ) ) ; <nl> + auto child = ref - > findChild ( keyAt ( key , key_len , depth ) ) ; <nl> if ( ! child ) { <nl> return nullptr ; <nl> } <nl> <nl> / / If the child is leaf , delete from this node <nl> - if ( IS_LEAF ( * child ) ) { <nl> - auto l = LEAF_RAW ( * child ) ; <nl> + if ( IS_LEAF ( child - > get ( ) ) ) { <nl> + auto l = LEAF_RAW ( child - > get ( ) ) ; <nl> if ( l - > matches ( key , key_len ) ) { <nl> - n - > removeChild ( ref , keyAt ( key , key_len , depth ) , child ) ; <nl> - return l ; <nl> + return ref - > removeChild ( ref , keyAt ( key , key_len , depth ) , child ) ; <nl> } <nl> return nullptr ; <nl> } <nl> / / Recurse <nl> - return recursiveDelete ( * child , child , key , key_len , depth + 1 ) ; <nl> + return recursiveDelete ( * child , key , key_len , depth + 1 ) ; <nl> } <nl> <nl> / * * <nl> typename art_tree < ValueType > : : Leaf * art_tree < ValueType > : : recursiveDelete ( <nl> * the value pointer is returned . <nl> * / <nl> template < typename ValueType > <nl> - bool art_tree < ValueType > : : erase ( const unsigned char * key , uint32_t key_len ) { <nl> - auto l = recursiveDelete ( root_ , & root_ , key , key_len , 0 ) ; <nl> + typename art_tree < ValueType > : : LeafPtr art_tree < ValueType > : : erase ( <nl> + const unsigned char * key , <nl> + uint32_t key_len ) { <nl> + auto l = recursiveDelete ( root_ , key , key_len , 0 ) ; <nl> if ( l ) { <nl> size_ - - ; <nl> - Deleter ( ) ( l ) ; <nl> - return true ; <nl> + return NodeToLeaf ( std : : move ( l ) ) ; <nl> } <nl> - return false ; <nl> + return nullptr ; <nl> } <nl> <nl> / / Recursively iterates over the tree <nl> int art_tree < ValueType > : : recursiveIter ( Node * n , art_callback cb , void * data ) { <nl> switch ( n - > type ) { <nl> case Node_type : : NODE4 : <nl> for ( i = 0 ; i < n - > num_children ; i + + ) { <nl> - res = recursiveIter ( p . n4 - > children [ i ] , cb , data ) ; <nl> + res = recursiveIter ( p . n4 - > children [ i ] . 
get ( ) , cb , data ) ; <nl> if ( res ) { <nl> return res ; <nl> } <nl> int art_tree < ValueType > : : recursiveIter ( Node * n , art_callback cb , void * data ) { <nl> <nl> case Node_type : : NODE16 : <nl> for ( i = 0 ; i < n - > num_children ; i + + ) { <nl> - res = recursiveIter ( p . n16 - > children [ i ] , cb , data ) ; <nl> + res = recursiveIter ( p . n16 - > children [ i ] . get ( ) , cb , data ) ; <nl> if ( res ) { <nl> return res ; <nl> } <nl> int art_tree < ValueType > : : recursiveIter ( Node * n , art_callback cb , void * data ) { <nl> continue ; <nl> } <nl> <nl> - res = recursiveIter ( p . n48 - > children [ idx - 1 ] , cb , data ) ; <nl> + res = recursiveIter ( p . n48 - > children [ idx - 1 ] . get ( ) , cb , data ) ; <nl> if ( res ) { <nl> return res ; <nl> } <nl> int art_tree < ValueType > : : recursiveIter ( Node * n , art_callback cb , void * data ) { <nl> if ( ! p . n256 - > children [ i ] ) { <nl> continue ; <nl> } <nl> - res = recursiveIter ( p . n256 - > children [ i ] , cb , data ) ; <nl> + res = recursiveIter ( p . n256 - > children [ i ] . get ( ) , cb , data ) ; <nl> if ( res ) { <nl> return res ; <nl> } <nl> int art_tree < ValueType > : : recursiveIter ( Node * n , art_callback cb , void * data ) { <nl> * / <nl> template < typename ValueType > <nl> int art_tree < ValueType > : : iter ( art_callback cb , void * data ) { <nl> - return recursiveIter ( root_ , cb , data ) ; <nl> + return recursiveIter ( root_ . get ( ) , cb , data ) ; <nl> } <nl> <nl> / * * <nl> int art_tree < ValueType > : : iterPrefix ( <nl> uint32_t key_len , <nl> art_callback cb , <nl> void * data ) { <nl> - auto n = root_ ; <nl> + auto n = root_ . get ( ) ; <nl> uint32_t prefix_len , depth = 0 ; <nl> struct prefix_iterator_state < ValueType > state = { <nl> key , key_len , cb , data <nl> int art_tree < ValueType > : : iterPrefix ( <nl> <nl> / / Recursively search <nl> auto child = n - > findChild ( keyAt ( key , key_len , depth ) ) ; <nl> - n = ( child ) ? * child : NULL ; <nl> + n = child ? child - > get ( ) : nullptr ; <nl> depth + + ; <nl> } <nl> return 0 ; <nl> mmm a / thirdparty / libart / src / art . h <nl> ppp b / thirdparty / libart / src / art . h <nl> <nl> # include < cstdint > <nl> # include < functional > <nl> # include < memory > <nl> + # include " watchman_log . h " <nl> <nl> # define ART_MAX_PREFIX_LEN 10u <nl> <nl> struct art_tree { <nl> void operator ( ) ( Leaf * leaf ) const ; <nl> } ; <nl> using NodePtr = std : : unique_ptr < Node , Deleter > ; <nl> + using LeafPtr = std : : unique_ptr < Leaf , Deleter > ; <nl> <nl> / * * <nl> * This struct is included as part <nl> struct art_tree { <nl> <nl> Leaf * maximum ( ) const ; <nl> Leaf * minimum ( ) const ; <nl> - virtual Node * * findChild ( unsigned char c ) = 0 ; <nl> + virtual NodePtr * findChild ( unsigned char c ) = 0 ; <nl> <nl> / / Returns the number of prefix characters shared between the key and node . 
<nl> uint32_t checkPrefix ( <nl> struct art_tree { <nl> uint32_t key_len , <nl> uint32_t depth ) const ; <nl> <nl> - virtual void addChild ( Node * * ref , unsigned char c , Node * child ) = 0 ; <nl> - virtual void removeChild ( Node * * ref , unsigned char c , Node * * l ) = 0 ; <nl> + virtual void addChild ( NodePtr & ref , unsigned char c , NodePtr & & child ) = 0 ; <nl> + virtual NodePtr removeChild ( NodePtr & ref , unsigned char c , NodePtr * l ) = 0 ; <nl> } ; <nl> <nl> struct Node4 ; <nl> struct art_tree { <nl> return uintptr_t ( x ) & 1 ; <nl> } <nl> <nl> - static inline Node * SET_LEAF ( const Leaf * l ) { <nl> - return ( Node * ) ( uintptr_t ( l ) | 1 ) ; <nl> - } <nl> - <nl> static inline Leaf * LEAF_RAW ( const Node * x ) { <nl> return ( Leaf * ) ( ( void * ) ( ( uintptr_t ( x ) & ~ 1 ) ) ) ; <nl> } <nl> <nl> + static inline NodePtr LeafToNode ( LeafPtr & & leaf ) { <nl> + return NodePtr ( ( Node * ) ( uintptr_t ( leaf . release ( ) ) | 1 ) ) ; <nl> + } <nl> + <nl> + static inline LeafPtr NodeToLeaf ( NodePtr & & node ) { <nl> + return LeafPtr ( LEAF_RAW ( node . release ( ) ) ) ; <nl> + } <nl> + <nl> static inline unsigned char <nl> keyAt ( const unsigned char * key , uint32_t key_len , uint32_t idx ) ; <nl> <nl> struct art_tree { <nl> * / <nl> struct Node4 : public Node { <nl> unsigned char keys [ 4 ] ; <nl> - std : : array < Node * , 4 > children ; <nl> + std : : array < NodePtr , 4 > children ; <nl> <nl> - ~ Node4 ( ) ; <nl> Node4 ( ) ; <nl> explicit Node4 ( Node16 & & n16 ) ; <nl> - void addChild ( Node * * ref , unsigned char c , Node * child ) override ; <nl> - void removeChild ( Node * * ref , unsigned char c , Node * * l ) override ; <nl> - Node * * findChild ( unsigned char c ) override ; <nl> + void addChild ( NodePtr & ref , unsigned char c , NodePtr & & child ) override ; <nl> + NodePtr removeChild ( NodePtr & ref , unsigned char c , NodePtr * l ) override ; <nl> + NodePtr * findChild ( unsigned char c ) override ; <nl> } ; <nl> <nl> / * * <nl> struct art_tree { <nl> * / <nl> struct Node16 : public Node { <nl> unsigned char keys [ 16 ] ; <nl> - std : : array < Node * , 16 > children ; <nl> + std : : array < NodePtr , 16 > children ; <nl> <nl> - ~ Node16 ( ) ; <nl> Node16 ( ) ; <nl> explicit Node16 ( Node4 & & n4 ) ; <nl> explicit Node16 ( Node48 & & n48 ) ; <nl> - void addChild ( Node * * ref , unsigned char c , Node * child ) override ; <nl> - void removeChild ( Node * * ref , unsigned char c , Node * * l ) override ; <nl> - Node * * findChild ( unsigned char c ) override ; <nl> + void addChild ( NodePtr & ref , unsigned char c , NodePtr & & child ) override ; <nl> + NodePtr removeChild ( NodePtr & ref , unsigned char c , NodePtr * l ) override ; <nl> + NodePtr * findChild ( unsigned char c ) override ; <nl> } ; <nl> <nl> / * * <nl> struct art_tree { <nl> * / <nl> struct Node48 : public Node { <nl> unsigned char keys [ 256 ] ; <nl> - std : : array < Node * , 48 > children ; <nl> + std : : array < NodePtr , 48 > children ; <nl> <nl> - ~ Node48 ( ) ; <nl> Node48 ( ) ; <nl> explicit Node48 ( Node16 & & n16 ) ; <nl> explicit Node48 ( Node256 & & n256 ) ; <nl> - void addChild ( Node * * ref , unsigned char c , Node * child ) override ; <nl> - void removeChild ( Node * * ref , unsigned char c , Node * * l ) override ; <nl> - Node * * findChild ( unsigned char c ) override ; <nl> + void addChild ( NodePtr & ref , unsigned char c , NodePtr & & child ) override ; <nl> + NodePtr removeChild ( NodePtr & ref , unsigned char c , NodePtr * l ) override ; <nl> + NodePtr * 
findChild ( unsigned char c ) override ; <nl> } ; <nl> <nl> / * * <nl> * Full node with 256 children <nl> * / <nl> struct Node256 : public Node { <nl> - std : : array < Node * , 256 > children ; <nl> + std : : array < NodePtr , 256 > children ; <nl> <nl> - ~ Node256 ( ) ; <nl> Node256 ( ) ; <nl> explicit Node256 ( Node48 & & n48 ) ; <nl> - void addChild ( Node * * ref , unsigned char c , Node * child ) override ; <nl> - void removeChild ( Node * * ref , unsigned char c , Node * * l ) override ; <nl> - Node * * findChild ( unsigned char c ) override ; <nl> + void addChild ( NodePtr & ref , unsigned char c , NodePtr & & child ) override ; <nl> + NodePtr removeChild ( NodePtr & ref , unsigned char c , NodePtr * l ) override ; <nl> + NodePtr * findChild ( unsigned char c ) override ; <nl> } ; <nl> <nl> / * * <nl> struct art_tree { <nl> <nl> bool matches ( const unsigned char * key , uint32_t key_len ) const ; <nl> <nl> - static Leaf * <nl> + static LeafPtr <nl> make ( const unsigned char * key , uint32_t key_len , const ValueType & value ) ; <nl> <nl> uint32_t longestCommonPrefix ( const Leaf * other , uint32_t depth ) const ; <nl> struct art_tree { <nl> * @ arg key_len The length of the key <nl> * @ return true if the item was erased , false otherwise . <nl> * / <nl> - bool erase ( const unsigned char * key , uint32_t key_len ) ; <nl> + LeafPtr erase ( const unsigned char * key , uint32_t key_len ) ; <nl> <nl> / * * <nl> * Searches for a value in the ART tree <nl> struct art_tree { <nl> void * data ) ; <nl> <nl> private : <nl> - Node * root_ ; <nl> + NodePtr root_ ; <nl> uint64_t size_ ; <nl> <nl> void recursiveInsert ( <nl> - Node * n , <nl> - Node * * ref , <nl> + NodePtr & ref , <nl> const unsigned char * key , <nl> uint32_t key_len , <nl> const ValueType & value , <nl> uint32_t depth , <nl> int * old ) ; <nl> - Leaf * recursiveDelete ( <nl> - Node * n , <nl> - Node * * ref , <nl> + NodePtr recursiveDelete ( <nl> + NodePtr & ref , <nl> const unsigned char * key , <nl> uint32_t key_len , <nl> uint32_t depth ) ; <nl> | art : cut over to unique_ptr for nodes | facebook/watchman | 9036a273911ccb35ac8b7db6ed512140ef5476fc | 2016-11-01T00:34:58Z |
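The change above replaces manual `delete` calls and hand-written node destructors with `std::unique_ptr<Node, Deleter>` ownership, created through the `make_unique<T, Deleter>` overload added to make_unique.h. Below is a minimal sketch of that ownership pattern, using simplified stand-ins — `Node`, `Node4`, `Node16`, and `Deleter` here are hypothetical reductions, not the real `art_tree` members; only the `make_unique` overload mirrors the shape in the diff:

```cpp
#include <cstdio>
#include <memory>
#include <utility>

namespace watchman {
// Same shape as the overload added to make_unique.h: construct T on the heap
// and hand ownership to a unique_ptr parameterized on a caller-chosen deleter.
template <typename T, typename Deleter, typename... Args>
std::unique_ptr<T, Deleter> make_unique(Args&&... args) {
  return std::unique_ptr<T, Deleter>(new T(std::forward<Args>(args)...));
}
} // namespace watchman

// Hypothetical, simplified stand-ins for the art_tree node hierarchy.
struct Node {
  virtual ~Node() = default;
};
struct Node4 : Node {
  ~Node4() override { std::puts("old Node4 reclaimed"); }
};
struct Node16 : Node {
  explicit Node16(Node4&&) { std::puts("Node16 built from Node4"); }
};

// Stateless deleter in the spirit of art_tree<V>::Deleter.
struct Deleter {
  void operator()(Node* n) const { delete n; }
};
using NodePtr = std::unique_ptr<Node, Deleter>;

int main() {
  NodePtr ref = watchman::make_unique<Node4, Deleter>();

  // "Grow" the node the way Node4::addChild does on overflow: build the
  // larger node from the old one, then assign into the owning slot. The
  // unique_ptr assignment frees the old Node4 -- no explicit `delete this`.
  auto* old4 = static_cast<Node4*>(ref.get());
  ref = watchman::make_unique<Node16, Deleter>(std::move(*old4));
  return 0;
}
```

Because the deleter is a stateless functor, `NodePtr` stays pointer-sized (empty-base optimization), so the per-node child arrays (`std::array<NodePtr, 4/16/48/256>`) keep the footprint of the raw-pointer arrays they replace.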
mmm a / xbmc / cores / VideoRenderers / VideoShaders / WinVideoFilter . cpp <nl> ppp b / xbmc / cores / VideoRenderers / VideoShaders / WinVideoFilter . cpp <nl> bool CConvolutionShaderSeparable : : ChooseIntermediateD3DFormat ( ) <nl> { <nl> DWORD usage = D3DUSAGE_RENDERTARGET ; <nl> <nl> - / / Try for higher precision than the output of the scaler to preserve quality <nl> - if ( g_Windowing . IsTextureFormatOk ( D3DFMT_A2R10G10B10 , usage ) ) m_IntermediateFormat = D3DFMT_A2R10G10B10 ; <nl> - else if ( g_Windowing . IsTextureFormatOk ( D3DFMT_A2B10G10R10 , usage ) ) m_IntermediateFormat = D3DFMT_A2B10G10R10 ; <nl> - else if ( g_Windowing . IsTextureFormatOk ( D3DFMT_A8R8G8B8 , usage ) ) m_IntermediateFormat = D3DFMT_A8R8G8B8 ; <nl> - else if ( g_Windowing . IsTextureFormatOk ( D3DFMT_A8B8G8R8 , usage ) ) m_IntermediateFormat = D3DFMT_A8B8G8R8 ; <nl> - else if ( g_Windowing . IsTextureFormatOk ( D3DFMT_X8R8G8B8 , usage ) ) m_IntermediateFormat = D3DFMT_X8R8G8B8 ; <nl> - else if ( g_Windowing . IsTextureFormatOk ( D3DFMT_X8B8G8R8 , usage ) ) m_IntermediateFormat = D3DFMT_X8B8G8R8 ; <nl> - else if ( g_Windowing . IsTextureFormatOk ( D3DFMT_R8G8B8 , usage ) ) m_IntermediateFormat = D3DFMT_R8G8B8 ; <nl> - else return false ; <nl> + / / Need a float texture , as the output of the first pass can contain negative values . <nl> + if ( g_Windowing . IsTextureFormatOk ( D3DFMT_A16B16G16R16F , usage ) ) m_IntermediateFormat = D3DFMT_A16B16G16R16F ; <nl> + else if ( g_Windowing . IsTextureFormatOk ( D3DFMT_A32B32G32R32F , usage ) ) m_IntermediateFormat = D3DFMT_A32B32G32R32F ; <nl> + else <nl> + { <nl> + CLog : : Log ( LOGNOTICE , __FUNCTION__ " : no float format available for the intermediate render target " ) ; <nl> + return false ; <nl> + } <nl> <nl> CLog : : Log ( LOGDEBUG , __FUNCTION__ " : format % i " , m_IntermediateFormat ) ; <nl> <nl> mmm a / xbmc / cores / VideoRenderers / WinRenderer . cpp <nl> ppp b / xbmc / cores / VideoRenderers / WinRenderer . cpp <nl> void CWinRenderer : : UpdatePSVideoFilter ( ) <nl> <nl> if ( m_bUseHQScaler ) <nl> { <nl> - switch ( m_scalingMethod ) <nl> - { <nl> - case VS_SCALINGMETHOD_CUBIC : <nl> - case VS_SCALINGMETHOD_LANCZOS2 : <nl> - m_scalerShader = new CConvolutionShader1Pass ( ) ; <nl> - break ; <nl> - default : <nl> - m_scalerShader = new CConvolutionShaderSeparable ( ) ; <nl> - break ; <nl> - } <nl> + / / First try the more efficient two pass convolution scaler <nl> + m_scalerShader = new CConvolutionShaderSeparable ( ) ; <nl> <nl> if ( ! m_scalerShader - > Create ( m_scalingMethod ) ) <nl> { <nl> SAFE_DELETE ( m_scalerShader ) ; <nl> - CGUIDialogKaiToast : : QueueNotification ( CGUIDialogKaiToast : : Error , " Video Renderering " , " Failed to init video scaler , falling back to bilinear scaling . " ) ; <nl> - m_bUseHQScaler = false ; <nl> + CLog : : Log ( LOGNOTICE , __FUNCTION__ " : two pass convolution shader init problem , falling back to one pass . " ) ; <nl> + } <nl> + <nl> + / / Fallback on the one pass version <nl> + if ( m_scalerShader = = NULL ) <nl> + { <nl> + m_scalerShader = new CConvolutionShader1Pass ( ) ; <nl> + <nl> + if ( ! m_scalerShader - > Create ( m_scalingMethod ) ) <nl> + { <nl> + SAFE_DELETE ( m_scalerShader ) ; <nl> + CGUIDialogKaiToast : : QueueNotification ( CGUIDialogKaiToast : : Error , " Video Renderering " , " Failed to init video scaler , falling back to bilinear scaling . 
" ) ; <nl> + m_bUseHQScaler = false ; <nl> + } <nl> } <nl> } <nl> <nl> | Changed the intermediate texture to float , because the result of the first pass can contain negative values . | xbmc/xbmc | 9e05344d7d6c4bde4a2f72b4a9c1d55c61470669 | 2011-09-02T04:02:08Z |
mmm a / docs / webgl / filament . js <nl> ppp b / docs / webgl / filament . js <nl> var Filament = ( <nl> function ( Filament ) { <nl> Filament = Filament | | { } ; <nl> <nl> - var Module = typeof Filament ! = = " undefined " ? Filament : { } ; var moduleOverrides = { } ; var key ; for ( key in Module ) { if ( Module . hasOwnProperty ( key ) ) { moduleOverrides [ key ] = Module [ key ] } } Module [ " arguments " ] = [ ] ; Module [ " thisProgram " ] = " . / this . program " ; Module [ " quit " ] = function ( status , toThrow ) { throw toThrow } ; Module [ " preRun " ] = [ ] ; Module [ " postRun " ] = [ ] ; var ENVIRONMENT_IS_WEB = false ; var ENVIRONMENT_IS_WORKER = false ; var ENVIRONMENT_IS_NODE = false ; var ENVIRONMENT_IS_SHELL = false ; ENVIRONMENT_IS_WEB = typeof window = = = " object " ; ENVIRONMENT_IS_WORKER = typeof importScripts = = = " function " ; ENVIRONMENT_IS_NODE = typeof process = = = " object " & & typeof require = = = " function " & & ! ENVIRONMENT_IS_WEB & & ! ENVIRONMENT_IS_WORKER ; ENVIRONMENT_IS_SHELL = ! ENVIRONMENT_IS_WEB & & ! ENVIRONMENT_IS_NODE & & ! ENVIRONMENT_IS_WORKER ; var scriptDirectory = " " ; function locateFile ( path ) { if ( Module [ " locateFile " ] ) { return Module [ " locateFile " ] ( path , scriptDirectory ) } else { return scriptDirectory + path } } if ( ENVIRONMENT_IS_NODE ) { scriptDirectory = __dirname + " / " ; var nodeFS ; var nodePath ; Module [ " read " ] = function shell_read ( filename , binary ) { var ret ; if ( ! nodeFS ) nodeFS = require ( " fs " ) ; if ( ! nodePath ) nodePath = require ( " path " ) ; filename = nodePath [ " normalize " ] ( filename ) ; ret = nodeFS [ " readFileSync " ] ( filename ) ; return binary ? ret : ret . toString ( ) } ; Module [ " readBinary " ] = function readBinary ( filename ) { var ret = Module [ " read " ] ( filename , true ) ; if ( ! ret . buffer ) { ret = new Uint8Array ( ret ) } assert ( ret . buffer ) ; return ret } ; if ( process [ " argv " ] . length > 1 ) { Module [ " thisProgram " ] = process [ " argv " ] [ 1 ] . replace ( / \ \ / g , " / " ) } Module [ " arguments " ] = process [ " argv " ] . slice ( 2 ) ; process [ " on " ] ( " uncaughtException " , function ( ex ) { if ( ! ( ex instanceof ExitStatus ) ) { throw ex } } ) ; process [ " on " ] ( " unhandledRejection " , abort ) ; Module [ " quit " ] = function ( status ) { process [ " exit " ] ( status ) } ; Module [ " inspect " ] = function ( ) { return " [ Emscripten Module object ] " } } else if ( ENVIRONMENT_IS_SHELL ) { if ( typeof read ! = " undefined " ) { Module [ " read " ] = function shell_read ( f ) { return read ( f ) } } Module [ " readBinary " ] = function readBinary ( f ) { var data ; if ( typeof readbuffer = = = " function " ) { return new Uint8Array ( readbuffer ( f ) ) } data = read ( f , " binary " ) ; assert ( typeof data = = = " object " ) ; return data } ; if ( typeof scriptArgs ! = " undefined " ) { Module [ " arguments " ] = scriptArgs } else if ( typeof arguments ! = " undefined " ) { Module [ " arguments " ] = arguments } if ( typeof quit = = = " function " ) { Module [ " quit " ] = function ( status ) { quit ( status ) } } } else if ( ENVIRONMENT_IS_WEB | | ENVIRONMENT_IS_WORKER ) { if ( ENVIRONMENT_IS_WORKER ) { scriptDirectory = self . location . href } else if ( document . currentScript ) { scriptDirectory = document . currentScript . src } if ( scriptDirectory . indexOf ( " blob : " ) ! = = 0 ) { scriptDirectory = scriptDirectory . substr ( 0 , scriptDirectory . 
lastIndexOf ( " / " ) + 1 ) } else { scriptDirectory = " " } Module [ " read " ] = function shell_read ( url ) { var xhr = new XMLHttpRequest ; xhr . open ( " GET " , url , false ) ; xhr . send ( null ) ; return xhr . responseText } ; if ( ENVIRONMENT_IS_WORKER ) { Module [ " readBinary " ] = function readBinary ( url ) { var xhr = new XMLHttpRequest ; xhr . open ( " GET " , url , false ) ; xhr . responseType = " arraybuffer " ; xhr . send ( null ) ; return new Uint8Array ( xhr . response ) } } Module [ " readAsync " ] = function readAsync ( url , onload , onerror ) { var xhr = new XMLHttpRequest ; xhr . open ( " GET " , url , true ) ; xhr . responseType = " arraybuffer " ; xhr . onload = function xhr_onload ( ) { if ( xhr . status = = 200 | | xhr . status = = 0 & & xhr . response ) { onload ( xhr . response ) ; return } onerror ( ) } ; xhr . onerror = onerror ; xhr . send ( null ) } ; Module [ " setWindowTitle " ] = function ( title ) { document . title = title } } else { } var out = Module [ " print " ] | | ( typeof console ! = = " undefined " ? console . log . bind ( console ) : typeof print ! = = " undefined " ? print : null ) ; var err = Module [ " printErr " ] | | ( typeof printErr ! = = " undefined " ? printErr : typeof console ! = = " undefined " & & console . warn . bind ( console ) | | out ) ; for ( key in moduleOverrides ) { if ( moduleOverrides . hasOwnProperty ( key ) ) { Module [ key ] = moduleOverrides [ key ] } } moduleOverrides = undefined ; var STACK_ALIGN = 16 ; function dynamicAlloc ( size ) { var ret = HEAP32 [ DYNAMICTOP_PTR > > 2 ] ; var end = ret + size + 15 & - 16 ; if ( end < = _emscripten_get_heap_size ( ) ) { HEAP32 [ DYNAMICTOP_PTR > > 2 ] = end } else { var success = _emscripten_resize_heap ( end ) ; if ( ! success ) return 0 } return ret } function getNativeTypeSize ( type ) { switch ( type ) { case " i1 " : case " i8 " : return 1 ; case " i16 " : return 2 ; case " i32 " : return 4 ; case " i64 " : return 8 ; case " float " : return 4 ; case " double " : return 8 ; default : { if ( type [ type . length - 1 ] = = = " * " ) { return 4 } else if ( type [ 0 ] = = = " i " ) { var bits = parseInt ( type . substr ( 1 ) ) ; assert ( bits % 8 = = = 0 , " getNativeTypeSize invalid bits " + bits + " , type " + type ) ; return bits / 8 } else { return 0 } } } } function warnOnce ( text ) { if ( ! warnOnce . shown ) warnOnce . shown = { } ; if ( ! warnOnce . shown [ text ] ) { warnOnce . shown [ text ] = 1 ; err ( text ) } } var asm2wasmImports = { " f64 - rem " : function ( x , y ) { return x % y } , " debugger " : function ( ) { debugger } } ; var jsCallStartIndex = 1 ; var functionPointers = new Array ( 0 ) ; var funcWrappers = { } ; function makeBigInt ( low , high , unsigned ) { return unsigned ? + ( low > > > 0 ) + + ( high > > > 0 ) * 4294967296 : + ( low > > > 0 ) + + ( high | 0 ) * 4294967296 } function dynCall ( sig , ptr , args ) { if ( args & & args . length ) { return Module [ " dynCall_ " + sig ] . apply ( null , [ ptr ] . concat ( args ) ) } else { return Module [ " dynCall_ " + sig ] . call ( null , ptr ) } } var tempRet0 = 0 ; var setTempRet0 = function ( value ) { tempRet0 = value } ; var getTempRet0 = function ( ) { return tempRet0 } ; if ( typeof WebAssembly ! = = " object " ) { err ( " no native wasm support detected " ) } var wasmMemory ; var wasmTable ; var ABORT = false ; var EXITSTATUS = 0 ; function assert ( condition , text ) { if ( ! 
condition ) { abort ( " Assertion failed : " + text ) } } function getCFunc ( ident ) { var func = Module [ " _ " + ident ] ; assert ( func , " Cannot call unknown function " + ident + " , make sure it is exported " ) ; return func } function ccall ( ident , returnType , argTypes , args , opts ) { var toC = { " string " : function ( str ) { var ret = 0 ; if ( str ! = = null & & str ! = = undefined & & str ! = = 0 ) { var len = ( str . length < < 2 ) + 1 ; ret = stackAlloc ( len ) ; stringToUTF8 ( str , ret , len ) } return ret } , " array " : function ( arr ) { var ret = stackAlloc ( arr . length ) ; writeArrayToMemory ( arr , ret ) ; return ret } } ; function convertReturnValue ( ret ) { if ( returnType = = = " string " ) return UTF8ToString ( ret ) ; if ( returnType = = = " boolean " ) return Boolean ( ret ) ; return ret } var func = getCFunc ( ident ) ; var cArgs = [ ] ; var stack = 0 ; if ( args ) { for ( var i = 0 ; i < args . length ; i + + ) { var converter = toC [ argTypes [ i ] ] ; if ( converter ) { if ( stack = = = 0 ) stack = stackSave ( ) ; cArgs [ i ] = converter ( args [ i ] ) } else { cArgs [ i ] = args [ i ] } } } var ret = func . apply ( null , cArgs ) ; ret = convertReturnValue ( ret ) ; if ( stack ! = = 0 ) stackRestore ( stack ) ; return ret } function setValue ( ptr , value , type , noSafe ) { type = type | | " i8 " ; if ( type . charAt ( type . length - 1 ) = = = " * " ) type = " i32 " ; switch ( type ) { case " i1 " : HEAP8 [ ptr > > 0 ] = value ; break ; case " i8 " : HEAP8 [ ptr > > 0 ] = value ; break ; case " i16 " : HEAP16 [ ptr > > 1 ] = value ; break ; case " i32 " : HEAP32 [ ptr > > 2 ] = value ; break ; case " i64 " : tempI64 = [ value > > > 0 , ( tempDouble = value , + Math_abs ( tempDouble ) > = 1 ? tempDouble > 0 ? ( Math_min ( + Math_floor ( tempDouble / 4294967296 ) , 4294967295 ) | 0 ) > > > 0 : ~ ~ + Math_ceil ( ( tempDouble - + ( ~ ~ tempDouble > > > 0 ) ) / 4294967296 ) > > > 0 : 0 ) ] , HEAP32 [ ptr > > 2 ] = tempI64 [ 0 ] , HEAP32 [ ptr + 4 > > 2 ] = tempI64 [ 1 ] ; break ; case " float " : HEAPF32 [ ptr > > 2 ] = value ; break ; case " double " : HEAPF64 [ ptr > > 3 ] = value ; break ; default : abort ( " invalid type for setValue : " + type ) } } var ALLOC_NONE = 3 ; function allocate ( slab , types , allocator , ptr ) { var zeroinit , size ; if ( typeof slab = = = " number " ) { zeroinit = true ; size = slab } else { zeroinit = false ; size = slab . length } var singleType = typeof types = = = " string " ? types : null ; var ret ; if ( allocator = = ALLOC_NONE ) { ret = ptr } else { ret = [ _malloc , stackAlloc , dynamicAlloc ] [ allocator ] ( Math . max ( size , singleType ? 1 : types . length ) ) } if ( zeroinit ) { var stop ; ptr = ret ; assert ( ( ret & 3 ) = = 0 ) ; stop = ret + ( size & ~ 3 ) ; for ( ; ptr < stop ; ptr + = 4 ) { HEAP32 [ ptr > > 2 ] = 0 } stop = ret + size ; while ( ptr < stop ) { HEAP8 [ ptr + + > > 0 ] = 0 } return ret } if ( singleType = = = " i8 " ) { if ( slab . subarray | | slab . slice ) { HEAPU8 . set ( slab , ret ) } else { HEAPU8 . set ( new Uint8Array ( slab ) , ret ) } return ret } var i = 0 , type , typeSize , previousType ; while ( i < size ) { var curr = slab [ i ] ; type = singleType | | types [ i ] ; if ( type = = = 0 ) { i + + ; continue } if ( type = = " i64 " ) type = " i32 " ; setValue ( ret + i , curr , type ) ; if ( previousType ! = = type ) { typeSize = getNativeTypeSize ( type ) ; previousType = type } i + = typeSize } return ret } var UTF8Decoder = typeof TextDecoder ! = = " undefined " ? 
new TextDecoder ( " utf8 " ) : undefined ; function UTF8ArrayToString ( u8Array , idx , maxBytesToRead ) { var endIdx = idx + maxBytesToRead ; var endPtr = idx ; while ( u8Array [ endPtr ] & & ! ( endPtr > = endIdx ) ) + + endPtr ; if ( endPtr - idx > 16 & & u8Array . subarray & & UTF8Decoder ) { return UTF8Decoder . decode ( u8Array . subarray ( idx , endPtr ) ) } else { var str = " " ; while ( idx < endPtr ) { var u0 = u8Array [ idx + + ] ; if ( ! ( u0 & 128 ) ) { str + = String . fromCharCode ( u0 ) ; continue } var u1 = u8Array [ idx + + ] & 63 ; if ( ( u0 & 224 ) = = 192 ) { str + = String . fromCharCode ( ( u0 & 31 ) < < 6 | u1 ) ; continue } var u2 = u8Array [ idx + + ] & 63 ; if ( ( u0 & 240 ) = = 224 ) { u0 = ( u0 & 15 ) < < 12 | u1 < < 6 | u2 } else { u0 = ( u0 & 7 ) < < 18 | u1 < < 12 | u2 < < 6 | u8Array [ idx + + ] & 63 } if ( u0 < 65536 ) { str + = String . fromCharCode ( u0 ) } else { var ch = u0 - 65536 ; str + = String . fromCharCode ( 55296 | ch > > 10 , 56320 | ch & 1023 ) } } } return str } function UTF8ToString ( ptr , maxBytesToRead ) { return ptr ? UTF8ArrayToString ( HEAPU8 , ptr , maxBytesToRead ) : " " } function stringToUTF8Array ( str , outU8Array , outIdx , maxBytesToWrite ) { if ( ! ( maxBytesToWrite > 0 ) ) return 0 ; var startIdx = outIdx ; var endIdx = outIdx + maxBytesToWrite - 1 ; for ( var i = 0 ; i < str . length ; + + i ) { var u = str . charCodeAt ( i ) ; if ( u > = 55296 & & u < = 57343 ) { var u1 = str . charCodeAt ( + + i ) ; u = 65536 + ( ( u & 1023 ) < < 10 ) | u1 & 1023 } if ( u < = 127 ) { if ( outIdx > = endIdx ) break ; outU8Array [ outIdx + + ] = u } else if ( u < = 2047 ) { if ( outIdx + 1 > = endIdx ) break ; outU8Array [ outIdx + + ] = 192 | u > > 6 ; outU8Array [ outIdx + + ] = 128 | u & 63 } else if ( u < = 65535 ) { if ( outIdx + 2 > = endIdx ) break ; outU8Array [ outIdx + + ] = 224 | u > > 12 ; outU8Array [ outIdx + + ] = 128 | u > > 6 & 63 ; outU8Array [ outIdx + + ] = 128 | u & 63 } else { if ( outIdx + 3 > = endIdx ) break ; outU8Array [ outIdx + + ] = 240 | u > > 18 ; outU8Array [ outIdx + + ] = 128 | u > > 12 & 63 ; outU8Array [ outIdx + + ] = 128 | u > > 6 & 63 ; outU8Array [ outIdx + + ] = 128 | u & 63 } } outU8Array [ outIdx ] = 0 ; return outIdx - startIdx } function stringToUTF8 ( str , outPtr , maxBytesToWrite ) { return stringToUTF8Array ( str , HEAPU8 , outPtr , maxBytesToWrite ) } function lengthBytesUTF8 ( str ) { var len = 0 ; for ( var i = 0 ; i < str . length ; + + i ) { var u = str . charCodeAt ( i ) ; if ( u > = 55296 & & u < = 57343 ) u = 65536 + ( ( u & 1023 ) < < 10 ) | str . charCodeAt ( + + i ) & 1023 ; if ( u < = 127 ) + + len ; else if ( u < = 2047 ) len + = 2 ; else if ( u < = 65535 ) len + = 3 ; else len + = 4 } return len } var UTF16Decoder = typeof TextDecoder ! = = " undefined " ? new TextDecoder ( " utf - 16le " ) : undefined ; function writeArrayToMemory ( array , buffer ) { HEAP8 . set ( array , buffer ) } function writeAsciiToMemory ( str , buffer , dontAddNull ) { for ( var i = 0 ; i < str . length ; + + i ) { HEAP8 [ buffer + + > > 0 ] = str . charCodeAt ( i ) } if ( ! dontAddNull ) HEAP8 [ buffer > > 0 ] = 0 } function demangle ( func ) { return func } function demangleAll ( text ) { var regex = / __Z [ \ w \ d_ ] + / g ; return text . replace ( regex , function ( x ) { var y = demangle ( x ) ; return x = = = y ? x : y + " [ " + x + " ] " } ) } function jsStackTrace ( ) { var err = new Error ; if ( ! err . stack ) { try { throw new Error ( 0 ) } catch ( e ) { err = e } if ( ! err . 
stack ) { return " ( no stack trace available ) " } } return err . stack . toString ( ) } function stackTrace ( ) { var js = jsStackTrace ( ) ; if ( Module [ " extraStackTrace " ] ) js + = " \ n " + Module [ " extraStackTrace " ] ( ) ; return demangleAll ( js ) } var PAGE_SIZE = 16384 ; var WASM_PAGE_SIZE = 65536 ; function alignUp ( x , multiple ) { if ( x % multiple > 0 ) { x + = multiple - x % multiple } return x } var buffer , HEAP8 , HEAPU8 , HEAP16 , HEAPU16 , HEAP32 , HEAPU32 , HEAPF32 , HEAPF64 ; function updateGlobalBuffer ( buf ) { Module [ " buffer " ] = buffer = buf } function updateGlobalBufferViews ( ) { Module [ " HEAP8 " ] = HEAP8 = new Int8Array ( buffer ) ; Module [ " HEAP16 " ] = HEAP16 = new Int16Array ( buffer ) ; Module [ " HEAP32 " ] = HEAP32 = new Int32Array ( buffer ) ; Module [ " HEAPU8 " ] = HEAPU8 = new Uint8Array ( buffer ) ; Module [ " HEAPU16 " ] = HEAPU16 = new Uint16Array ( buffer ) ; Module [ " HEAPU32 " ] = HEAPU32 = new Uint32Array ( buffer ) ; Module [ " HEAPF32 " ] = HEAPF32 = new Float32Array ( buffer ) ; Module [ " HEAPF64 " ] = HEAPF64 = new Float64Array ( buffer ) } var STACK_BASE = 1324160 , DYNAMIC_BASE = 6567040 , DYNAMICTOP_PTR = 1323904 ; var TOTAL_STACK = 5242880 ; var TOTAL_MEMORY = Module [ " TOTAL_MEMORY " ] | | 16777216 ; if ( TOTAL_MEMORY < TOTAL_STACK ) err ( " TOTAL_MEMORY should be larger than TOTAL_STACK , was " + TOTAL_MEMORY + " ! ( TOTAL_STACK = " + TOTAL_STACK + " ) " ) ; if ( Module [ " buffer " ] ) { buffer = Module [ " buffer " ] } else { if ( typeof WebAssembly = = = " object " & & typeof WebAssembly . Memory = = = " function " ) { wasmMemory = new WebAssembly . Memory ( { " initial " : TOTAL_MEMORY / WASM_PAGE_SIZE } ) ; buffer = wasmMemory . buffer } else { buffer = new ArrayBuffer ( TOTAL_MEMORY ) } Module [ " buffer " ] = buffer } updateGlobalBufferViews ( ) ; HEAP32 [ DYNAMICTOP_PTR > > 2 ] = DYNAMIC_BASE ; function callRuntimeCallbacks ( callbacks ) { while ( callbacks . length > 0 ) { var callback = callbacks . shift ( ) ; if ( typeof callback = = " function " ) { callback ( ) ; continue } var func = callback . func ; if ( typeof func = = = " number " ) { if ( callback . arg = = = undefined ) { Module [ " dynCall_v " ] ( func ) } else { Module [ " dynCall_vi " ] ( func , callback . arg ) } } else { func ( callback . arg = = = undefined ? null : callback . arg ) } } } var __ATPRERUN__ = [ ] ; var __ATINIT__ = [ ] ; var __ATMAIN__ = [ ] ; var __ATPOSTRUN__ = [ ] ; var runtimeInitialized = false ; var runtimeExited = false ; function preRun ( ) { if ( Module [ " preRun " ] ) { if ( typeof Module [ " preRun " ] = = " function " ) Module [ " preRun " ] = [ Module [ " preRun " ] ] ; while ( Module [ " preRun " ] . length ) { addOnPreRun ( Module [ " preRun " ] . shift ( ) ) } } callRuntimeCallbacks ( __ATPRERUN__ ) } function ensureInitRuntime ( ) { if ( runtimeInitialized ) return ; runtimeInitialized = true ; if ( ! Module [ " noFSInit " ] & & ! FS . init . initialized ) FS . init ( ) ; TTY . init ( ) ; callRuntimeCallbacks ( __ATINIT__ ) } function preMain ( ) { FS . ignorePermissions = false ; callRuntimeCallbacks ( __ATMAIN__ ) } function exitRuntime ( ) { runtimeExited = true } function postRun ( ) { if ( Module [ " postRun " ] ) { if ( typeof Module [ " postRun " ] = = " function " ) Module [ " postRun " ] = [ Module [ " postRun " ] ] ; while ( Module [ " postRun " ] . length ) { addOnPostRun ( Module [ " postRun " ] . 
shift ( ) ) } } callRuntimeCallbacks ( __ATPOSTRUN__ ) } function addOnPreRun ( cb ) { __ATPRERUN__ . unshift ( cb ) } function addOnPostRun ( cb ) { __ATPOSTRUN__ . unshift ( cb ) } var Math_abs = Math . abs ; var Math_ceil = Math . ceil ; var Math_floor = Math . floor ; var Math_min = Math . min ; var runDependencies = 0 ; var runDependencyWatcher = null ; var dependenciesFulfilled = null ; function getUniqueRunDependency ( id ) { return id } function addRunDependency ( id ) { runDependencies + + ; if ( Module [ " monitorRunDependencies " ] ) { Module [ " monitorRunDependencies " ] ( runDependencies ) } } function removeRunDependency ( id ) { runDependencies - - ; if ( Module [ " monitorRunDependencies " ] ) { Module [ " monitorRunDependencies " ] ( runDependencies ) } if ( runDependencies = = 0 ) { if ( runDependencyWatcher ! = = null ) { clearInterval ( runDependencyWatcher ) ; runDependencyWatcher = null } if ( dependenciesFulfilled ) { var callback = dependenciesFulfilled ; dependenciesFulfilled = null ; callback ( ) } } } Module [ " preloadedImages " ] = { } ; Module [ " preloadedAudios " ] = { } ; var dataURIPrefix = " data : application / octet - stream ; base64 , " ; function isDataURI ( filename ) { return String . prototype . startsWith ? filename . startsWith ( dataURIPrefix ) : filename . indexOf ( dataURIPrefix ) = = = 0 } var wasmBinaryFile = " filament . wasm " ; if ( ! isDataURI ( wasmBinaryFile ) ) { wasmBinaryFile = locateFile ( wasmBinaryFile ) } function getBinary ( ) { try { if ( Module [ " wasmBinary " ] ) { return new Uint8Array ( Module [ " wasmBinary " ] ) } if ( Module [ " readBinary " ] ) { return Module [ " readBinary " ] ( wasmBinaryFile ) } else { throw " both async and sync fetching of the wasm failed " } } catch ( err ) { abort ( err ) } } function getBinaryPromise ( ) { if ( ! Module [ " wasmBinary " ] & & ( ENVIRONMENT_IS_WEB | | ENVIRONMENT_IS_WORKER ) & & typeof fetch = = = " function " ) { return fetch ( wasmBinaryFile , { credentials : " same - origin " } ) . then ( function ( response ) { if ( ! response [ " ok " ] ) { throw " failed to load wasm binary file at ' " + wasmBinaryFile + " ' " } return response [ " arrayBuffer " ] ( ) } ) . catch ( function ( ) { return getBinary ( ) } ) } return new Promise ( function ( resolve , reject ) { resolve ( getBinary ( ) ) } ) } function createWasm ( env ) { var info = { " env " : env , " global " : { " NaN " : NaN , Infinity : Infinity } , " global . Math " : Math , " asm2wasm " : asm2wasmImports } ; function receiveInstance ( instance , module ) { var exports = instance . exports ; Module [ " asm " ] = exports ; removeRunDependency ( " wasm - instantiate " ) } addRunDependency ( " wasm - instantiate " ) ; if ( Module [ " instantiateWasm " ] ) { try { return Module [ " instantiateWasm " ] ( info , receiveInstance ) } catch ( e ) { err ( " Module . instantiateWasm callback failed with error : " + e ) ; return false } } function receiveInstantiatedSource ( output ) { receiveInstance ( output [ " instance " ] ) } function instantiateArrayBuffer ( receiver ) { getBinaryPromise ( ) . then ( function ( binary ) { return WebAssembly . instantiate ( binary , info ) } ) . then ( receiver , function ( reason ) { err ( " failed to asynchronously prepare wasm : " + reason ) ; abort ( reason ) } ) } if ( ! Module [ " wasmBinary " ] & & typeof WebAssembly . instantiateStreaming = = = " function " & & ! isDataURI ( wasmBinaryFile ) & & typeof fetch = = = " function " ) { WebAssembly . 
instantiateStreaming ( fetch ( wasmBinaryFile , { credentials : " same - origin " } ) , info ) . then ( receiveInstantiatedSource , function ( reason ) { err ( " wasm streaming compile failed : " + reason ) ; err ( " falling back to ArrayBuffer instantiation " ) ; instantiateArrayBuffer ( receiveInstantiatedSource ) } ) } else { instantiateArrayBuffer ( receiveInstantiatedSource ) } return { } } Module [ " asm " ] = function ( global , env , providedBuffer ) { env [ " memory " ] = wasmMemory ; env [ " table " ] = wasmTable = new WebAssembly . Table ( { " initial " : 2124 , " maximum " : 2124 , " element " : " anyfunc " } ) ; env [ " __memory_base " ] = 1024 ; env [ " __table_base " ] = 0 ; var exports = createWasm ( env ) ; return exports } ; __ATINIT__ . push ( { func : function ( ) { globalCtors ( ) } } ) ; var tempDoublePtr = 1324144 ; function ___atomic_compare_exchange_8 ( ptr , expected , desiredl , desiredh , weak , success_memmodel , failure_memmodel ) { var pl = HEAP32 [ ptr > > 2 ] ; var ph = HEAP32 [ ptr + 4 > > 2 ] ; var el = HEAP32 [ expected > > 2 ] ; var eh = HEAP32 [ expected + 4 > > 2 ] ; if ( pl = = = el & & ph = = = eh ) { HEAP32 [ ptr > > 2 ] = desiredl ; HEAP32 [ ptr + 4 > > 2 ] = desiredh ; return 1 } else { HEAP32 [ expected > > 2 ] = pl ; HEAP32 [ expected + 4 > > 2 ] = ph ; return 0 } } function __ZSt18uncaught_exceptionv ( ) { return ! ! __ZSt18uncaught_exceptionv . uncaught_exception } function ___cxa_free_exception ( ptr ) { try { return _free ( ptr ) } catch ( e ) { } } var EXCEPTIONS = { last : 0 , caught : [ ] , infos : { } , deAdjust : function ( adjusted ) { if ( ! adjusted | | EXCEPTIONS . infos [ adjusted ] ) return adjusted ; for ( var key in EXCEPTIONS . infos ) { var ptr = + key ; var adj = EXCEPTIONS . infos [ ptr ] . adjusted ; var len = adj . length ; for ( var i = 0 ; i < len ; i + + ) { if ( adj [ i ] = = = adjusted ) { return ptr } } } return adjusted } , addRef : function ( ptr ) { if ( ! ptr ) return ; var info = EXCEPTIONS . infos [ ptr ] ; info . refcount + + } , decRef : function ( ptr ) { if ( ! ptr ) return ; var info = EXCEPTIONS . infos [ ptr ] ; assert ( info . refcount > 0 ) ; info . refcount - - ; if ( info . refcount = = = 0 & & ! info . rethrown ) { if ( info . destructor ) { Module [ " dynCall_vi " ] ( info . destructor , ptr ) } delete EXCEPTIONS . infos [ ptr ] ; ___cxa_free_exception ( ptr ) } } , clearRef : function ( ptr ) { if ( ! ptr ) return ; var info = EXCEPTIONS . infos [ ptr ] ; info . refcount = 0 } } ; function ___cxa_begin_catch ( ptr ) { var info = EXCEPTIONS . infos [ ptr ] ; if ( info & & ! info . caught ) { info . caught = true ; __ZSt18uncaught_exceptionv . uncaught_exception - - } if ( info ) info . rethrown = false ; EXCEPTIONS . caught . push ( ptr ) ; EXCEPTIONS . addRef ( EXCEPTIONS . deAdjust ( ptr ) ) ; return ptr } function ___cxa_pure_virtual ( ) { ABORT = true ; throw " Pure virtual function called ! " } function ___resumeException ( ptr ) { if ( ! EXCEPTIONS . last ) { EXCEPTIONS . last = ptr } throw ptr } function ___cxa_find_matching_catch ( ) { var thrown = EXCEPTIONS . last ; if ( ! thrown ) { return ( setTempRet0 ( 0 ) , 0 ) | 0 } var info = EXCEPTIONS . infos [ thrown ] ; var throwntype = info . type ; if ( ! throwntype ) { return ( setTempRet0 ( 0 ) , thrown ) | 0 } var typeArray = Array . prototype . slice . call ( arguments ) ; var pointer = Module [ " ___cxa_is_pointer_type " ] ( throwntype ) ; if ( ! ___cxa_find_matching_catch . buffer ) ___cxa_find_matching_catch . 
buffer = _malloc ( 4 ) ; HEAP32 [ ___cxa_find_matching_catch . buffer > > 2 ] = thrown ; thrown = ___cxa_find_matching_catch . buffer ; for ( var i = 0 ; i < typeArray . length ; i + + ) { if ( typeArray [ i ] & & Module [ " ___cxa_can_catch " ] ( typeArray [ i ] , throwntype , thrown ) ) { thrown = HEAP32 [ thrown > > 2 ] ; info . adjusted . push ( thrown ) ; return ( setTempRet0 ( typeArray [ i ] ) , thrown ) | 0 } } thrown = HEAP32 [ thrown > > 2 ] ; return ( setTempRet0 ( throwntype ) , thrown ) | 0 } function ___gxx_personality_v0 ( ) { } function ___setErrNo ( value ) { if ( Module [ " ___errno_location " ] ) HEAP32 [ Module [ " ___errno_location " ] ( ) > > 2 ] = value ; return value } var PATH = { splitPath : function ( filename ) { var splitPathRe = / ^ ( \ / ? | ) ( [ \ s \ S ] * ? ) ( ( ? : \ . { 1 , 2 } | [ ^ \ / ] + ? | ) ( \ . [ ^ . \ / ] * | ) ) ( ? : [ \ / ] * ) $ / ; return splitPathRe . exec ( filename ) . slice ( 1 ) } , normalizeArray : function ( parts , allowAboveRoot ) { var up = 0 ; for ( var i = parts . length - 1 ; i > = 0 ; i - - ) { var last = parts [ i ] ; if ( last = = = " . " ) { parts . splice ( i , 1 ) } else if ( last = = = " . . " ) { parts . splice ( i , 1 ) ; up + + } else if ( up ) { parts . splice ( i , 1 ) ; up - - } } if ( allowAboveRoot ) { for ( ; up ; up - - ) { parts . unshift ( " . . " ) } } return parts } , normalize : function ( path ) { var isAbsolute = path . charAt ( 0 ) = = = " / " , trailingSlash = path . substr ( - 1 ) = = = " / " ; path = PATH . normalizeArray ( path . split ( " / " ) . filter ( function ( p ) { return ! ! p } ) , ! isAbsolute ) . join ( " / " ) ; if ( ! path & & ! isAbsolute ) { path = " . " } if ( path & & trailingSlash ) { path + = " / " } return ( isAbsolute ? " / " : " " ) + path } , dirname : function ( path ) { var result = PATH . splitPath ( path ) , root = result [ 0 ] , dir = result [ 1 ] ; if ( ! root & & ! dir ) { return " . " } if ( dir ) { dir = dir . substr ( 0 , dir . length - 1 ) } return root + dir } , basename : function ( path ) { if ( path = = = " / " ) return " / " ; var lastSlash = path . lastIndexOf ( " / " ) ; if ( lastSlash = = = - 1 ) return path ; return path . substr ( lastSlash + 1 ) } , extname : function ( path ) { return PATH . splitPath ( path ) [ 3 ] } , join : function ( ) { var paths = Array . prototype . slice . call ( arguments , 0 ) ; return PATH . normalize ( paths . join ( " / " ) ) } , join2 : function ( l , r ) { return PATH . normalize ( l + " / " + r ) } , resolve : function ( ) { var resolvedPath = " " , resolvedAbsolute = false ; for ( var i = arguments . length - 1 ; i > = - 1 & & ! resolvedAbsolute ; i - - ) { var path = i > = 0 ? arguments [ i ] : FS . cwd ( ) ; if ( typeof path ! = = " string " ) { throw new TypeError ( " Arguments to path . resolve must be strings " ) } else if ( ! path ) { return " " } resolvedPath = path + " / " + resolvedPath ; resolvedAbsolute = path . charAt ( 0 ) = = = " / " } resolvedPath = PATH . normalizeArray ( resolvedPath . split ( " / " ) . filter ( function ( p ) { return ! ! p } ) , ! resolvedAbsolute ) . join ( " / " ) ; return ( resolvedAbsolute ? " / " : " " ) + resolvedPath | | " . " } , relative : function ( from , to ) { from = PATH . resolve ( from ) . substr ( 1 ) ; to = PATH . resolve ( to ) . substr ( 1 ) ; function trim ( arr ) { var start = 0 ; for ( ; start < arr . length ; start + + ) { if ( arr [ start ] ! = = " " ) break } var end = arr . length - 1 ; for ( ; end > = 0 ; end - - ) { if ( arr [ end ] ! 
= = " " ) break } if ( start > end ) return [ ] ; return arr . slice ( start , end - start + 1 ) } var fromParts = trim ( from . split ( " / " ) ) ; var toParts = trim ( to . split ( " / " ) ) ; var length = Math . min ( fromParts . length , toParts . length ) ; var samePartsLength = length ; for ( var i = 0 ; i < length ; i + + ) { if ( fromParts [ i ] ! = = toParts [ i ] ) { samePartsLength = i ; break } } var outputParts = [ ] ; for ( var i = samePartsLength ; i < fromParts . length ; i + + ) { outputParts . push ( " . . " ) } outputParts = outputParts . concat ( toParts . slice ( samePartsLength ) ) ; return outputParts . join ( " / " ) } } ; var TTY = { ttys : [ ] , init : function ( ) { } , shutdown : function ( ) { } , register : function ( dev , ops ) { TTY . ttys [ dev ] = { input : [ ] , output : [ ] , ops : ops } ; FS . registerDevice ( dev , TTY . stream_ops ) } , stream_ops : { open : function ( stream ) { var tty = TTY . ttys [ stream . node . rdev ] ; if ( ! tty ) { throw new FS . ErrnoError ( ERRNO_CODES . ENODEV ) } stream . tty = tty ; stream . seekable = false } , close : function ( stream ) { stream . tty . ops . flush ( stream . tty ) } , flush : function ( stream ) { stream . tty . ops . flush ( stream . tty ) } , read : function ( stream , buffer , offset , length , pos ) { if ( ! stream . tty | | ! stream . tty . ops . get_char ) { throw new FS . ErrnoError ( ERRNO_CODES . ENXIO ) } var bytesRead = 0 ; for ( var i = 0 ; i < length ; i + + ) { var result ; try { result = stream . tty . ops . get_char ( stream . tty ) } catch ( e ) { throw new FS . ErrnoError ( ERRNO_CODES . EIO ) } if ( result = = = undefined & & bytesRead = = = 0 ) { throw new FS . ErrnoError ( ERRNO_CODES . EAGAIN ) } if ( result = = = null | | result = = = undefined ) break ; bytesRead + + ; buffer [ offset + i ] = result } if ( bytesRead ) { stream . node . timestamp = Date . now ( ) } return bytesRead } , write : function ( stream , buffer , offset , length , pos ) { if ( ! stream . tty | | ! stream . tty . ops . put_char ) { throw new FS . ErrnoError ( ERRNO_CODES . ENXIO ) } try { for ( var i = 0 ; i < length ; i + + ) { stream . tty . ops . put_char ( stream . tty , buffer [ offset + i ] ) } } catch ( e ) { throw new FS . ErrnoError ( ERRNO_CODES . EIO ) } if ( length ) { stream . node . timestamp = Date . now ( ) } return i } } , default_tty_ops : { get_char : function ( tty ) { if ( ! tty . input . length ) { var result = null ; if ( ENVIRONMENT_IS_NODE ) { var BUFSIZE = 256 ; var buf = new Buffer ( BUFSIZE ) ; var bytesRead = 0 ; var isPosixPlatform = process . platform ! = " win32 " ; var fd = process . stdin . fd ; if ( isPosixPlatform ) { var usingDevice = false ; try { fd = fs . openSync ( " / dev / stdin " , " r " ) ; usingDevice = true } catch ( e ) { } } try { bytesRead = fs . readSync ( fd , buf , 0 , BUFSIZE , null ) } catch ( e ) { if ( e . toString ( ) . indexOf ( " EOF " ) ! = - 1 ) bytesRead = 0 ; else throw e } if ( usingDevice ) { fs . closeSync ( fd ) } if ( bytesRead > 0 ) { result = buf . slice ( 0 , bytesRead ) . toString ( " utf - 8 " ) } else { result = null } } else if ( typeof window ! = " undefined " & & typeof window . prompt = = " function " ) { result = window . prompt ( " Input : " ) ; if ( result ! = = null ) { result + = " \ n " } } else if ( typeof readline = = " function " ) { result = readline ( ) ; if ( result ! = = null ) { result + = " \ n " } } if ( ! result ) { return null } tty . input = intArrayFromString ( result , true ) } return tty . input . 
shift ( ) } , put_char : function ( tty , val ) { if ( val = = = null | | val = = = 10 ) { out ( UTF8ArrayToString ( tty . output , 0 ) ) ; tty . output = [ ] } else { if ( val ! = 0 ) tty . output . push ( val ) } } , flush : function ( tty ) { if ( tty . output & & tty . output . length > 0 ) { out ( UTF8ArrayToString ( tty . output , 0 ) ) ; tty . output = [ ] } } } , default_tty1_ops : { put_char : function ( tty , val ) { if ( val = = = null | | val = = = 10 ) { err ( UTF8ArrayToString ( tty . output , 0 ) ) ; tty . output = [ ] } else { if ( val ! = 0 ) tty . output . push ( val ) } } , flush : function ( tty ) { if ( tty . output & & tty . output . length > 0 ) { err ( UTF8ArrayToString ( tty . output , 0 ) ) ; tty . output = [ ] } } } } ; var MEMFS = { ops_table : null , mount : function ( mount ) { return MEMFS . createNode ( null , " / " , 16384 | 511 , 0 ) } , createNode : function ( parent , name , mode , dev ) { if ( FS . isBlkdev ( mode ) | | FS . isFIFO ( mode ) ) { throw new FS . ErrnoError ( ERRNO_CODES . EPERM ) } if ( ! MEMFS . ops_table ) { MEMFS . ops_table = { dir : { node : { getattr : MEMFS . node_ops . getattr , setattr : MEMFS . node_ops . setattr , lookup : MEMFS . node_ops . lookup , mknod : MEMFS . node_ops . mknod , rename : MEMFS . node_ops . rename , unlink : MEMFS . node_ops . unlink , rmdir : MEMFS . node_ops . rmdir , readdir : MEMFS . node_ops . readdir , symlink : MEMFS . node_ops . symlink } , stream : { llseek : MEMFS . stream_ops . llseek } } , file : { node : { getattr : MEMFS . node_ops . getattr , setattr : MEMFS . node_ops . setattr } , stream : { llseek : MEMFS . stream_ops . llseek , read : MEMFS . stream_ops . read , write : MEMFS . stream_ops . write , allocate : MEMFS . stream_ops . allocate , mmap : MEMFS . stream_ops . mmap , msync : MEMFS . stream_ops . msync } } , link : { node : { getattr : MEMFS . node_ops . getattr , setattr : MEMFS . node_ops . setattr , readlink : MEMFS . node_ops . readlink } , stream : { } } , chrdev : { node : { getattr : MEMFS . node_ops . getattr , setattr : MEMFS . node_ops . setattr } , stream : FS . chrdev_stream_ops } } } var node = FS . createNode ( parent , name , mode , dev ) ; if ( FS . isDir ( node . mode ) ) { node . node_ops = MEMFS . ops_table . dir . node ; node . stream_ops = MEMFS . ops_table . dir . stream ; node . contents = { } } else if ( FS . isFile ( node . mode ) ) { node . node_ops = MEMFS . ops_table . file . node ; node . stream_ops = MEMFS . ops_table . file . stream ; node . usedBytes = 0 ; node . contents = null } else if ( FS . isLink ( node . mode ) ) { node . node_ops = MEMFS . ops_table . link . node ; node . stream_ops = MEMFS . ops_table . link . stream } else if ( FS . isChrdev ( node . mode ) ) { node . node_ops = MEMFS . ops_table . chrdev . node ; node . stream_ops = MEMFS . ops_table . chrdev . stream } node . timestamp = Date . now ( ) ; if ( parent ) { parent . contents [ name ] = node } return node } , getFileDataAsRegularArray : function ( node ) { if ( node . contents & & node . contents . subarray ) { var arr = [ ] ; for ( var i = 0 ; i < node . usedBytes ; + + i ) arr . push ( node . contents [ i ] ) ; return arr } return node . contents } , getFileDataAsTypedArray : function ( node ) { if ( ! node . contents ) return new Uint8Array ; if ( node . contents . subarray ) return node . contents . subarray ( 0 , node . usedBytes ) ; return new Uint8Array ( node . contents ) } , expandFileStorage : function ( node , newCapacity ) { var prevCapacity = node . contents ? 
node . contents . length : 0 ; if ( prevCapacity > = newCapacity ) return ; var CAPACITY_DOUBLING_MAX = 1024 * 1024 ; newCapacity = Math . max ( newCapacity , prevCapacity * ( prevCapacity < CAPACITY_DOUBLING_MAX ? 2 : 1 . 125 ) | 0 ) ; if ( prevCapacity ! = 0 ) newCapacity = Math . max ( newCapacity , 256 ) ; var oldContents = node . contents ; node . contents = new Uint8Array ( newCapacity ) ; if ( node . usedBytes > 0 ) node . contents . set ( oldContents . subarray ( 0 , node . usedBytes ) , 0 ) ; return } , resizeFileStorage : function ( node , newSize ) { if ( node . usedBytes = = newSize ) return ; if ( newSize = = 0 ) { node . contents = null ; node . usedBytes = 0 ; return } if ( ! node . contents | | node . contents . subarray ) { var oldContents = node . contents ; node . contents = new Uint8Array ( new ArrayBuffer ( newSize ) ) ; if ( oldContents ) { node . contents . set ( oldContents . subarray ( 0 , Math . min ( newSize , node . usedBytes ) ) ) } node . usedBytes = newSize ; return } if ( ! node . contents ) node . contents = [ ] ; if ( node . contents . length > newSize ) node . contents . length = newSize ; else while ( node . contents . length < newSize ) node . contents . push ( 0 ) ; node . usedBytes = newSize } , node_ops : { getattr : function ( node ) { var attr = { } ; attr . dev = FS . isChrdev ( node . mode ) ? node . id : 1 ; attr . ino = node . id ; attr . mode = node . mode ; attr . nlink = 1 ; attr . uid = 0 ; attr . gid = 0 ; attr . rdev = node . rdev ; if ( FS . isDir ( node . mode ) ) { attr . size = 4096 } else if ( FS . isFile ( node . mode ) ) { attr . size = node . usedBytes } else if ( FS . isLink ( node . mode ) ) { attr . size = node . link . length } else { attr . size = 0 } attr . atime = new Date ( node . timestamp ) ; attr . mtime = new Date ( node . timestamp ) ; attr . ctime = new Date ( node . timestamp ) ; attr . blksize = 4096 ; attr . blocks = Math . ceil ( attr . size / attr . blksize ) ; return attr } , setattr : function ( node , attr ) { if ( attr . mode ! = = undefined ) { node . mode = attr . mode } if ( attr . timestamp ! = = undefined ) { node . timestamp = attr . timestamp } if ( attr . size ! = = undefined ) { MEMFS . resizeFileStorage ( node , attr . size ) } } , lookup : function ( parent , name ) { throw FS . genericErrors [ ERRNO_CODES . ENOENT ] } , mknod : function ( parent , name , mode , dev ) { return MEMFS . createNode ( parent , name , mode , dev ) } , rename : function ( old_node , new_dir , new_name ) { if ( FS . isDir ( old_node . mode ) ) { var new_node ; try { new_node = FS . lookupNode ( new_dir , new_name ) } catch ( e ) { } if ( new_node ) { for ( var i in new_node . contents ) { throw new FS . ErrnoError ( ERRNO_CODES . ENOTEMPTY ) } } } delete old_node . parent . contents [ old_node . name ] ; old_node . name = new_name ; new_dir . contents [ new_name ] = old_node ; old_node . parent = new_dir } , unlink : function ( parent , name ) { delete parent . contents [ name ] } , rmdir : function ( parent , name ) { var node = FS . lookupNode ( parent , name ) ; for ( var i in node . contents ) { throw new FS . ErrnoError ( ERRNO_CODES . ENOTEMPTY ) } delete parent . contents [ name ] } , readdir : function ( node ) { var entries = [ " . " , " . . " ] ; for ( var key in node . contents ) { if ( ! node . contents . hasOwnProperty ( key ) ) { continue } entries . push ( key ) } return entries } , symlink : function ( parent , newname , oldpath ) { var node = MEMFS . 
createNode ( parent , newname , 511 | 40960 , 0 ) ; node . link = oldpath ; return node } , readlink : function ( node ) { if ( ! FS . isLink ( node . mode ) ) { throw new FS . ErrnoError ( ERRNO_CODES . EINVAL ) } return node . link } } , stream_ops : { read : function ( stream , buffer , offset , length , position ) { var contents = stream . node . contents ; if ( position > = stream . node . usedBytes ) return 0 ; var size = Math . min ( stream . node . usedBytes - position , length ) ; if ( size > 8 & & contents . subarray ) { buffer . set ( contents . subarray ( position , position + size ) , offset ) } else { for ( var i = 0 ; i < size ; i + + ) buffer [ offset + i ] = contents [ position + i ] } return size } , write : function ( stream , buffer , offset , length , position , canOwn ) { canOwn = false ; if ( ! length ) return 0 ; var node = stream . node ; node . timestamp = Date . now ( ) ; if ( buffer . subarray & & ( ! node . contents | | node . contents . subarray ) ) { if ( canOwn ) { node . contents = buffer . subarray ( offset , offset + length ) ; node . usedBytes = length ; return length } else if ( node . usedBytes = = = 0 & & position = = = 0 ) { node . contents = new Uint8Array ( buffer . subarray ( offset , offset + length ) ) ; node . usedBytes = length ; return length } else if ( position + length < = node . usedBytes ) { node . contents . set ( buffer . subarray ( offset , offset + length ) , position ) ; return length } } MEMFS . expandFileStorage ( node , position + length ) ; if ( node . contents . subarray & & buffer . subarray ) node . contents . set ( buffer . subarray ( offset , offset + length ) , position ) ; else { for ( var i = 0 ; i < length ; i + + ) { node . contents [ position + i ] = buffer [ offset + i ] } } node . usedBytes = Math . max ( node . usedBytes , position + length ) ; return length } , llseek : function ( stream , offset , whence ) { var position = offset ; if ( whence = = = 1 ) { position + = stream . position } else if ( whence = = = 2 ) { if ( FS . isFile ( stream . node . mode ) ) { position + = stream . node . usedBytes } } if ( position < 0 ) { throw new FS . ErrnoError ( ERRNO_CODES . EINVAL ) } return position } , allocate : function ( stream , offset , length ) { MEMFS . expandFileStorage ( stream . node , offset + length ) ; stream . node . usedBytes = Math . max ( stream . node . usedBytes , offset + length ) } , mmap : function ( stream , buffer , offset , length , position , prot , flags ) { if ( ! FS . isFile ( stream . node . mode ) ) { throw new FS . ErrnoError ( ERRNO_CODES . ENODEV ) } var ptr ; var allocated ; var contents = stream . node . contents ; if ( ! ( flags & 2 ) & & ( contents . buffer = = = buffer | | contents . buffer = = = buffer . buffer ) ) { allocated = false ; ptr = contents . byteOffset } else { if ( position > 0 | | position + length < stream . node . usedBytes ) { if ( contents . subarray ) { contents = contents . subarray ( position , position + length ) } else { contents = Array . prototype . slice . call ( contents , position , position + length ) } } allocated = true ; ptr = _malloc ( length ) ; if ( ! ptr ) { throw new FS . ErrnoError ( ERRNO_CODES . ENOMEM ) } buffer . set ( contents , ptr ) } return { ptr : ptr , allocated : allocated } } , msync : function ( stream , buffer , offset , length , mmapFlags ) { if ( ! FS . isFile ( stream . node . mode ) ) { throw new FS . ErrnoError ( ERRNO_CODES . ENODEV ) } if ( mmapFlags & 2 ) { return 0 } var bytesWritten = MEMFS . stream_ops . 
write ( stream , buffer , 0 , length , offset , false ) ; return 0 } } } ; var IDBFS = { dbs : { } , indexedDB : function ( ) { if ( typeof indexedDB ! = = " undefined " ) return indexedDB ; var ret = null ; if ( typeof window = = = " object " ) ret = window . indexedDB | | window . mozIndexedDB | | window . webkitIndexedDB | | window . msIndexedDB ; assert ( ret , " IDBFS used , but indexedDB not supported " ) ; return ret } , DB_VERSION : 21 , DB_STORE_NAME : " FILE_DATA " , mount : function ( mount ) { return MEMFS . mount . apply ( null , arguments ) } , syncfs : function ( mount , populate , callback ) { IDBFS . getLocalSet ( mount , function ( err , local ) { if ( err ) return callback ( err ) ; IDBFS . getRemoteSet ( mount , function ( err , remote ) { if ( err ) return callback ( err ) ; var src = populate ? remote : local ; var dst = populate ? local : remote ; IDBFS . reconcile ( src , dst , callback ) } ) } ) } , getDB : function ( name , callback ) { var db = IDBFS . dbs [ name ] ; if ( db ) { return callback ( null , db ) } var req ; try { req = IDBFS . indexedDB ( ) . open ( name , IDBFS . DB_VERSION ) } catch ( e ) { return callback ( e ) } if ( ! req ) { return callback ( " Unable to connect to IndexedDB " ) } req . onupgradeneeded = function ( e ) { var db = e . target . result ; var transaction = e . target . transaction ; var fileStore ; if ( db . objectStoreNames . contains ( IDBFS . DB_STORE_NAME ) ) { fileStore = transaction . objectStore ( IDBFS . DB_STORE_NAME ) } else { fileStore = db . createObjectStore ( IDBFS . DB_STORE_NAME ) } if ( ! fileStore . indexNames . contains ( " timestamp " ) ) { fileStore . createIndex ( " timestamp " , " timestamp " , { unique : false } ) } } ; req . onsuccess = function ( ) { db = req . result ; IDBFS . dbs [ name ] = db ; callback ( null , db ) } ; req . onerror = function ( e ) { callback ( this . error ) ; e . preventDefault ( ) } } , getLocalSet : function ( mount , callback ) { var entries = { } ; function isRealDir ( p ) { return p ! = = " . " & & p ! = = " . . " } function toAbsolute ( root ) { return function ( p ) { return PATH . join2 ( root , p ) } } var check = FS . readdir ( mount . mountpoint ) . filter ( isRealDir ) . map ( toAbsolute ( mount . mountpoint ) ) ; while ( check . length ) { var path = check . pop ( ) ; var stat ; try { stat = FS . stat ( path ) } catch ( e ) { return callback ( e ) } if ( FS . isDir ( stat . mode ) ) { check . push . apply ( check , FS . readdir ( path ) . filter ( isRealDir ) . map ( toAbsolute ( path ) ) ) } entries [ path ] = { timestamp : stat . mtime } } return callback ( null , { type : " local " , entries : entries } ) } , getRemoteSet : function ( mount , callback ) { var entries = { } ; IDBFS . getDB ( mount . mountpoint , function ( err , db ) { if ( err ) return callback ( err ) ; try { var transaction = db . transaction ( [ IDBFS . DB_STORE_NAME ] , " readonly " ) ; transaction . onerror = function ( e ) { callback ( this . error ) ; e . preventDefault ( ) } ; var store = transaction . objectStore ( IDBFS . DB_STORE_NAME ) ; var index = store . index ( " timestamp " ) ; index . openKeyCursor ( ) . onsuccess = function ( event ) { var cursor = event . target . result ; if ( ! cursor ) { return callback ( null , { type : " remote " , db : db , entries : entries } ) } entries [ cursor . primaryKey ] = { timestamp : cursor . key } ; cursor . 
continue ( ) } } catch ( e ) { return callback ( e ) } } ) } , loadLocalEntry : function ( path , callback ) { var stat , node ; try { var lookup = FS . lookupPath ( path ) ; node = lookup . node ; stat = FS . stat ( path ) } catch ( e ) { return callback ( e ) } if ( FS . isDir ( stat . mode ) ) { return callback ( null , { timestamp : stat . mtime , mode : stat . mode } ) } else if ( FS . isFile ( stat . mode ) ) { node . contents = MEMFS . getFileDataAsTypedArray ( node ) ; return callback ( null , { timestamp : stat . mtime , mode : stat . mode , contents : node . contents } ) } else { return callback ( new Error ( " node type not supported " ) ) } } , storeLocalEntry : function ( path , entry , callback ) { try { if ( FS . isDir ( entry . mode ) ) { FS . mkdir ( path , entry . mode ) } else if ( FS . isFile ( entry . mode ) ) { FS . writeFile ( path , entry . contents , { canOwn : true } ) } else { return callback ( new Error ( " node type not supported " ) ) } FS . chmod ( path , entry . mode ) ; FS . utime ( path , entry . timestamp , entry . timestamp ) } catch ( e ) { return callback ( e ) } callback ( null ) } , removeLocalEntry : function ( path , callback ) { try { var lookup = FS . lookupPath ( path ) ; var stat = FS . stat ( path ) ; if ( FS . isDir ( stat . mode ) ) { FS . rmdir ( path ) } else if ( FS . isFile ( stat . mode ) ) { FS . unlink ( path ) } } catch ( e ) { return callback ( e ) } callback ( null ) } , loadRemoteEntry : function ( store , path , callback ) { var req = store . get ( path ) ; req . onsuccess = function ( event ) { callback ( null , event . target . result ) } ; req . onerror = function ( e ) { callback ( this . error ) ; e . preventDefault ( ) } } , storeRemoteEntry : function ( store , path , entry , callback ) { var req = store . put ( entry , path ) ; req . onsuccess = function ( ) { callback ( null ) } ; req . onerror = function ( e ) { callback ( this . error ) ; e . preventDefault ( ) } } , removeRemoteEntry : function ( store , path , callback ) { var req = store . delete ( path ) ; req . onsuccess = function ( ) { callback ( null ) } ; req . onerror = function ( e ) { callback ( this . error ) ; e . preventDefault ( ) } } , reconcile : function ( src , dst , callback ) { var total = 0 ; var create = [ ] ; Object . keys ( src . entries ) . forEach ( function ( key ) { var e = src . entries [ key ] ; var e2 = dst . entries [ key ] ; if ( ! e2 | | e . timestamp > e2 . timestamp ) { create . push ( key ) ; total + + } } ) ; var remove = [ ] ; Object . keys ( dst . entries ) . forEach ( function ( key ) { var e = dst . entries [ key ] ; var e2 = src . entries [ key ] ; if ( ! e2 ) { remove . push ( key ) ; total + + } } ) ; if ( ! total ) { return callback ( null ) } var errored = false ; var completed = 0 ; var db = src . type = = = " remote " ? src . db : dst . db ; var transaction = db . transaction ( [ IDBFS . DB_STORE_NAME ] , " readwrite " ) ; var store = transaction . objectStore ( IDBFS . DB_STORE_NAME ) ; function done ( err ) { if ( err ) { if ( ! done . errored ) { done . errored = true ; return callback ( err ) } return } if ( + + completed > = total ) { return callback ( null ) } } transaction . onerror = function ( e ) { done ( this . error ) ; e . preventDefault ( ) } ; create . sort ( ) . forEach ( function ( path ) { if ( dst . type = = = " local " ) { IDBFS . loadRemoteEntry ( store , path , function ( err , entry ) { if ( err ) return done ( err ) ; IDBFS . storeLocalEntry ( path , entry , done ) } ) } else { IDBFS . 
loadLocalEntry ( path , function ( err , entry ) { if ( err ) return done ( err ) ; IDBFS . storeRemoteEntry ( store , path , entry , done ) } ) } } ) ; remove . sort ( ) . reverse ( ) . forEach ( function ( path ) { if ( dst . type = = = " local " ) { IDBFS . removeLocalEntry ( path , done ) } else { IDBFS . removeRemoteEntry ( store , path , done ) } } ) } } ; var NODEFS = { isWindows : false , staticInit : function ( ) { NODEFS . isWindows = ! ! process . platform . match ( / ^ win / ) ; var flags = process [ " binding " ] ( " constants " ) ; if ( flags [ " fs " ] ) { flags = flags [ " fs " ] } NODEFS . flagsForNodeMap = { 1024 : flags [ " O_APPEND " ] , 64 : flags [ " O_CREAT " ] , 128 : flags [ " O_EXCL " ] , 0 : flags [ " O_RDONLY " ] , 2 : flags [ " O_RDWR " ] , 4096 : flags [ " O_SYNC " ] , 512 : flags [ " O_TRUNC " ] , 1 : flags [ " O_WRONLY " ] } } , bufferFrom : function ( arrayBuffer ) { return Buffer . alloc ? Buffer . from ( arrayBuffer ) : new Buffer ( arrayBuffer ) } , mount : function ( mount ) { assert ( ENVIRONMENT_IS_NODE ) ; return NODEFS . createNode ( null , " / " , NODEFS . getMode ( mount . opts . root ) , 0 ) } , createNode : function ( parent , name , mode , dev ) { if ( ! FS . isDir ( mode ) & & ! FS . isFile ( mode ) & & ! FS . isLink ( mode ) ) { throw new FS . ErrnoError ( ERRNO_CODES . EINVAL ) } var node = FS . createNode ( parent , name , mode ) ; node . node_ops = NODEFS . node_ops ; node . stream_ops = NODEFS . stream_ops ; return node } , getMode : function ( path ) { var stat ; try { stat = fs . lstatSync ( path ) ; if ( NODEFS . isWindows ) { stat . mode = stat . mode | ( stat . mode & 292 ) > > 2 } } catch ( e ) { if ( ! e . code ) throw e ; throw new FS . ErrnoError ( ERRNO_CODES [ e . code ] ) } return stat . mode } , realPath : function ( node ) { var parts = [ ] ; while ( node . parent ! = = node ) { parts . push ( node . name ) ; node = node . parent } parts . push ( node . mount . opts . root ) ; parts . reverse ( ) ; return PATH . join . apply ( null , parts ) } , flagsForNode : function ( flags ) { flags & = ~ 2097152 ; flags & = ~ 2048 ; flags & = ~ 32768 ; flags & = ~ 524288 ; var newFlags = 0 ; for ( var k in NODEFS . flagsForNodeMap ) { if ( flags & k ) { newFlags | = NODEFS . flagsForNodeMap [ k ] ; flags ^ = k } } if ( ! flags ) { return newFlags } else { throw new FS . ErrnoError ( ERRNO_CODES . EINVAL ) } } , node_ops : { getattr : function ( node ) { var path = NODEFS . realPath ( node ) ; var stat ; try { stat = fs . lstatSync ( path ) } catch ( e ) { if ( ! e . code ) throw e ; throw new FS . ErrnoError ( ERRNO_CODES [ e . code ] ) } if ( NODEFS . isWindows & & ! stat . blksize ) { stat . blksize = 4096 } if ( NODEFS . isWindows & & ! stat . blocks ) { stat . blocks = ( stat . size + stat . blksize - 1 ) / stat . blksize | 0 } return { dev : stat . dev , ino : stat . ino , mode : stat . mode , nlink : stat . nlink , uid : stat . uid , gid : stat . gid , rdev : stat . rdev , size : stat . size , atime : stat . atime , mtime : stat . mtime , ctime : stat . ctime , blksize : stat . blksize , blocks : stat . blocks } } , setattr : function ( node , attr ) { var path = NODEFS . realPath ( node ) ; try { if ( attr . mode ! = = undefined ) { fs . chmodSync ( path , attr . mode ) ; node . mode = attr . mode } if ( attr . timestamp ! = = undefined ) { var date = new Date ( attr . timestamp ) ; fs . utimesSync ( path , date , date ) } if ( attr . size ! = = undefined ) { fs . truncateSync ( path , attr . size ) } } catch ( e ) { if ( ! e . 
code ) throw e ; throw new FS . ErrnoError ( ERRNO_CODES [ e . code ] ) } } , lookup : function ( parent , name ) { var path = PATH . join2 ( NODEFS . realPath ( parent ) , name ) ; var mode = NODEFS . getMode ( path ) ; return NODEFS . createNode ( parent , name , mode ) } , mknod : function ( parent , name , mode , dev ) { var node = NODEFS . createNode ( parent , name , mode , dev ) ; var path = NODEFS . realPath ( node ) ; try { if ( FS . isDir ( node . mode ) ) { fs . mkdirSync ( path , node . mode ) } else { fs . writeFileSync ( path , " " , { mode : node . mode } ) } } catch ( e ) { if ( ! e . code ) throw e ; throw new FS . ErrnoError ( ERRNO_CODES [ e . code ] ) } return node } , rename : function ( oldNode , newDir , newName ) { var oldPath = NODEFS . realPath ( oldNode ) ; var newPath = PATH . join2 ( NODEFS . realPath ( newDir ) , newName ) ; try { fs . renameSync ( oldPath , newPath ) } catch ( e ) { if ( ! e . code ) throw e ; throw new FS . ErrnoError ( ERRNO_CODES [ e . code ] ) } } , unlink : function ( parent , name ) { var path = PATH . join2 ( NODEFS . realPath ( parent ) , name ) ; try { fs . unlinkSync ( path ) } catch ( e ) { if ( ! e . code ) throw e ; throw new FS . ErrnoError ( ERRNO_CODES [ e . code ] ) } } , rmdir : function ( parent , name ) { var path = PATH . join2 ( NODEFS . realPath ( parent ) , name ) ; try { fs . rmdirSync ( path ) } catch ( e ) { if ( ! e . code ) throw e ; throw new FS . ErrnoError ( ERRNO_CODES [ e . code ] ) } } , readdir : function ( node ) { var path = NODEFS . realPath ( node ) ; try { return fs . readdirSync ( path ) } catch ( e ) { if ( ! e . code ) throw e ; throw new FS . ErrnoError ( ERRNO_CODES [ e . code ] ) } } , symlink : function ( parent , newName , oldPath ) { var newPath = PATH . join2 ( NODEFS . realPath ( parent ) , newName ) ; try { fs . symlinkSync ( oldPath , newPath ) } catch ( e ) { if ( ! e . code ) throw e ; throw new FS . ErrnoError ( ERRNO_CODES [ e . code ] ) } } , readlink : function ( node ) { var path = NODEFS . realPath ( node ) ; try { path = fs . readlinkSync ( path ) ; path = NODEJS_PATH . relative ( NODEJS_PATH . resolve ( node . mount . opts . root ) , path ) ; return path } catch ( e ) { if ( ! e . code ) throw e ; throw new FS . ErrnoError ( ERRNO_CODES [ e . code ] ) } } } , stream_ops : { open : function ( stream ) { var path = NODEFS . realPath ( stream . node ) ; try { if ( FS . isFile ( stream . node . mode ) ) { stream . nfd = fs . openSync ( path , NODEFS . flagsForNode ( stream . flags ) ) } } catch ( e ) { if ( ! e . code ) throw e ; throw new FS . ErrnoError ( ERRNO_CODES [ e . code ] ) } } , close : function ( stream ) { try { if ( FS . isFile ( stream . node . mode ) & & stream . nfd ) { fs . closeSync ( stream . nfd ) } } catch ( e ) { if ( ! e . code ) throw e ; throw new FS . ErrnoError ( ERRNO_CODES [ e . code ] ) } } , read : function ( stream , buffer , offset , length , position ) { if ( length = = = 0 ) return 0 ; try { return fs . readSync ( stream . nfd , NODEFS . bufferFrom ( buffer . buffer ) , offset , length , position ) } catch ( e ) { throw new FS . ErrnoError ( ERRNO_CODES [ e . code ] ) } } , write : function ( stream , buffer , offset , length , position ) { try { return fs . writeSync ( stream . nfd , NODEFS . bufferFrom ( buffer . buffer ) , offset , length , position ) } catch ( e ) { throw new FS . ErrnoError ( ERRNO_CODES [ e . code ] ) } } , llseek : function ( stream , offset , whence ) { var position = offset ; if ( whence = = = 1 ) { position + = stream . 
position } else if ( whence = = = 2 ) { if ( FS . isFile ( stream . node . mode ) ) { try { var stat = fs . fstatSync ( stream . nfd ) ; position + = stat . size } catch ( e ) { throw new FS . ErrnoError ( ERRNO_CODES [ e . code ] ) } } } if ( position < 0 ) { throw new FS . ErrnoError ( ERRNO_CODES . EINVAL ) } return position } } } ; var WORKERFS = { DIR_MODE : 16895 , FILE_MODE : 33279 , reader : null , mount : function ( mount ) { assert ( ENVIRONMENT_IS_WORKER ) ; if ( ! WORKERFS . reader ) WORKERFS . reader = new FileReaderSync ; var root = WORKERFS . createNode ( null , " / " , WORKERFS . DIR_MODE , 0 ) ; var createdParents = { } ; function ensureParent ( path ) { var parts = path . split ( " / " ) ; var parent = root ; for ( var i = 0 ; i < parts . length - 1 ; i + + ) { var curr = parts . slice ( 0 , i + 1 ) . join ( " / " ) ; if ( ! createdParents [ curr ] ) { createdParents [ curr ] = WORKERFS . createNode ( parent , parts [ i ] , WORKERFS . DIR_MODE , 0 ) } parent = createdParents [ curr ] } return parent } function base ( path ) { var parts = path . split ( " / " ) ; return parts [ parts . length - 1 ] } Array . prototype . forEach . call ( mount . opts [ " files " ] | | [ ] , function ( file ) { WORKERFS . createNode ( ensureParent ( file . name ) , base ( file . name ) , WORKERFS . FILE_MODE , 0 , file , file . lastModifiedDate ) } ) ; ( mount . opts [ " blobs " ] | | [ ] ) . forEach ( function ( obj ) { WORKERFS . createNode ( ensureParent ( obj [ " name " ] ) , base ( obj [ " name " ] ) , WORKERFS . FILE_MODE , 0 , obj [ " data " ] ) } ) ; ( mount . opts [ " packages " ] | | [ ] ) . forEach ( function ( pack ) { pack [ " metadata " ] . files . forEach ( function ( file ) { var name = file . filename . substr ( 1 ) ; WORKERFS . createNode ( ensureParent ( name ) , base ( name ) , WORKERFS . FILE_MODE , 0 , pack [ " blob " ] . slice ( file . start , file . end ) ) } ) } ) ; return root } , createNode : function ( parent , name , mode , dev , contents , mtime ) { var node = FS . createNode ( parent , name , mode ) ; node . mode = mode ; node . node_ops = WORKERFS . node_ops ; node . stream_ops = WORKERFS . stream_ops ; node . timestamp = ( mtime | | new Date ) . getTime ( ) ; assert ( WORKERFS . FILE_MODE ! = = WORKERFS . DIR_MODE ) ; if ( mode = = = WORKERFS . FILE_MODE ) { node . size = contents . size ; node . contents = contents } else { node . size = 4096 ; node . contents = { } } if ( parent ) { parent . contents [ name ] = node } return node } , node_ops : { getattr : function ( node ) { return { dev : 1 , ino : undefined , mode : node . mode , nlink : 1 , uid : 0 , gid : 0 , rdev : undefined , size : node . size , atime : new Date ( node . timestamp ) , mtime : new Date ( node . timestamp ) , ctime : new Date ( node . timestamp ) , blksize : 4096 , blocks : Math . ceil ( node . size / 4096 ) } } , setattr : function ( node , attr ) { if ( attr . mode ! = = undefined ) { node . mode = attr . mode } if ( attr . timestamp ! = = undefined ) { node . timestamp = attr . timestamp } } , lookup : function ( parent , name ) { throw new FS . ErrnoError ( ERRNO_CODES . ENOENT ) } , mknod : function ( parent , name , mode , dev ) { throw new FS . ErrnoError ( ERRNO_CODES . EPERM ) } , rename : function ( oldNode , newDir , newName ) { throw new FS . ErrnoError ( ERRNO_CODES . EPERM ) } , unlink : function ( parent , name ) { throw new FS . ErrnoError ( ERRNO_CODES . EPERM ) } , rmdir : function ( parent , name ) { throw new FS . ErrnoError ( ERRNO_CODES . 
EPERM ) } , readdir : function ( node ) { var entries = [ " . " , " . . " ] ; for ( var key in node . contents ) { if ( ! node . contents . hasOwnProperty ( key ) ) { continue } entries . push ( key ) } return entries } , symlink : function ( parent , newName , oldPath ) { throw new FS . ErrnoError ( ERRNO_CODES . EPERM ) } , readlink : function ( node ) { throw new FS . ErrnoError ( ERRNO_CODES . EPERM ) } } , stream_ops : { read : function ( stream , buffer , offset , length , position ) { if ( position > = stream . node . size ) return 0 ; var chunk = stream . node . contents . slice ( position , position + length ) ; var ab = WORKERFS . reader . readAsArrayBuffer ( chunk ) ; buffer . set ( new Uint8Array ( ab ) , offset ) ; return chunk . size } , write : function ( stream , buffer , offset , length , position ) { throw new FS . ErrnoError ( ERRNO_CODES . EIO ) } , llseek : function ( stream , offset , whence ) { var position = offset ; if ( whence = = = 1 ) { position + = stream . position } else if ( whence = = = 2 ) { if ( FS . isFile ( stream . node . mode ) ) { position + = stream . node . size } } if ( position < 0 ) { throw new FS . ErrnoError ( ERRNO_CODES . EINVAL ) } return position } } } ; var FS = { root : null , mounts : [ ] , devices : { } , streams : [ ] , nextInode : 1 , nameTable : null , currentPath : " / " , initialized : false , ignorePermissions : true , trackingDelegate : { } , tracking : { openFlags : { READ : 1 , WRITE : 2 } } , ErrnoError : null , genericErrors : { } , filesystems : null , syncFSRequests : 0 , handleFSError : function ( e ) { if ( ! ( e instanceof FS . ErrnoError ) ) throw e + " : " + stackTrace ( ) ; return ___setErrNo ( e . errno ) } , lookupPath : function ( path , opts ) { path = PATH . resolve ( FS . cwd ( ) , path ) ; opts = opts | | { } ; if ( ! path ) return { path : " " , node : null } ; var defaults = { follow_mount : true , recurse_count : 0 } ; for ( var key in defaults ) { if ( opts [ key ] = = = undefined ) { opts [ key ] = defaults [ key ] } } if ( opts . recurse_count > 8 ) { throw new FS . ErrnoError ( 40 ) } var parts = PATH . normalizeArray ( path . split ( " / " ) . filter ( function ( p ) { return ! ! p } ) , false ) ; var current = FS . root ; var current_path = " / " ; for ( var i = 0 ; i < parts . length ; i + + ) { var islast = i = = = parts . length - 1 ; if ( islast & & opts . parent ) { break } current = FS . lookupNode ( current , parts [ i ] ) ; current_path = PATH . join2 ( current_path , parts [ i ] ) ; if ( FS . isMountpoint ( current ) ) { if ( ! islast | | islast & & opts . follow_mount ) { current = current . mounted . root } } if ( ! islast | | opts . follow ) { var count = 0 ; while ( FS . isLink ( current . mode ) ) { var link = FS . readlink ( current_path ) ; current_path = PATH . resolve ( PATH . dirname ( current_path ) , link ) ; var lookup = FS . lookupPath ( current_path , { recurse_count : opts . recurse_count } ) ; current = lookup . node ; if ( count + + > 40 ) { throw new FS . ErrnoError ( 40 ) } } } } return { path : current_path , node : current } } , getPath : function ( node ) { var path ; while ( true ) { if ( FS . isRoot ( node ) ) { var mount = node . mount . mountpoint ; if ( ! path ) return mount ; return mount [ mount . length - 1 ] ! = = " / " ? mount + " / " + path : mount + path } path = path ? node . name + " / " + path : node . name ; node = node . parent } } , hashName : function ( parentid , name ) { var hash = 0 ; for ( var i = 0 ; i < name . 
length ; i + + ) { hash = ( hash < < 5 ) - hash + name . charCodeAt ( i ) | 0 } return ( parentid + hash > > > 0 ) % FS . nameTable . length } , hashAddNode : function ( node ) { var hash = FS . hashName ( node . parent . id , node . name ) ; node . name_next = FS . nameTable [ hash ] ; FS . nameTable [ hash ] = node } , hashRemoveNode : function ( node ) { var hash = FS . hashName ( node . parent . id , node . name ) ; if ( FS . nameTable [ hash ] = = = node ) { FS . nameTable [ hash ] = node . name_next } else { var current = FS . nameTable [ hash ] ; while ( current ) { if ( current . name_next = = = node ) { current . name_next = node . name_next ; break } current = current . name_next } } } , lookupNode : function ( parent , name ) { var err = FS . mayLookup ( parent ) ; if ( err ) { throw new FS . ErrnoError ( err , parent ) } var hash = FS . hashName ( parent . id , name ) ; for ( var node = FS . nameTable [ hash ] ; node ; node = node . name_next ) { var nodeName = node . name ; if ( node . parent . id = = = parent . id & & nodeName = = = name ) { return node } } return FS . lookup ( parent , name ) } , createNode : function ( parent , name , mode , rdev ) { if ( ! FS . FSNode ) { FS . FSNode = function ( parent , name , mode , rdev ) { if ( ! parent ) { parent = this } this . parent = parent ; this . mount = parent . mount ; this . mounted = null ; this . id = FS . nextInode + + ; this . name = name ; this . mode = mode ; this . node_ops = { } ; this . stream_ops = { } ; this . rdev = rdev } ; FS . FSNode . prototype = { } ; var readMode = 292 | 73 ; var writeMode = 146 ; Object . defineProperties ( FS . FSNode . prototype , { read : { get : function ( ) { return ( this . mode & readMode ) = = = readMode } , set : function ( val ) { val ? this . mode | = readMode : this . mode & = ~ readMode } } , write : { get : function ( ) { return ( this . mode & writeMode ) = = = writeMode } , set : function ( val ) { val ? this . mode | = writeMode : this . mode & = ~ writeMode } } , isFolder : { get : function ( ) { return FS . isDir ( this . mode ) } } , isDevice : { get : function ( ) { return FS . isChrdev ( this . mode ) } } } ) } var node = new FS . FSNode ( parent , name , mode , rdev ) ; FS . hashAddNode ( node ) ; return node } , destroyNode : function ( node ) { FS . hashRemoveNode ( node ) } , isRoot : function ( node ) { return node = = = node . parent } , isMountpoint : function ( node ) { return ! ! node . mounted } , isFile : function ( mode ) { return ( mode & 61440 ) = = = 32768 } , isDir : function ( mode ) { return ( mode & 61440 ) = = = 16384 } , isLink : function ( mode ) { return ( mode & 61440 ) = = = 40960 } , isChrdev : function ( mode ) { return ( mode & 61440 ) = = = 8192 } , isBlkdev : function ( mode ) { return ( mode & 61440 ) = = = 24576 } , isFIFO : function ( mode ) { return ( mode & 61440 ) = = = 4096 } , isSocket : function ( mode ) { return ( mode & 49152 ) = = = 49152 } , flagModes : { " r " : 0 , " rs " : 1052672 , " r + " : 2 , " w " : 577 , " wx " : 705 , " xw " : 705 , " w + " : 578 , " wx + " : 706 , " xw + " : 706 , " a " : 1089 , " ax " : 1217 , " xa " : 1217 , " a + " : 1090 , " ax + " : 1218 , " xa + " : 1218 } , modeStringToFlags : function ( str ) { var flags = FS . 
flagModes [ str ] ; if ( typeof flags = = = " undefined " ) { throw new Error ( " Unknown file open mode : " + str ) } return flags } , flagsToPermissionString : function ( flag ) { var perms = [ " r " , " w " , " rw " ] [ flag & 3 ] ; if ( flag & 512 ) { perms + = " w " } return perms } , nodePermissions : function ( node , perms ) { if ( FS . ignorePermissions ) { return 0 } if ( perms . indexOf ( " r " ) ! = = - 1 & & ! ( node . mode & 292 ) ) { return 13 } else if ( perms . indexOf ( " w " ) ! = = - 1 & & ! ( node . mode & 146 ) ) { return 13 } else if ( perms . indexOf ( " x " ) ! = = - 1 & & ! ( node . mode & 73 ) ) { return 13 } return 0 } , mayLookup : function ( dir ) { var err = FS . nodePermissions ( dir , " x " ) ; if ( err ) return err ; if ( ! dir . node_ops . lookup ) return 13 ; return 0 } , mayCreate : function ( dir , name ) { try { var node = FS . lookupNode ( dir , name ) ; return 17 } catch ( e ) { } return FS . nodePermissions ( dir , " wx " ) } , mayDelete : function ( dir , name , isdir ) { var node ; try { node = FS . lookupNode ( dir , name ) } catch ( e ) { return e . errno } var err = FS . nodePermissions ( dir , " wx " ) ; if ( err ) { return err } if ( isdir ) { if ( ! FS . isDir ( node . mode ) ) { return 20 } if ( FS . isRoot ( node ) | | FS . getPath ( node ) = = = FS . cwd ( ) ) { return 16 } } else { if ( FS . isDir ( node . mode ) ) { return 21 } } return 0 } , mayOpen : function ( node , flags ) { if ( ! node ) { return 2 } if ( FS . isLink ( node . mode ) ) { return 40 } else if ( FS . isDir ( node . mode ) ) { if ( FS . flagsToPermissionString ( flags ) ! = = " r " | | flags & 512 ) { return 21 } } return FS . nodePermissions ( node , FS . flagsToPermissionString ( flags ) ) } , MAX_OPEN_FDS : 4096 , nextfd : function ( fd_start , fd_end ) { fd_start = fd_start | | 0 ; fd_end = fd_end | | FS . MAX_OPEN_FDS ; for ( var fd = fd_start ; fd < = fd_end ; fd + + ) { if ( ! FS . streams [ fd ] ) { return fd } } throw new FS . ErrnoError ( 24 ) } , getStream : function ( fd ) { return FS . streams [ fd ] } , createStream : function ( stream , fd_start , fd_end ) { if ( ! FS . FSStream ) { FS . FSStream = function ( ) { } ; FS . FSStream . prototype = { } ; Object . defineProperties ( FS . FSStream . prototype , { object : { get : function ( ) { return this . node } , set : function ( val ) { this . node = val } } , isRead : { get : function ( ) { return ( this . flags & 2097155 ) ! = = 1 } } , isWrite : { get : function ( ) { return ( this . flags & 2097155 ) ! = = 0 } } , isAppend : { get : function ( ) { return this . flags & 1024 } } } ) } var newStream = new FS . FSStream ; for ( var p in stream ) { newStream [ p ] = stream [ p ] } stream = newStream ; var fd = FS . nextfd ( fd_start , fd_end ) ; stream . fd = fd ; FS . streams [ fd ] = stream ; return stream } , closeStream : function ( fd ) { FS . streams [ fd ] = null } , chrdev_stream_ops : { open : function ( stream ) { var device = FS . getDevice ( stream . node . rdev ) ; stream . stream_ops = device . stream_ops ; if ( stream . stream_ops . open ) { stream . stream_ops . open ( stream ) } } , llseek : function ( ) { throw new FS . ErrnoError ( 29 ) } } , major : function ( dev ) { return dev > > 8 } , minor : function ( dev ) { return dev & 255 } , makedev : function ( ma , mi ) { return ma < < 8 | mi } , registerDevice : function ( dev , ops ) { FS . devices [ dev ] = { stream_ops : ops } } , getDevice : function ( dev ) { return FS . 
devices [ dev ] } , getMounts : function ( mount ) { var mounts = [ ] ; var check = [ mount ] ; while ( check . length ) { var m = check . pop ( ) ; mounts . push ( m ) ; check . push . apply ( check , m . mounts ) } return mounts } , syncfs : function ( populate , callback ) { if ( typeof populate = = = " function " ) { callback = populate ; populate = false } FS . syncFSRequests + + ; if ( FS . syncFSRequests > 1 ) { console . log ( " warning : " + FS . syncFSRequests + " FS . syncfs operations in flight at once , probably just doing extra work " ) } var mounts = FS . getMounts ( FS . root . mount ) ; var completed = 0 ; function doCallback ( err ) { FS . syncFSRequests - - ; return callback ( err ) } function done ( err ) { if ( err ) { if ( ! done . errored ) { done . errored = true ; return doCallback ( err ) } return } if ( + + completed > = mounts . length ) { doCallback ( null ) } } mounts . forEach ( function ( mount ) { if ( ! mount . type . syncfs ) { return done ( null ) } mount . type . syncfs ( mount , populate , done ) } ) } , mount : function ( type , opts , mountpoint ) { var root = mountpoint = = = " / " ; var pseudo = ! mountpoint ; var node ; if ( root & & FS . root ) { throw new FS . ErrnoError ( 16 ) } else if ( ! root & & ! pseudo ) { var lookup = FS . lookupPath ( mountpoint , { follow_mount : false } ) ; mountpoint = lookup . path ; node = lookup . node ; if ( FS . isMountpoint ( node ) ) { throw new FS . ErrnoError ( 16 ) } if ( ! FS . isDir ( node . mode ) ) { throw new FS . ErrnoError ( 20 ) } } var mount = { type : type , opts : opts , mountpoint : mountpoint , mounts : [ ] } ; var mountRoot = type . mount ( mount ) ; mountRoot . mount = mount ; mount . root = mountRoot ; if ( root ) { FS . root = mountRoot } else if ( node ) { node . mounted = mount ; if ( node . mount ) { node . mount . mounts . push ( mount ) } } return mountRoot } , unmount : function ( mountpoint ) { var lookup = FS . lookupPath ( mountpoint , { follow_mount : false } ) ; if ( ! FS . isMountpoint ( lookup . node ) ) { throw new FS . ErrnoError ( 22 ) } var node = lookup . node ; var mount = node . mounted ; var mounts = FS . getMounts ( mount ) ; Object . keys ( FS . nameTable ) . forEach ( function ( hash ) { var current = FS . nameTable [ hash ] ; while ( current ) { var next = current . name_next ; if ( mounts . indexOf ( current . mount ) ! = = - 1 ) { FS . destroyNode ( current ) } current = next } } ) ; node . mounted = null ; var idx = node . mount . mounts . indexOf ( mount ) ; node . mount . mounts . splice ( idx , 1 ) } , lookup : function ( parent , name ) { return parent . node_ops . lookup ( parent , name ) } , mknod : function ( path , mode , dev ) { var lookup = FS . lookupPath ( path , { parent : true } ) ; var parent = lookup . node ; var name = PATH . basename ( path ) ; if ( ! name | | name = = = " . " | | name = = = " . . " ) { throw new FS . ErrnoError ( 22 ) } var err = FS . mayCreate ( parent , name ) ; if ( err ) { throw new FS . ErrnoError ( err ) } if ( ! parent . node_ops . mknod ) { throw new FS . ErrnoError ( 1 ) } return parent . node_ops . mknod ( parent , name , mode , dev ) } , create : function ( path , mode ) { mode = mode ! = = undefined ? mode : 438 ; mode & = 4095 ; mode | = 32768 ; return FS . mknod ( path , mode , 0 ) } , mkdir : function ( path , mode ) { mode = mode ! = = undefined ? mode : 511 ; mode & = 511 | 512 ; mode | = 16384 ; return FS . mknod ( path , mode , 0 ) } , mkdirTree : function ( path , mode ) { var dirs = path . 
split ( " / " ) ; var d = " " ; for ( var i = 0 ; i < dirs . length ; + + i ) { if ( ! dirs [ i ] ) continue ; d + = " / " + dirs [ i ] ; try { FS . mkdir ( d , mode ) } catch ( e ) { if ( e . errno ! = 17 ) throw e } } } , mkdev : function ( path , mode , dev ) { if ( typeof dev = = = " undefined " ) { dev = mode ; mode = 438 } mode | = 8192 ; return FS . mknod ( path , mode , dev ) } , symlink : function ( oldpath , newpath ) { if ( ! PATH . resolve ( oldpath ) ) { throw new FS . ErrnoError ( 2 ) } var lookup = FS . lookupPath ( newpath , { parent : true } ) ; var parent = lookup . node ; if ( ! parent ) { throw new FS . ErrnoError ( 2 ) } var newname = PATH . basename ( newpath ) ; var err = FS . mayCreate ( parent , newname ) ; if ( err ) { throw new FS . ErrnoError ( err ) } if ( ! parent . node_ops . symlink ) { throw new FS . ErrnoError ( 1 ) } return parent . node_ops . symlink ( parent , newname , oldpath ) } , rename : function ( old_path , new_path ) { var old_dirname = PATH . dirname ( old_path ) ; var new_dirname = PATH . dirname ( new_path ) ; var old_name = PATH . basename ( old_path ) ; var new_name = PATH . basename ( new_path ) ; var lookup , old_dir , new_dir ; try { lookup = FS . lookupPath ( old_path , { parent : true } ) ; old_dir = lookup . node ; lookup = FS . lookupPath ( new_path , { parent : true } ) ; new_dir = lookup . node } catch ( e ) { throw new FS . ErrnoError ( 16 ) } if ( ! old_dir | | ! new_dir ) throw new FS . ErrnoError ( 2 ) ; if ( old_dir . mount ! = = new_dir . mount ) { throw new FS . ErrnoError ( 18 ) } var old_node = FS . lookupNode ( old_dir , old_name ) ; var relative = PATH . relative ( old_path , new_dirname ) ; if ( relative . charAt ( 0 ) ! = = " . " ) { throw new FS . ErrnoError ( 22 ) } relative = PATH . relative ( new_path , old_dirname ) ; if ( relative . charAt ( 0 ) ! = = " . " ) { throw new FS . ErrnoError ( 39 ) } var new_node ; try { new_node = FS . lookupNode ( new_dir , new_name ) } catch ( e ) { } if ( old_node = = = new_node ) { return } var isdir = FS . isDir ( old_node . mode ) ; var err = FS . mayDelete ( old_dir , old_name , isdir ) ; if ( err ) { throw new FS . ErrnoError ( err ) } err = new_node ? FS . mayDelete ( new_dir , new_name , isdir ) : FS . mayCreate ( new_dir , new_name ) ; if ( err ) { throw new FS . ErrnoError ( err ) } if ( ! old_dir . node_ops . rename ) { throw new FS . ErrnoError ( 1 ) } if ( FS . isMountpoint ( old_node ) | | new_node & & FS . isMountpoint ( new_node ) ) { throw new FS . ErrnoError ( 16 ) } if ( new_dir ! = = old_dir ) { err = FS . nodePermissions ( old_dir , " w " ) ; if ( err ) { throw new FS . ErrnoError ( err ) } } try { if ( FS . trackingDelegate [ " willMovePath " ] ) { FS . trackingDelegate [ " willMovePath " ] ( old_path , new_path ) } } catch ( e ) { console . log ( " FS . trackingDelegate [ ' willMovePath ' ] ( ' " + old_path + " ' , ' " + new_path + " ' ) threw an exception : " + e . message ) } FS . hashRemoveNode ( old_node ) ; try { old_dir . node_ops . rename ( old_node , new_dir , new_name ) } catch ( e ) { throw e } finally { FS . hashAddNode ( old_node ) } try { if ( FS . trackingDelegate [ " onMovePath " ] ) FS . trackingDelegate [ " onMovePath " ] ( old_path , new_path ) } catch ( e ) { console . log ( " FS . trackingDelegate [ ' onMovePath ' ] ( ' " + old_path + " ' , ' " + new_path + " ' ) threw an exception : " + e . message ) } } , rmdir : function ( path ) { var lookup = FS . lookupPath ( path , { parent : true } ) ; var parent = lookup . node ; var name = PATH . 
basename ( path ) ; var node = FS . lookupNode ( parent , name ) ; var err = FS . mayDelete ( parent , name , true ) ; if ( err ) { throw new FS . ErrnoError ( err ) } if ( ! parent . node_ops . rmdir ) { throw new FS . ErrnoError ( 1 ) } if ( FS . isMountpoint ( node ) ) { throw new FS . ErrnoError ( 16 ) } try { if ( FS . trackingDelegate [ " willDeletePath " ] ) { FS . trackingDelegate [ " willDeletePath " ] ( path ) } } catch ( e ) { console . log ( " FS . trackingDelegate [ ' willDeletePath ' ] ( ' " + path + " ' ) threw an exception : " + e . message ) } parent . node_ops . rmdir ( parent , name ) ; FS . destroyNode ( node ) ; try { if ( FS . trackingDelegate [ " onDeletePath " ] ) FS . trackingDelegate [ " onDeletePath " ] ( path ) } catch ( e ) { console . log ( " FS . trackingDelegate [ ' onDeletePath ' ] ( ' " + path + " ' ) threw an exception : " + e . message ) } } , readdir : function ( path ) { var lookup = FS . lookupPath ( path , { follow : true } ) ; var node = lookup . node ; if ( ! node . node_ops . readdir ) { throw new FS . ErrnoError ( 20 ) } return node . node_ops . readdir ( node ) } , unlink : function ( path ) { var lookup = FS . lookupPath ( path , { parent : true } ) ; var parent = lookup . node ; var name = PATH . basename ( path ) ; var node = FS . lookupNode ( parent , name ) ; var err = FS . mayDelete ( parent , name , false ) ; if ( err ) { throw new FS . ErrnoError ( err ) } if ( ! parent . node_ops . unlink ) { throw new FS . ErrnoError ( 1 ) } if ( FS . isMountpoint ( node ) ) { throw new FS . ErrnoError ( 16 ) } try { if ( FS . trackingDelegate [ " willDeletePath " ] ) { FS . trackingDelegate [ " willDeletePath " ] ( path ) } } catch ( e ) { console . log ( " FS . trackingDelegate [ ' willDeletePath ' ] ( ' " + path + " ' ) threw an exception : " + e . message ) } parent . node_ops . unlink ( parent , name ) ; FS . destroyNode ( node ) ; try { if ( FS . trackingDelegate [ " onDeletePath " ] ) FS . trackingDelegate [ " onDeletePath " ] ( path ) } catch ( e ) { console . log ( " FS . trackingDelegate [ ' onDeletePath ' ] ( ' " + path + " ' ) threw an exception : " + e . message ) } } , readlink : function ( path ) { var lookup = FS . lookupPath ( path ) ; var link = lookup . node ; if ( ! link ) { throw new FS . ErrnoError ( 2 ) } if ( ! link . node_ops . readlink ) { throw new FS . ErrnoError ( 22 ) } return PATH . resolve ( FS . getPath ( link . parent ) , link . node_ops . readlink ( link ) ) } , stat : function ( path , dontFollow ) { var lookup = FS . lookupPath ( path , { follow : ! dontFollow } ) ; var node = lookup . node ; if ( ! node ) { throw new FS . ErrnoError ( 2 ) } if ( ! node . node_ops . getattr ) { throw new FS . ErrnoError ( 1 ) } return node . node_ops . getattr ( node ) } , lstat : function ( path ) { return FS . stat ( path , true ) } , chmod : function ( path , mode , dontFollow ) { var node ; if ( typeof path = = = " string " ) { var lookup = FS . lookupPath ( path , { follow : ! dontFollow } ) ; node = lookup . node } else { node = path } if ( ! node . node_ops . setattr ) { throw new FS . ErrnoError ( 1 ) } node . node_ops . setattr ( node , { mode : mode & 4095 | node . mode & ~ 4095 , timestamp : Date . now ( ) } ) } , lchmod : function ( path , mode ) { FS . chmod ( path , mode , true ) } , fchmod : function ( fd , mode ) { var stream = FS . getStream ( fd ) ; if ( ! stream ) { throw new FS . ErrnoError ( 9 ) } FS . chmod ( stream . 
node , mode ) } , chown : function ( path , uid , gid , dontFollow ) { var node ; if ( typeof path = = = " string " ) { var lookup = FS . lookupPath ( path , { follow : ! dontFollow } ) ; node = lookup . node } else { node = path } if ( ! node . node_ops . setattr ) { throw new FS . ErrnoError ( 1 ) } node . node_ops . setattr ( node , { timestamp : Date . now ( ) } ) } , lchown : function ( path , uid , gid ) { FS . chown ( path , uid , gid , true ) } , fchown : function ( fd , uid , gid ) { var stream = FS . getStream ( fd ) ; if ( ! stream ) { throw new FS . ErrnoError ( 9 ) } FS . chown ( stream . node , uid , gid ) } , truncate : function ( path , len ) { if ( len < 0 ) { throw new FS . ErrnoError ( 22 ) } var node ; if ( typeof path = = = " string " ) { var lookup = FS . lookupPath ( path , { follow : true } ) ; node = lookup . node } else { node = path } if ( ! node . node_ops . setattr ) { throw new FS . ErrnoError ( 1 ) } if ( FS . isDir ( node . mode ) ) { throw new FS . ErrnoError ( 21 ) } if ( ! FS . isFile ( node . mode ) ) { throw new FS . ErrnoError ( 22 ) } var err = FS . nodePermissions ( node , " w " ) ; if ( err ) { throw new FS . ErrnoError ( err ) } node . node_ops . setattr ( node , { size : len , timestamp : Date . now ( ) } ) } , ftruncate : function ( fd , len ) { var stream = FS . getStream ( fd ) ; if ( ! stream ) { throw new FS . ErrnoError ( 9 ) } if ( ( stream . flags & 2097155 ) = = = 0 ) { throw new FS . ErrnoError ( 22 ) } FS . truncate ( stream . node , len ) } , utime : function ( path , atime , mtime ) { var lookup = FS . lookupPath ( path , { follow : true } ) ; var node = lookup . node ; node . node_ops . setattr ( node , { timestamp : Math . max ( atime , mtime ) } ) } , open : function ( path , flags , mode , fd_start , fd_end ) { if ( path = = = " " ) { throw new FS . ErrnoError ( 2 ) } flags = typeof flags = = = " string " ? FS . modeStringToFlags ( flags ) : flags ; mode = typeof mode = = = " undefined " ? 438 : mode ; if ( flags & 64 ) { mode = mode & 4095 | 32768 } else { mode = 0 } var node ; if ( typeof path = = = " object " ) { node = path } else { path = PATH . normalize ( path ) ; try { var lookup = FS . lookupPath ( path , { follow : ! ( flags & 131072 ) } ) ; node = lookup . node } catch ( e ) { } } var created = false ; if ( flags & 64 ) { if ( node ) { if ( flags & 128 ) { throw new FS . ErrnoError ( 17 ) } } else { node = FS . mknod ( path , mode , 0 ) ; created = true } } if ( ! node ) { throw new FS . ErrnoError ( 2 ) } if ( FS . isChrdev ( node . mode ) ) { flags & = ~ 512 } if ( flags & 65536 & & ! FS . isDir ( node . mode ) ) { throw new FS . ErrnoError ( 20 ) } if ( ! created ) { var err = FS . mayOpen ( node , flags ) ; if ( err ) { throw new FS . ErrnoError ( err ) } } if ( flags & 512 ) { FS . truncate ( node , 0 ) } flags & = ~ ( 128 | 512 ) ; var stream = FS . createStream ( { node : node , path : FS . getPath ( node ) , flags : flags , seekable : true , position : 0 , stream_ops : node . stream_ops , ungotten : [ ] , error : false } , fd_start , fd_end ) ; if ( stream . stream_ops . open ) { stream . stream_ops . open ( stream ) } if ( Module [ " logReadFiles " ] & & ! ( flags & 1 ) ) { if ( ! FS . readFiles ) FS . readFiles = { } ; if ( ! ( path in FS . readFiles ) ) { FS . readFiles [ path ] = 1 ; console . log ( " FS . trackingDelegate error on read file : " + path ) } } try { if ( FS . trackingDelegate [ " onOpenFile " ] ) { var trackingFlags = 0 ; if ( ( flags & 2097155 ) ! = = 1 ) { trackingFlags | = FS . tracking . 
openFlags . READ } if ( ( flags & 2097155 ) ! = = 0 ) { trackingFlags | = FS . tracking . openFlags . WRITE } FS . trackingDelegate [ " onOpenFile " ] ( path , trackingFlags ) } } catch ( e ) { console . log ( " FS . trackingDelegate [ ' onOpenFile ' ] ( ' " + path + " ' , flags ) threw an exception : " + e . message ) } return stream } , close : function ( stream ) { if ( FS . isClosed ( stream ) ) { throw new FS . ErrnoError ( 9 ) } if ( stream . getdents ) stream . getdents = null ; try { if ( stream . stream_ops . close ) { stream . stream_ops . close ( stream ) } } catch ( e ) { throw e } finally { FS . closeStream ( stream . fd ) } stream . fd = null } , isClosed : function ( stream ) { return stream . fd = = = null } , llseek : function ( stream , offset , whence ) { if ( FS . isClosed ( stream ) ) { throw new FS . ErrnoError ( 9 ) } if ( ! stream . seekable | | ! stream . stream_ops . llseek ) { throw new FS . ErrnoError ( 29 ) } if ( whence ! = 0 & & whence ! = 1 & & whence ! = 2 ) { throw new FS . ErrnoError ( 22 ) } stream . position = stream . stream_ops . llseek ( stream , offset , whence ) ; stream . ungotten = [ ] ; return stream . position } , read : function ( stream , buffer , offset , length , position ) { if ( length < 0 | | position < 0 ) { throw new FS . ErrnoError ( 22 ) } if ( FS . isClosed ( stream ) ) { throw new FS . ErrnoError ( 9 ) } if ( ( stream . flags & 2097155 ) = = = 1 ) { throw new FS . ErrnoError ( 9 ) } if ( FS . isDir ( stream . node . mode ) ) { throw new FS . ErrnoError ( 21 ) } if ( ! stream . stream_ops . read ) { throw new FS . ErrnoError ( 22 ) } var seeking = typeof position ! = = " undefined " ; if ( ! seeking ) { position = stream . position } else if ( ! stream . seekable ) { throw new FS . ErrnoError ( 29 ) } var bytesRead = stream . stream_ops . read ( stream , buffer , offset , length , position ) ; if ( ! seeking ) stream . position + = bytesRead ; return bytesRead } , write : function ( stream , buffer , offset , length , position , canOwn ) { if ( length < 0 | | position < 0 ) { throw new FS . ErrnoError ( 22 ) } if ( FS . isClosed ( stream ) ) { throw new FS . ErrnoError ( 9 ) } if ( ( stream . flags & 2097155 ) = = = 0 ) { throw new FS . ErrnoError ( 9 ) } if ( FS . isDir ( stream . node . mode ) ) { throw new FS . ErrnoError ( 21 ) } if ( ! stream . stream_ops . write ) { throw new FS . ErrnoError ( 22 ) } if ( stream . flags & 1024 ) { FS . llseek ( stream , 0 , 2 ) } var seeking = typeof position ! = = " undefined " ; if ( ! seeking ) { position = stream . position } else if ( ! stream . seekable ) { throw new FS . ErrnoError ( 29 ) } var bytesWritten = stream . stream_ops . write ( stream , buffer , offset , length , position , canOwn ) ; if ( ! seeking ) stream . position + = bytesWritten ; try { if ( stream . path & & FS . trackingDelegate [ " onWriteToFile " ] ) FS . trackingDelegate [ " onWriteToFile " ] ( stream . path ) } catch ( e ) { console . log ( " FS . trackingDelegate [ ' onWriteToFile ' ] ( ' " + stream . path + " ' ) threw an exception : " + e . message ) } return bytesWritten } , allocate : function ( stream , offset , length ) { if ( FS . isClosed ( stream ) ) { throw new FS . ErrnoError ( 9 ) } if ( offset < 0 | | length < = 0 ) { throw new FS . ErrnoError ( 22 ) } if ( ( stream . flags & 2097155 ) = = = 0 ) { throw new FS . ErrnoError ( 9 ) } if ( ! FS . isFile ( stream . node . mode ) & & ! FS . isDir ( stream . node . mode ) ) { throw new FS . ErrnoError ( 19 ) } if ( ! stream . stream_ops . 
allocate ) { throw new FS . ErrnoError ( 95 ) } stream . stream_ops . allocate ( stream , offset , length ) } , mmap : function ( stream , buffer , offset , length , position , prot , flags ) { if ( ( stream . flags & 2097155 ) = = = 1 ) { throw new FS . ErrnoError ( 13 ) } if ( ! stream . stream_ops . mmap ) { throw new FS . ErrnoError ( 19 ) } return stream . stream_ops . mmap ( stream , buffer , offset , length , position , prot , flags ) } , msync : function ( stream , buffer , offset , length , mmapFlags ) { if ( ! stream | | ! stream . stream_ops . msync ) { return 0 } return stream . stream_ops . msync ( stream , buffer , offset , length , mmapFlags ) } , munmap : function ( stream ) { return 0 } , ioctl : function ( stream , cmd , arg ) { if ( ! stream . stream_ops . ioctl ) { throw new FS . ErrnoError ( 25 ) } return stream . stream_ops . ioctl ( stream , cmd , arg ) } , readFile : function ( path , opts ) { opts = opts | | { } ; opts . flags = opts . flags | | " r " ; opts . encoding = opts . encoding | | " binary " ; if ( opts . encoding ! = = " utf8 " & & opts . encoding ! = = " binary " ) { throw new Error ( ' Invalid encoding type " ' + opts . encoding + ' " ' ) } var ret ; var stream = FS . open ( path , opts . flags ) ; var stat = FS . stat ( path ) ; var length = stat . size ; var buf = new Uint8Array ( length ) ; FS . read ( stream , buf , 0 , length , 0 ) ; if ( opts . encoding = = = " utf8 " ) { ret = UTF8ArrayToString ( buf , 0 ) } else if ( opts . encoding = = = " binary " ) { ret = buf } FS . close ( stream ) ; return ret } , writeFile : function ( path , data , opts ) { opts = opts | | { } ; opts . flags = opts . flags | | " w " ; var stream = FS . open ( path , opts . flags , opts . mode ) ; if ( typeof data = = = " string " ) { var buf = new Uint8Array ( lengthBytesUTF8 ( data ) + 1 ) ; var actualNumBytes = stringToUTF8Array ( data , buf , 0 , buf . length ) ; FS . write ( stream , buf , 0 , actualNumBytes , undefined , opts . canOwn ) } else if ( ArrayBuffer . isView ( data ) ) { FS . write ( stream , data , 0 , data . byteLength , undefined , opts . canOwn ) } else { throw new Error ( " Unsupported data type " ) } FS . close ( stream ) } , cwd : function ( ) { return FS . currentPath } , chdir : function ( path ) { var lookup = FS . lookupPath ( path , { follow : true } ) ; if ( lookup . node = = = null ) { throw new FS . ErrnoError ( 2 ) } if ( ! FS . isDir ( lookup . node . mode ) ) { throw new FS . ErrnoError ( 20 ) } var err = FS . nodePermissions ( lookup . node , " x " ) ; if ( err ) { throw new FS . ErrnoError ( err ) } FS . currentPath = lookup . path } , createDefaultDirectories : function ( ) { FS . mkdir ( " / tmp " ) ; FS . mkdir ( " / home " ) ; FS . mkdir ( " / home / web_user " ) } , createDefaultDevices : function ( ) { FS . mkdir ( " / dev " ) ; FS . registerDevice ( FS . makedev ( 1 , 3 ) , { read : function ( ) { return 0 } , write : function ( stream , buffer , offset , length , pos ) { return length } } ) ; FS . mkdev ( " / dev / null " , FS . makedev ( 1 , 3 ) ) ; TTY . register ( FS . makedev ( 5 , 0 ) , TTY . default_tty_ops ) ; TTY . register ( FS . makedev ( 6 , 0 ) , TTY . default_tty1_ops ) ; FS . mkdev ( " / dev / tty " , FS . makedev ( 5 , 0 ) ) ; FS . mkdev ( " / dev / tty1 " , FS . makedev ( 6 , 0 ) ) ; var random_device ; if ( typeof crypto = = = " object " & & typeof crypto [ " getRandomValues " ] = = = " function " ) { var randomBuffer = new Uint8Array ( 1 ) ; random_device = function ( ) { crypto . 
getRandomValues ( randomBuffer ) ; return randomBuffer [ 0 ] } } else if ( ENVIRONMENT_IS_NODE ) { try { var crypto_module = require ( " crypto " ) ; random_device = function ( ) { return crypto_module [ " randomBytes " ] ( 1 ) [ 0 ] } } catch ( e ) { random_device = function ( ) { return Math . random ( ) * 256 | 0 } } } else { random_device = function ( ) { abort ( " random_device " ) } } FS . createDevice ( " / dev " , " random " , random_device ) ; FS . createDevice ( " / dev " , " urandom " , random_device ) ; FS . mkdir ( " / dev / shm " ) ; FS . mkdir ( " / dev / shm / tmp " ) } , createSpecialDirectories : function ( ) { FS . mkdir ( " / proc " ) ; FS . mkdir ( " / proc / self " ) ; FS . mkdir ( " / proc / self / fd " ) ; FS . mount ( { mount : function ( ) { var node = FS . createNode ( " / proc / self " , " fd " , 16384 | 511 , 73 ) ; node . node_ops = { lookup : function ( parent , name ) { var fd = + name ; var stream = FS . getStream ( fd ) ; if ( ! stream ) throw new FS . ErrnoError ( 9 ) ; var ret = { parent : null , mount : { mountpoint : " fake " } , node_ops : { readlink : function ( ) { return stream . path } } } ; ret . parent = ret ; return ret } } ; return node } } , { } , " / proc / self / fd " ) } , createStandardStreams : function ( ) { if ( Module [ " stdin " ] ) { FS . createDevice ( " / dev " , " stdin " , Module [ " stdin " ] ) } else { FS . symlink ( " / dev / tty " , " / dev / stdin " ) } if ( Module [ " stdout " ] ) { FS . createDevice ( " / dev " , " stdout " , null , Module [ " stdout " ] ) } else { FS . symlink ( " / dev / tty " , " / dev / stdout " ) } if ( Module [ " stderr " ] ) { FS . createDevice ( " / dev " , " stderr " , null , Module [ " stderr " ] ) } else { FS . symlink ( " / dev / tty1 " , " / dev / stderr " ) } var stdin = FS . open ( " / dev / stdin " , " r " ) ; var stdout = FS . open ( " / dev / stdout " , " w " ) ; var stderr = FS . open ( " / dev / stderr " , " w " ) } , ensureErrnoError : function ( ) { if ( FS . ErrnoError ) return ; FS . ErrnoError = function ErrnoError ( errno , node ) { this . node = node ; this . setErrno = function ( errno ) { this . errno = errno } ; this . setErrno ( errno ) ; this . message = " FS error " ; if ( this . stack ) Object . defineProperty ( this , " stack " , { value : ( new Error ) . stack , writable : true } ) } ; FS . ErrnoError . prototype = new Error ; FS . ErrnoError . prototype . constructor = FS . ErrnoError ; [ 2 ] . forEach ( function ( code ) { FS . genericErrors [ code ] = new FS . ErrnoError ( code ) ; FS . genericErrors [ code ] . stack = " < generic error , no stack > " } ) } , staticInit : function ( ) { FS . ensureErrnoError ( ) ; FS . nameTable = new Array ( 4096 ) ; FS . mount ( MEMFS , { } , " / " ) ; FS . createDefaultDirectories ( ) ; FS . createDefaultDevices ( ) ; FS . createSpecialDirectories ( ) ; FS . filesystems = { " MEMFS " : MEMFS , " IDBFS " : IDBFS , " NODEFS " : NODEFS , " WORKERFS " : WORKERFS } } , init : function ( input , output , error ) { FS . init . initialized = true ; FS . ensureErrnoError ( ) ; Module [ " stdin " ] = input | | Module [ " stdin " ] ; Module [ " stdout " ] = output | | Module [ " stdout " ] ; Module [ " stderr " ] = error | | Module [ " stderr " ] ; FS . createStandardStreams ( ) } , quit : function ( ) { FS . init . initialized = false ; var fflush = Module [ " _fflush " ] ; if ( fflush ) fflush ( 0 ) ; for ( var i = 0 ; i < FS . streams . length ; i + + ) { var stream = FS . streams [ i ] ; if ( ! stream ) { continue } FS . 
close ( stream ) } } , getMode : function ( canRead , canWrite ) { var mode = 0 ; if ( canRead ) mode | = 292 | 73 ; if ( canWrite ) mode | = 146 ; return mode } , joinPath : function ( parts , forceRelative ) { var path = PATH . join . apply ( null , parts ) ; if ( forceRelative & & path [ 0 ] = = " / " ) path = path . substr ( 1 ) ; return path } , absolutePath : function ( relative , base ) { return PATH . resolve ( base , relative ) } , standardizePath : function ( path ) { return PATH . normalize ( path ) } , findObject : function ( path , dontResolveLastLink ) { var ret = FS . analyzePath ( path , dontResolveLastLink ) ; if ( ret . exists ) { return ret . object } else { ___setErrNo ( ret . error ) ; return null } } , analyzePath : function ( path , dontResolveLastLink ) { try { var lookup = FS . lookupPath ( path , { follow : ! dontResolveLastLink } ) ; path = lookup . path } catch ( e ) { } var ret = { isRoot : false , exists : false , error : 0 , name : null , path : null , object : null , parentExists : false , parentPath : null , parentObject : null } ; try { var lookup = FS . lookupPath ( path , { parent : true } ) ; ret . parentExists = true ; ret . parentPath = lookup . path ; ret . parentObject = lookup . node ; ret . name = PATH . basename ( path ) ; lookup = FS . lookupPath ( path , { follow : ! dontResolveLastLink } ) ; ret . exists = true ; ret . path = lookup . path ; ret . object = lookup . node ; ret . name = lookup . node . name ; ret . isRoot = lookup . path = = = " / " } catch ( e ) { ret . error = e . errno } return ret } , createFolder : function ( parent , name , canRead , canWrite ) { var path = PATH . join2 ( typeof parent = = = " string " ? parent : FS . getPath ( parent ) , name ) ; var mode = FS . getMode ( canRead , canWrite ) ; return FS . mkdir ( path , mode ) } , createPath : function ( parent , path , canRead , canWrite ) { parent = typeof parent = = = " string " ? parent : FS . getPath ( parent ) ; var parts = path . split ( " / " ) . reverse ( ) ; while ( parts . length ) { var part = parts . pop ( ) ; if ( ! part ) continue ; var current = PATH . join2 ( parent , part ) ; try { FS . mkdir ( current ) } catch ( e ) { } parent = current } return current } , createFile : function ( parent , name , properties , canRead , canWrite ) { var path = PATH . join2 ( typeof parent = = = " string " ? parent : FS . getPath ( parent ) , name ) ; var mode = FS . getMode ( canRead , canWrite ) ; return FS . create ( path , mode ) } , createDataFile : function ( parent , name , data , canRead , canWrite , canOwn ) { var path = name ? PATH . join2 ( typeof parent = = = " string " ? parent : FS . getPath ( parent ) , name ) : parent ; var mode = FS . getMode ( canRead , canWrite ) ; var node = FS . create ( path , mode ) ; if ( data ) { if ( typeof data = = = " string " ) { var arr = new Array ( data . length ) ; for ( var i = 0 , len = data . length ; i < len ; + + i ) arr [ i ] = data . charCodeAt ( i ) ; data = arr } FS . chmod ( node , mode | 146 ) ; var stream = FS . open ( node , " w " ) ; FS . write ( stream , data , 0 , data . length , 0 , canOwn ) ; FS . close ( stream ) ; FS . chmod ( node , mode ) } return node } , createDevice : function ( parent , name , input , output ) { var path = PATH . join2 ( typeof parent = = = " string " ? parent : FS . getPath ( parent ) , name ) ; var mode = FS . getMode ( ! ! input , ! ! output ) ; if ( ! FS . createDevice . major ) FS . createDevice . major = 64 ; var dev = FS . makedev ( FS . createDevice . 
major + + , 0 ) ; FS . registerDevice ( dev , { open : function ( stream ) { stream . seekable = false } , close : function ( stream ) { if ( output & & output . buffer & & output . buffer . length ) { output ( 10 ) } } , read : function ( stream , buffer , offset , length , pos ) { var bytesRead = 0 ; for ( var i = 0 ; i < length ; i + + ) { var result ; try { result = input ( ) } catch ( e ) { throw new FS . ErrnoError ( 5 ) } if ( result = = = undefined & & bytesRead = = = 0 ) { throw new FS . ErrnoError ( 11 ) } if ( result = = = null | | result = = = undefined ) break ; bytesRead + + ; buffer [ offset + i ] = result } if ( bytesRead ) { stream . node . timestamp = Date . now ( ) } return bytesRead } , write : function ( stream , buffer , offset , length , pos ) { for ( var i = 0 ; i < length ; i + + ) { try { output ( buffer [ offset + i ] ) } catch ( e ) { throw new FS . ErrnoError ( 5 ) } } if ( length ) { stream . node . timestamp = Date . now ( ) } return i } } ) ; return FS . mkdev ( path , mode , dev ) } , createLink : function ( parent , name , target , canRead , canWrite ) { var path = PATH . join2 ( typeof parent = = = " string " ? parent : FS . getPath ( parent ) , name ) ; return FS . symlink ( target , path ) } , forceLoadFile : function ( obj ) { if ( obj . isDevice | | obj . isFolder | | obj . link | | obj . contents ) return true ; var success = true ; if ( typeof XMLHttpRequest ! = = " undefined " ) { throw new Error ( " Lazy loading should have been performed ( contents set ) in createLazyFile , but it was not . Lazy loading only works in web workers . Use - - embed - file or - - preload - file in emcc on the main thread . " ) } else if ( Module [ " read " ] ) { try { obj . contents = intArrayFromString ( Module [ " read " ] ( obj . url ) , true ) ; obj . usedBytes = obj . contents . length } catch ( e ) { success = false } } else { throw new Error ( " Cannot load without read ( ) or XMLHttpRequest . " ) } if ( ! success ) ___setErrNo ( 5 ) ; return success } , createLazyFile : function ( parent , name , url , canRead , canWrite ) { function LazyUint8Array ( ) { this . lengthKnown = false ; this . chunks = [ ] } LazyUint8Array . prototype . get = function LazyUint8Array_get ( idx ) { if ( idx > this . length - 1 | | idx < 0 ) { return undefined } var chunkOffset = idx % this . chunkSize ; var chunkNum = idx / this . chunkSize | 0 ; return this . getter ( chunkNum ) [ chunkOffset ] } ; LazyUint8Array . prototype . setDataGetter = function LazyUint8Array_setDataGetter ( getter ) { this . getter = getter } ; LazyUint8Array . prototype . cacheLength = function LazyUint8Array_cacheLength ( ) { var xhr = new XMLHttpRequest ; xhr . open ( " HEAD " , url , false ) ; xhr . send ( null ) ; if ( ! ( xhr . status > = 200 & & xhr . status < 300 | | xhr . status = = = 304 ) ) throw new Error ( " Couldn ' t load " + url + " . Status : " + xhr . status ) ; var datalength = Number ( xhr . getResponseHeader ( " Content - length " ) ) ; var header ; var hasByteServing = ( header = xhr . getResponseHeader ( " Accept - Ranges " ) ) & & header = = = " bytes " ; var usesGzip = ( header = xhr . getResponseHeader ( " Content - Encoding " ) ) & & header = = = " gzip " ; var chunkSize = 1024 * 1024 ; if ( ! hasByteServing ) chunkSize = datalength ; var doXHR = function ( from , to ) { if ( from > to ) throw new Error ( " invalid range ( " + from + " , " + to + " ) or no bytes requested ! " ) ; if ( to > datalength - 1 ) throw new Error ( " only " + datalength + " bytes available ! 
programmer error ! " ) ; var xhr = new XMLHttpRequest ; xhr . open ( " GET " , url , false ) ; if ( datalength ! = = chunkSize ) xhr . setRequestHeader ( " Range " , " bytes = " + from + " - " + to ) ; if ( typeof Uint8Array ! = " undefined " ) xhr . responseType = " arraybuffer " ; if ( xhr . overrideMimeType ) { xhr . overrideMimeType ( " text / plain ; charset = x - user - defined " ) } xhr . send ( null ) ; if ( ! ( xhr . status > = 200 & & xhr . status < 300 | | xhr . status = = = 304 ) ) throw new Error ( " Couldn ' t load " + url + " . Status : " + xhr . status ) ; if ( xhr . response ! = = undefined ) { return new Uint8Array ( xhr . response | | [ ] ) } else { return intArrayFromString ( xhr . responseText | | " " , true ) } } ; var lazyArray = this ; lazyArray . setDataGetter ( function ( chunkNum ) { var start = chunkNum * chunkSize ; var end = ( chunkNum + 1 ) * chunkSize - 1 ; end = Math . min ( end , datalength - 1 ) ; if ( typeof lazyArray . chunks [ chunkNum ] = = = " undefined " ) { lazyArray . chunks [ chunkNum ] = doXHR ( start , end ) } if ( typeof lazyArray . chunks [ chunkNum ] = = = " undefined " ) throw new Error ( " doXHR failed ! " ) ; return lazyArray . chunks [ chunkNum ] } ) ; if ( usesGzip | | ! datalength ) { chunkSize = datalength = 1 ; datalength = this . getter ( 0 ) . length ; chunkSize = datalength ; console . log ( " LazyFiles on gzip forces download of the whole file when length is accessed " ) } this . _length = datalength ; this . _chunkSize = chunkSize ; this . lengthKnown = true } ; if ( typeof XMLHttpRequest ! = = " undefined " ) { if ( ! ENVIRONMENT_IS_WORKER ) throw " Cannot do synchronous binary XHRs outside webworkers in modern browsers . Use - - embed - file or - - preload - file in emcc " ; var lazyArray = new LazyUint8Array ; Object . defineProperties ( lazyArray , { length : { get : function ( ) { if ( ! this . lengthKnown ) { this . cacheLength ( ) } return this . _length } } , chunkSize : { get : function ( ) { if ( ! this . lengthKnown ) { this . cacheLength ( ) } return this . _chunkSize } } } ) ; var properties = { isDevice : false , contents : lazyArray } } else { var properties = { isDevice : false , url : url } } var node = FS . createFile ( parent , name , properties , canRead , canWrite ) ; if ( properties . contents ) { node . contents = properties . contents } else if ( properties . url ) { node . contents = null ; node . url = properties . url } Object . defineProperties ( node , { usedBytes : { get : function ( ) { return this . contents . length } } } ) ; var stream_ops = { } ; var keys = Object . keys ( node . stream_ops ) ; keys . forEach ( function ( key ) { var fn = node . stream_ops [ key ] ; stream_ops [ key ] = function forceLoadLazyFile ( ) { if ( ! FS . forceLoadFile ( node ) ) { throw new FS . ErrnoError ( 5 ) } return fn . apply ( null , arguments ) } } ) ; stream_ops . read = function stream_ops_read ( stream , buffer , offset , length , position ) { if ( ! FS . forceLoadFile ( node ) ) { throw new FS . ErrnoError ( 5 ) } var contents = stream . node . contents ; if ( position > = contents . length ) return 0 ; var size = Math . min ( contents . length - position , length ) ; if ( contents . slice ) { for ( var i = 0 ; i < size ; i + + ) { buffer [ offset + i ] = contents [ position + i ] } } else { for ( var i = 0 ; i < size ; i + + ) { buffer [ offset + i ] = contents . get ( position + i ) } } return size } ; node . 
stream_ops = stream_ops ; return node } , createPreloadedFile : function ( parent , name , url , canRead , canWrite , onload , onerror , dontCreateFile , canOwn , preFinish ) { Browser . init ( ) ; var fullname = name ? PATH . resolve ( PATH . join2 ( parent , name ) ) : parent ; var dep = getUniqueRunDependency ( " cp " + fullname ) ; function processData ( byteArray ) { function finish ( byteArray ) { if ( preFinish ) preFinish ( ) ; if ( ! dontCreateFile ) { FS . createDataFile ( parent , name , byteArray , canRead , canWrite , canOwn ) } if ( onload ) onload ( ) ; removeRunDependency ( dep ) } var handled = false ; Module [ " preloadPlugins " ] . forEach ( function ( plugin ) { if ( handled ) return ; if ( plugin [ " canHandle " ] ( fullname ) ) { plugin [ " handle " ] ( byteArray , fullname , finish , function ( ) { if ( onerror ) onerror ( ) ; removeRunDependency ( dep ) } ) ; handled = true } } ) ; if ( ! handled ) finish ( byteArray ) } addRunDependency ( dep ) ; if ( typeof url = = " string " ) { Browser . asyncLoad ( url , function ( byteArray ) { processData ( byteArray ) } , onerror ) } else { processData ( url ) } } , indexedDB : function ( ) { return window . indexedDB | | window . mozIndexedDB | | window . webkitIndexedDB | | window . msIndexedDB } , DB_NAME : function ( ) { return " EM_FS_ " + window . location . pathname } , DB_VERSION : 20 , DB_STORE_NAME : " FILE_DATA " , saveFilesToDB : function ( paths , onload , onerror ) { onload = onload | | function ( ) { } ; onerror = onerror | | function ( ) { } ; var indexedDB = FS . indexedDB ( ) ; try { var openRequest = indexedDB . open ( FS . DB_NAME ( ) , FS . DB_VERSION ) } catch ( e ) { return onerror ( e ) } openRequest . onupgradeneeded = function openRequest_onupgradeneeded ( ) { console . log ( " creating db " ) ; var db = openRequest . result ; db . createObjectStore ( FS . DB_STORE_NAME ) } ; openRequest . onsuccess = function openRequest_onsuccess ( ) { var db = openRequest . result ; var transaction = db . transaction ( [ FS . DB_STORE_NAME ] , " readwrite " ) ; var files = transaction . objectStore ( FS . DB_STORE_NAME ) ; var ok = 0 , fail = 0 , total = paths . length ; function finish ( ) { if ( fail = = 0 ) onload ( ) ; else onerror ( ) } paths . forEach ( function ( path ) { var putRequest = files . put ( FS . analyzePath ( path ) . object . contents , path ) ; putRequest . onsuccess = function putRequest_onsuccess ( ) { ok + + ; if ( ok + fail = = total ) finish ( ) } ; putRequest . onerror = function putRequest_onerror ( ) { fail + + ; if ( ok + fail = = total ) finish ( ) } } ) ; transaction . onerror = onerror } ; openRequest . onerror = onerror } , loadFilesFromDB : function ( paths , onload , onerror ) { onload = onload | | function ( ) { } ; onerror = onerror | | function ( ) { } ; var indexedDB = FS . indexedDB ( ) ; try { var openRequest = indexedDB . open ( FS . DB_NAME ( ) , FS . DB_VERSION ) } catch ( e ) { return onerror ( e ) } openRequest . onupgradeneeded = onerror ; openRequest . onsuccess = function openRequest_onsuccess ( ) { var db = openRequest . result ; try { var transaction = db . transaction ( [ FS . DB_STORE_NAME ] , " readonly " ) } catch ( e ) { onerror ( e ) ; return } var files = transaction . objectStore ( FS . DB_STORE_NAME ) ; var ok = 0 , fail = 0 , total = paths . length ; function finish ( ) { if ( fail = = 0 ) onload ( ) ; else onerror ( ) } paths . forEach ( function ( path ) { var getRequest = files . get ( path ) ; getRequest . 
onsuccess = function getRequest_onsuccess ( ) { if ( FS . analyzePath ( path ) . exists ) { FS . unlink ( path ) } FS . createDataFile ( PATH . dirname ( path ) , PATH . basename ( path ) , getRequest . result , true , true , true ) ; ok + + ; if ( ok + fail = = total ) finish ( ) } ; getRequest . onerror = function getRequest_onerror ( ) { fail + + ; if ( ok + fail = = total ) finish ( ) } } ) ; transaction . onerror = onerror } ; openRequest . onerror = onerror } } ; var ERRNO_CODES = { EPERM : 1 , ENOENT : 2 , ESRCH : 3 , EINTR : 4 , EIO : 5 , ENXIO : 6 , E2BIG : 7 , ENOEXEC : 8 , EBADF : 9 , ECHILD : 10 , EAGAIN : 11 , EWOULDBLOCK : 11 , ENOMEM : 12 , EACCES : 13 , EFAULT : 14 , ENOTBLK : 15 , EBUSY : 16 , EEXIST : 17 , EXDEV : 18 , ENODEV : 19 , ENOTDIR : 20 , EISDIR : 21 , EINVAL : 22 , ENFILE : 23 , EMFILE : 24 , ENOTTY : 25 , ETXTBSY : 26 , EFBIG : 27 , ENOSPC : 28 , ESPIPE : 29 , EROFS : 30 , EMLINK : 31 , EPIPE : 32 , EDOM : 33 , ERANGE : 34 , ENOMSG : 42 , EIDRM : 43 , ECHRNG : 44 , EL2NSYNC : 45 , EL3HLT : 46 , EL3RST : 47 , ELNRNG : 48 , EUNATCH : 49 , ENOCSI : 50 , EL2HLT : 51 , EDEADLK : 35 , ENOLCK : 37 , EBADE : 52 , EBADR : 53 , EXFULL : 54 , ENOANO : 55 , EBADRQC : 56 , EBADSLT : 57 , EDEADLOCK : 35 , EBFONT : 59 , ENOSTR : 60 , ENODATA : 61 , ETIME : 62 , ENOSR : 63 , ENONET : 64 , ENOPKG : 65 , EREMOTE : 66 , ENOLINK : 67 , EADV : 68 , ESRMNT : 69 , ECOMM : 70 , EPROTO : 71 , EMULTIHOP : 72 , EDOTDOT : 73 , EBADMSG : 74 , ENOTUNIQ : 76 , EBADFD : 77 , EREMCHG : 78 , ELIBACC : 79 , ELIBBAD : 80 , ELIBSCN : 81 , ELIBMAX : 82 , ELIBEXEC : 83 , ENOSYS : 38 , ENOTEMPTY : 39 , ENAMETOOLONG : 36 , ELOOP : 40 , EOPNOTSUPP : 95 , EPFNOSUPPORT : 96 , ECONNRESET : 104 , ENOBUFS : 105 , EAFNOSUPPORT : 97 , EPROTOTYPE : 91 , ENOTSOCK : 88 , ENOPROTOOPT : 92 , ESHUTDOWN : 108 , ECONNREFUSED : 111 , EADDRINUSE : 98 , ECONNABORTED : 103 , ENETUNREACH : 101 , ENETDOWN : 100 , ETIMEDOUT : 110 , EHOSTDOWN : 112 , EHOSTUNREACH : 113 , EINPROGRESS : 115 , EALREADY : 114 , EDESTADDRREQ : 89 , EMSGSIZE : 90 , EPROTONOSUPPORT : 93 , ESOCKTNOSUPPORT : 94 , EADDRNOTAVAIL : 99 , ENETRESET : 102 , EISCONN : 106 , ENOTCONN : 107 , ETOOMANYREFS : 109 , EUSERS : 87 , EDQUOT : 122 , ESTALE : 116 , ENOTSUP : 95 , ENOMEDIUM : 123 , EILSEQ : 84 , EOVERFLOW : 75 , ECANCELED : 125 , ENOTRECOVERABLE : 131 , EOWNERDEAD : 130 , ESTRPIPE : 86 } ; var SYSCALLS = { DEFAULT_POLLMASK : 5 , mappings : { } , umask : 511 , calculateAt : function ( dirfd , path ) { if ( path [ 0 ] ! = = " / " ) { var dir ; if ( dirfd = = = - 100 ) { dir = FS . cwd ( ) } else { var dirstream = FS . getStream ( dirfd ) ; if ( ! dirstream ) throw new FS . ErrnoError ( ERRNO_CODES . EBADF ) ; dir = dirstream . path } path = PATH . join2 ( dir , path ) } return path } , doStat : function ( func , path , buf ) { try { var stat = func ( path ) } catch ( e ) { if ( e & & e . node & & PATH . normalize ( path ) ! = = PATH . normalize ( FS . getPath ( e . node ) ) ) { return - ERRNO_CODES . ENOTDIR } throw e } HEAP32 [ buf > > 2 ] = stat . dev ; HEAP32 [ buf + 4 > > 2 ] = 0 ; HEAP32 [ buf + 8 > > 2 ] = stat . ino ; HEAP32 [ buf + 12 > > 2 ] = stat . mode ; HEAP32 [ buf + 16 > > 2 ] = stat . nlink ; HEAP32 [ buf + 20 > > 2 ] = stat . uid ; HEAP32 [ buf + 24 > > 2 ] = stat . gid ; HEAP32 [ buf + 28 > > 2 ] = stat . rdev ; HEAP32 [ buf + 32 > > 2 ] = 0 ; HEAP32 [ buf + 36 > > 2 ] = stat . size ; HEAP32 [ buf + 40 > > 2 ] = 4096 ; HEAP32 [ buf + 44 > > 2 ] = stat . blocks ; HEAP32 [ buf + 48 > > 2 ] = stat . atime . 
getTime ( ) / 1e3 | 0 ; HEAP32 [ buf + 52 > > 2 ] = 0 ; HEAP32 [ buf + 56 > > 2 ] = stat . mtime . getTime ( ) / 1e3 | 0 ; HEAP32 [ buf + 60 > > 2 ] = 0 ; HEAP32 [ buf + 64 > > 2 ] = stat . ctime . getTime ( ) / 1e3 | 0 ; HEAP32 [ buf + 68 > > 2 ] = 0 ; HEAP32 [ buf + 72 > > 2 ] = stat . ino ; return 0 } , doMsync : function ( addr , stream , len , flags ) { var buffer = new Uint8Array ( HEAPU8 . subarray ( addr , addr + len ) ) ; FS . msync ( stream , buffer , 0 , len , flags ) } , doMkdir : function ( path , mode ) { path = PATH . normalize ( path ) ; if ( path [ path . length - 1 ] = = = " / " ) path = path . substr ( 0 , path . length - 1 ) ; FS . mkdir ( path , mode , 0 ) ; return 0 } , doMknod : function ( path , mode , dev ) { switch ( mode & 61440 ) { case 32768 : case 8192 : case 24576 : case 4096 : case 49152 : break ; default : return - ERRNO_CODES . EINVAL } FS . mknod ( path , mode , dev ) ; return 0 } , doReadlink : function ( path , buf , bufsize ) { if ( bufsize < = 0 ) return - ERRNO_CODES . EINVAL ; var ret = FS . readlink ( path ) ; var len = Math . min ( bufsize , lengthBytesUTF8 ( ret ) ) ; var endChar = HEAP8 [ buf + len ] ; stringToUTF8 ( ret , buf , bufsize + 1 ) ; HEAP8 [ buf + len ] = endChar ; return len } , doAccess : function ( path , amode ) { if ( amode & ~ 7 ) { return - ERRNO_CODES . EINVAL } var node ; var lookup = FS . lookupPath ( path , { follow : true } ) ; node = lookup . node ; var perms = " " ; if ( amode & 4 ) perms + = " r " ; if ( amode & 2 ) perms + = " w " ; if ( amode & 1 ) perms + = " x " ; if ( perms & & FS . nodePermissions ( node , perms ) ) { return - ERRNO_CODES . EACCES } return 0 } , doDup : function ( path , flags , suggestFD ) { var suggest = FS . getStream ( suggestFD ) ; if ( suggest ) FS . close ( suggest ) ; return FS . open ( path , flags , 0 , suggestFD , suggestFD ) . fd } , doReadv : function ( stream , iov , iovcnt , offset ) { var ret = 0 ; for ( var i = 0 ; i < iovcnt ; i + + ) { var ptr = HEAP32 [ iov + i * 8 > > 2 ] ; var len = HEAP32 [ iov + ( i * 8 + 4 ) > > 2 ] ; var curr = FS . read ( stream , HEAP8 , ptr , len , offset ) ; if ( curr < 0 ) return - 1 ; ret + = curr ; if ( curr < len ) break } return ret } , doWritev : function ( stream , iov , iovcnt , offset ) { var ret = 0 ; for ( var i = 0 ; i < iovcnt ; i + + ) { var ptr = HEAP32 [ iov + i * 8 > > 2 ] ; var len = HEAP32 [ iov + ( i * 8 + 4 ) > > 2 ] ; var curr = FS . write ( stream , HEAP8 , ptr , len , offset ) ; if ( curr < 0 ) return - 1 ; ret + = curr } return ret } , varargs : 0 , get : function ( varargs ) { SYSCALLS . varargs + = 4 ; var ret = HEAP32 [ SYSCALLS . varargs - 4 > > 2 ] ; return ret } , getStr : function ( ) { var ret = UTF8ToString ( SYSCALLS . get ( ) ) ; return ret } , getStreamFromFD : function ( ) { var stream = FS . getStream ( SYSCALLS . get ( ) ) ; if ( ! stream ) throw new FS . ErrnoError ( ERRNO_CODES . EBADF ) ; return stream } , getSocketFromFD : function ( ) { var socket = SOCKFS . getSocket ( SYSCALLS . get ( ) ) ; if ( ! socket ) throw new FS . ErrnoError ( ERRNO_CODES . EBADF ) ; return socket } , getSocketAddress : function ( allowNull ) { var addrp = SYSCALLS . get ( ) , addrlen = SYSCALLS . get ( ) ; if ( allowNull & & addrp = = = 0 ) return null ; var info = __read_sockaddr ( addrp , addrlen ) ; if ( info . errno ) throw new FS . ErrnoError ( info . errno ) ; info . addr = DNS . lookup_addr ( info . addr ) | | info . addr ; return info } , get64 : function ( ) { var low = SYSCALLS . get ( ) , high = SYSCALLS . 
get ( ) ; return low } , getZero : function ( ) { SYSCALLS . get ( ) } } ; function ___syscall140 ( which , varargs ) { SYSCALLS . varargs = varargs ; try { var stream = SYSCALLS . getStreamFromFD ( ) , offset_high = SYSCALLS . get ( ) , offset_low = SYSCALLS . get ( ) , result = SYSCALLS . get ( ) , whence = SYSCALLS . get ( ) ; var offset = offset_low ; FS . llseek ( stream , offset , whence ) ; HEAP32 [ result > > 2 ] = stream . position ; if ( stream . getdents & & offset = = = 0 & & whence = = = 0 ) stream . getdents = null ; return 0 } catch ( e ) { if ( typeof FS = = = " undefined " | | ! ( e instanceof FS . ErrnoError ) ) abort ( e ) ; return - e . errno } } function ___syscall146 ( which , varargs ) { SYSCALLS . varargs = varargs ; try { var stream = SYSCALLS . getStreamFromFD ( ) , iov = SYSCALLS . get ( ) , iovcnt = SYSCALLS . get ( ) ; return SYSCALLS . doWritev ( stream , iov , iovcnt ) } catch ( e ) { if ( typeof FS = = = " undefined " | | ! ( e instanceof FS . ErrnoError ) ) abort ( e ) ; return - e . errno } } function ___syscall221 ( which , varargs ) { SYSCALLS . varargs = varargs ; try { var stream = SYSCALLS . getStreamFromFD ( ) , cmd = SYSCALLS . get ( ) ; switch ( cmd ) { case 0 : { var arg = SYSCALLS . get ( ) ; if ( arg < 0 ) { return - ERRNO_CODES . EINVAL } var newStream ; newStream = FS . open ( stream . path , stream . flags , 0 , arg ) ; return newStream . fd } case 1 : case 2 : return 0 ; case 3 : return stream . flags ; case 4 : { var arg = SYSCALLS . get ( ) ; stream . flags | = arg ; return 0 } case 12 : { var arg = SYSCALLS . get ( ) ; var offset = 0 ; HEAP16 [ arg + offset > > 1 ] = 2 ; return 0 } case 13 : case 14 : return 0 ; case 16 : case 8 : return - ERRNO_CODES . EINVAL ; case 9 : ___setErrNo ( ERRNO_CODES . EINVAL ) ; return - 1 ; default : { return - ERRNO_CODES . EINVAL } } } catch ( e ) { if ( typeof FS = = = " undefined " | | ! ( e instanceof FS . ErrnoError ) ) abort ( e ) ; return - e . errno } } function ___syscall3 ( which , varargs ) { SYSCALLS . varargs = varargs ; try { var stream = SYSCALLS . getStreamFromFD ( ) , buf = SYSCALLS . get ( ) , count = SYSCALLS . get ( ) ; return FS . read ( stream , HEAP8 , buf , count ) } catch ( e ) { if ( typeof FS = = = " undefined " | | ! ( e instanceof FS . ErrnoError ) ) abort ( e ) ; return - e . errno } } function ___syscall5 ( which , varargs ) { SYSCALLS . varargs = varargs ; try { var pathname = SYSCALLS . getStr ( ) , flags = SYSCALLS . get ( ) , mode = SYSCALLS . get ( ) ; var stream = FS . open ( pathname , flags , mode ) ; return stream . fd } catch ( e ) { if ( typeof FS = = = " undefined " | | ! ( e instanceof FS . ErrnoError ) ) abort ( e ) ; return - e . errno } } function ___syscall54 ( which , varargs ) { SYSCALLS . varargs = varargs ; try { var stream = SYSCALLS . getStreamFromFD ( ) , op = SYSCALLS . get ( ) ; switch ( op ) { case 21509 : case 21505 : { if ( ! stream . tty ) return - ERRNO_CODES . ENOTTY ; return 0 } case 21510 : case 21511 : case 21512 : case 21506 : case 21507 : case 21508 : { if ( ! stream . tty ) return - ERRNO_CODES . ENOTTY ; return 0 } case 21519 : { if ( ! stream . tty ) return - ERRNO_CODES . ENOTTY ; var argp = SYSCALLS . get ( ) ; HEAP32 [ argp > > 2 ] = 0 ; return 0 } case 21520 : { if ( ! stream . tty ) return - ERRNO_CODES . ENOTTY ; return - ERRNO_CODES . EINVAL } case 21531 : { var argp = SYSCALLS . get ( ) ; return FS . ioctl ( stream , op , argp ) } case 21523 : { if ( ! stream . tty ) return - ERRNO_CODES . 
ENOTTY ; return 0 } case 21524 : { if ( ! stream . tty ) return - ERRNO_CODES . ENOTTY ; return 0 } default : abort ( " bad ioctl syscall " + op ) } } catch ( e ) { if ( typeof FS = = = " undefined " | | ! ( e instanceof FS . ErrnoError ) ) abort ( e ) ; return - e . errno } } function ___syscall6 ( which , varargs ) { SYSCALLS . varargs = varargs ; try { var stream = SYSCALLS . getStreamFromFD ( ) ; FS . close ( stream ) ; return 0 } catch ( e ) { if ( typeof FS = = = " undefined " | | ! ( e instanceof FS . ErrnoError ) ) abort ( e ) ; return - e . errno } } var tupleRegistrations = { } ; function runDestructors ( destructors ) { while ( destructors . length ) { var ptr = destructors . pop ( ) ; var del = destructors . pop ( ) ; del ( ptr ) } } function simpleReadValueFromPointer ( pointer ) { return this [ " fromWireType " ] ( HEAPU32 [ pointer > > 2 ] ) } var awaitingDependencies = { } ; var registeredTypes = { } ; var typeDependencies = { } ; var char_0 = 48 ; var char_9 = 57 ; function makeLegalFunctionName ( name ) { if ( undefined = = = name ) { return " _unknown " } name = name . replace ( / [ ^ a - zA - Z0 - 9_ ] / g , " $ " ) ; var f = name . charCodeAt ( 0 ) ; if ( f > = char_0 & & f < = char_9 ) { return " _ " + name } else { return name } } function createNamedFunction ( name , body ) { name = makeLegalFunctionName ( name ) ; return new Function ( " body " , " return function " + name + " ( ) { \ n " + ' " use strict " ; ' + " return body . apply ( this , arguments ) ; \ n " + " } ; \ n " ) ( body ) } function extendError ( baseErrorType , errorName ) { var errorClass = createNamedFunction ( errorName , function ( message ) { this . name = errorName ; this . message = message ; var stack = new Error ( message ) . stack ; if ( stack ! = = undefined ) { this . stack = this . toString ( ) + " \ n " + stack . replace ( / ^ Error ( : [ ^ \ n ] * ) ? \ n / , " " ) } } ) ; errorClass . prototype = Object . create ( baseErrorType . prototype ) ; errorClass . prototype . constructor = errorClass ; errorClass . prototype . toString = function ( ) { if ( this . message = = = undefined ) { return this . name } else { return this . name + " : " + this . message } } ; return errorClass } var InternalError = undefined ; function throwInternalError ( message ) { throw new InternalError ( message ) } function whenDependentTypesAreResolved ( myTypes , dependentTypes , getTypeConverters ) { myTypes . forEach ( function ( type ) { typeDependencies [ type ] = dependentTypes } ) ; function onComplete ( typeConverters ) { var myTypeConverters = getTypeConverters ( typeConverters ) ; if ( myTypeConverters . length ! = = myTypes . length ) { throwInternalError ( " Mismatched type converter count " ) } for ( var i = 0 ; i < myTypes . length ; + + i ) { registerType ( myTypes [ i ] , myTypeConverters [ i ] ) } } var typeConverters = new Array ( dependentTypes . length ) ; var unregisteredTypes = [ ] ; var registered = 0 ; dependentTypes . forEach ( function ( dt , i ) { if ( registeredTypes . hasOwnProperty ( dt ) ) { typeConverters [ i ] = registeredTypes [ dt ] } else { unregisteredTypes . push ( dt ) ; if ( ! awaitingDependencies . hasOwnProperty ( dt ) ) { awaitingDependencies [ dt ] = [ ] } awaitingDependencies [ dt ] . push ( function ( ) { typeConverters [ i ] = registeredTypes [ dt ] ; + + registered ; if ( registered = = = unregisteredTypes . length ) { onComplete ( typeConverters ) } } ) } } ) ; if ( 0 = = = unregisteredTypes . 
length ) { onComplete ( typeConverters ) } } function __embind_finalize_value_array ( rawTupleType ) { var reg = tupleRegistrations [ rawTupleType ] ; delete tupleRegistrations [ rawTupleType ] ; var elements = reg . elements ; var elementsLength = elements . length ; var elementTypes = elements . map ( function ( elt ) { return elt . getterReturnType } ) . concat ( elements . map ( function ( elt ) { return elt . setterArgumentType } ) ) ; var rawConstructor = reg . rawConstructor ; var rawDestructor = reg . rawDestructor ; whenDependentTypesAreResolved ( [ rawTupleType ] , elementTypes , function ( elementTypes ) { elements . forEach ( function ( elt , i ) { var getterReturnType = elementTypes [ i ] ; var getter = elt . getter ; var getterContext = elt . getterContext ; var setterArgumentType = elementTypes [ i + elementsLength ] ; var setter = elt . setter ; var setterContext = elt . setterContext ; elt . read = function ( ptr ) { return getterReturnType [ " fromWireType " ] ( getter ( getterContext , ptr ) ) } ; elt . write = function ( ptr , o ) { var destructors = [ ] ; setter ( setterContext , ptr , setterArgumentType [ " toWireType " ] ( destructors , o ) ) ; runDestructors ( destructors ) } } ) ; return [ { name : reg . name , " fromWireType " : function ( ptr ) { var rv = new Array ( elementsLength ) ; for ( var i = 0 ; i < elementsLength ; + + i ) { rv [ i ] = elements [ i ] . read ( ptr ) } rawDestructor ( ptr ) ; return rv } , " toWireType " : function ( destructors , o ) { if ( elementsLength ! = = o . length ) { throw new TypeError ( " Incorrect number of tuple elements for " + reg . name + " : expected = " + elementsLength + " , actual = " + o . length ) } var ptr = rawConstructor ( ) ; for ( var i = 0 ; i < elementsLength ; + + i ) { elements [ i ] . write ( ptr , o [ i ] ) } if ( destructors ! = = null ) { destructors . push ( rawDestructor , ptr ) } return ptr } , " argPackAdvance " : 8 , " readValueFromPointer " : simpleReadValueFromPointer , destructorFunction : rawDestructor } ] } ) } var structRegistrations = { } ; function __embind_finalize_value_object ( structType ) { var reg = structRegistrations [ structType ] ; delete structRegistrations [ structType ] ; var rawConstructor = reg . rawConstructor ; var rawDestructor = reg . rawDestructor ; var fieldRecords = reg . fields ; var fieldTypes = fieldRecords . map ( function ( field ) { return field . getterReturnType } ) . concat ( fieldRecords . map ( function ( field ) { return field . setterArgumentType } ) ) ; whenDependentTypesAreResolved ( [ structType ] , fieldTypes , function ( fieldTypes ) { var fields = { } ; fieldRecords . forEach ( function ( field , i ) { var fieldName = field . fieldName ; var getterReturnType = fieldTypes [ i ] ; var getter = field . getter ; var getterContext = field . getterContext ; var setterArgumentType = fieldTypes [ i + fieldRecords . length ] ; var setter = field . setter ; var setterContext = field . setterContext ; fields [ fieldName ] = { read : function ( ptr ) { return getterReturnType [ " fromWireType " ] ( getter ( getterContext , ptr ) ) } , write : function ( ptr , o ) { var destructors = [ ] ; setter ( setterContext , ptr , setterArgumentType [ " toWireType " ] ( destructors , o ) ) ; runDestructors ( destructors ) } } } ) ; return [ { name : reg . name , " fromWireType " : function ( ptr ) { var rv = { } ; for ( var i in fields ) { rv [ i ] = fields [ i ] . 
read ( ptr ) } rawDestructor ( ptr ) ; return rv } , " toWireType " : function ( destructors , o ) { for ( var fieldName in fields ) { if ( ! ( fieldName in o ) ) { throw new TypeError ( " Missing field " ) } } var ptr = rawConstructor ( ) ; for ( fieldName in fields ) { fields [ fieldName ] . write ( ptr , o [ fieldName ] ) } if ( destructors ! = = null ) { destructors . push ( rawDestructor , ptr ) } return ptr } , " argPackAdvance " : 8 , " readValueFromPointer " : simpleReadValueFromPointer , destructorFunction : rawDestructor } ] } ) } function getShiftFromSize ( size ) { switch ( size ) { case 1 : return 0 ; case 2 : return 1 ; case 4 : return 2 ; case 8 : return 3 ; default : throw new TypeError ( " Unknown type size : " + size ) } } function embind_init_charCodes ( ) { var codes = new Array ( 256 ) ; for ( var i = 0 ; i < 256 ; + + i ) { codes [ i ] = String . fromCharCode ( i ) } embind_charCodes = codes } var embind_charCodes = undefined ; function readLatin1String ( ptr ) { var ret = " " ; var c = ptr ; while ( HEAPU8 [ c ] ) { ret + = embind_charCodes [ HEAPU8 [ c + + ] ] } return ret } var BindingError = undefined ; function throwBindingError ( message ) { throw new BindingError ( message ) } function registerType ( rawType , registeredInstance , options ) { options = options | | { } ; if ( ! ( " argPackAdvance " in registeredInstance ) ) { throw new TypeError ( " registerType registeredInstance requires argPackAdvance " ) } var name = registeredInstance . name ; if ( ! rawType ) { throwBindingError ( ' type " ' + name + ' " must have a positive integer typeid pointer ' ) } if ( registeredTypes . hasOwnProperty ( rawType ) ) { if ( options . ignoreDuplicateRegistrations ) { return } else { throwBindingError ( " Cannot register type ' " + name + " ' twice " ) } } registeredTypes [ rawType ] = registeredInstance ; delete typeDependencies [ rawType ] ; if ( awaitingDependencies . hasOwnProperty ( rawType ) ) { var callbacks = awaitingDependencies [ rawType ] ; delete awaitingDependencies [ rawType ] ; callbacks . forEach ( function ( cb ) { cb ( ) } ) } } function __embind_register_bool ( rawType , name , size , trueValue , falseValue ) { var shift = getShiftFromSize ( size ) ; name = readLatin1String ( name ) ; registerType ( rawType , { name : name , " fromWireType " : function ( wt ) { return ! ! wt } , " toWireType " : function ( destructors , o ) { return o ? trueValue : falseValue } , " argPackAdvance " : 8 , " readValueFromPointer " : function ( pointer ) { var heap ; if ( size = = = 1 ) { heap = HEAP8 } else if ( size = = = 2 ) { heap = HEAP16 } else if ( size = = = 4 ) { heap = HEAP32 } else { throw new TypeError ( " Unknown boolean type size : " + name ) } return this [ " fromWireType " ] ( heap [ pointer > > shift ] ) } , destructorFunction : null } ) } function ClassHandle_isAliasOf ( other ) { if ( ! ( this instanceof ClassHandle ) ) { return false } if ( ! ( other instanceof ClassHandle ) ) { return false } var leftClass = this . $ $ . ptrType . registeredClass ; var left = this . $ $ . ptr ; var rightClass = other . $ $ . ptrType . registeredClass ; var right = other . $ $ . ptr ; while ( leftClass . baseClass ) { left = leftClass . upcast ( left ) ; leftClass = leftClass . baseClass } while ( rightClass . baseClass ) { right = rightClass . upcast ( right ) ; rightClass = rightClass . baseClass } return leftClass = = = rightClass & & left = = = right } function shallowCopyInternalPointer ( o ) { return { count : o . count , deleteScheduled : o . 
deleteScheduled , preservePointerOnDelete : o . preservePointerOnDelete , ptr : o . ptr , ptrType : o . ptrType , smartPtr : o . smartPtr , smartPtrType : o . smartPtrType } } function throwInstanceAlreadyDeleted ( obj ) { function getInstanceTypeName ( handle ) { return handle . $ $ . ptrType . registeredClass . name } throwBindingError ( getInstanceTypeName ( obj ) + " instance already deleted " ) } function ClassHandle_clone ( ) { if ( ! this . $ $ . ptr ) { throwInstanceAlreadyDeleted ( this ) } if ( this . $ $ . preservePointerOnDelete ) { this . $ $ . count . value + = 1 ; return this } else { var clone = Object . create ( Object . getPrototypeOf ( this ) , { $ $ : { value : shallowCopyInternalPointer ( this . $ $ ) } } ) ; clone . $ $ . count . value + = 1 ; clone . $ $ . deleteScheduled = false ; return clone } } function runDestructor ( handle ) { var $ $ = handle . $ $ ; if ( $ $ . smartPtr ) { $ $ . smartPtrType . rawDestructor ( $ $ . smartPtr ) } else { $ $ . ptrType . registeredClass . rawDestructor ( $ $ . ptr ) } } function ClassHandle_delete ( ) { if ( ! this . $ $ . ptr ) { throwInstanceAlreadyDeleted ( this ) } if ( this . $ $ . deleteScheduled & & ! this . $ $ . preservePointerOnDelete ) { throwBindingError ( " Object already scheduled for deletion " ) } this . $ $ . count . value - = 1 ; var toDelete = 0 = = = this . $ $ . count . value ; if ( toDelete ) { runDestructor ( this ) } if ( ! this . $ $ . preservePointerOnDelete ) { this . $ $ . smartPtr = undefined ; this . $ $ . ptr = undefined } } function ClassHandle_isDeleted ( ) { return ! this . $ $ . ptr } var delayFunction = undefined ; var deletionQueue = [ ] ; function flushPendingDeletes ( ) { while ( deletionQueue . length ) { var obj = deletionQueue . pop ( ) ; obj . $ $ . deleteScheduled = false ; obj [ " delete " ] ( ) } } function ClassHandle_deleteLater ( ) { if ( ! this . $ $ . ptr ) { throwInstanceAlreadyDeleted ( this ) } if ( this . $ $ . deleteScheduled & & ! this . $ $ . preservePointerOnDelete ) { throwBindingError ( " Object already scheduled for deletion " ) } deletionQueue . push ( this ) ; if ( deletionQueue . length = = = 1 & & delayFunction ) { delayFunction ( flushPendingDeletes ) } this . $ $ . deleteScheduled = true ; return this } function init_ClassHandle ( ) { ClassHandle . prototype [ " isAliasOf " ] = ClassHandle_isAliasOf ; ClassHandle . prototype [ " clone " ] = ClassHandle_clone ; ClassHandle . prototype [ " delete " ] = ClassHandle_delete ; ClassHandle . prototype [ " isDeleted " ] = ClassHandle_isDeleted ; ClassHandle . prototype [ " deleteLater " ] = ClassHandle_deleteLater } function ClassHandle ( ) { } var registeredPointers = { } ; function ensureOverloadTable ( proto , methodName , humanName ) { if ( undefined = = = proto [ methodName ] . overloadTable ) { var prevFunc = proto [ methodName ] ; proto [ methodName ] = function ( ) { if ( ! proto [ methodName ] . overloadTable . hasOwnProperty ( arguments . length ) ) { throwBindingError ( " Function ' " + humanName + " ' called with an invalid number of arguments ( " + arguments . length + " ) - expects one of ( " + proto [ methodName ] . overloadTable + " ) ! " ) } return proto [ methodName ] . overloadTable [ arguments . length ] . apply ( this , arguments ) } ; proto [ methodName ] . overloadTable = [ ] ; proto [ methodName ] . overloadTable [ prevFunc . argCount ] = prevFunc } } function exposePublicSymbol ( name , value , numArguments ) { if ( Module . 
hasOwnProperty ( name ) ) { if ( undefined = = = numArguments | | undefined ! = = Module [ name ] . overloadTable & & undefined ! = = Module [ name ] . overloadTable [ numArguments ] ) { throwBindingError ( " Cannot register public name ' " + name + " ' twice " ) } ensureOverloadTable ( Module , name , name ) ; if ( Module . hasOwnProperty ( numArguments ) ) { throwBindingError ( " Cannot register multiple overloads of a function with the same number of arguments ( " + numArguments + " ) ! " ) } Module [ name ] . overloadTable [ numArguments ] = value } else { Module [ name ] = value ; if ( undefined ! = = numArguments ) { Module [ name ] . numArguments = numArguments } } } function RegisteredClass ( name , constructor , instancePrototype , rawDestructor , baseClass , getActualType , upcast , downcast ) { this . name = name ; this . constructor = constructor ; this . instancePrototype = instancePrototype ; this . rawDestructor = rawDestructor ; this . baseClass = baseClass ; this . getActualType = getActualType ; this . upcast = upcast ; this . downcast = downcast ; this . pureVirtualFunctions = [ ] } function upcastPointer ( ptr , ptrClass , desiredClass ) { while ( ptrClass ! = = desiredClass ) { if ( ! ptrClass . upcast ) { throwBindingError ( " Expected null or instance of " + desiredClass . name + " , got an instance of " + ptrClass . name ) } ptr = ptrClass . upcast ( ptr ) ; ptrClass = ptrClass . baseClass } return ptr } function constNoSmartPtrRawPointerToWireType ( destructors , handle ) { if ( handle = = = null ) { if ( this . isReference ) { throwBindingError ( " null is not a valid " + this . name ) } return 0 } if ( ! handle . $ $ ) { throwBindingError ( ' Cannot pass " ' + _embind_repr ( handle ) + ' " as a ' + this . name ) } if ( ! handle . $ $ . ptr ) { throwBindingError ( " Cannot pass deleted object as a pointer of type " + this . name ) } var handleClass = handle . $ $ . ptrType . registeredClass ; var ptr = upcastPointer ( handle . $ $ . ptr , handleClass , this . registeredClass ) ; return ptr } function genericPointerToWireType ( destructors , handle ) { var ptr ; if ( handle = = = null ) { if ( this . isReference ) { throwBindingError ( " null is not a valid " + this . name ) } if ( this . isSmartPointer ) { ptr = this . rawConstructor ( ) ; if ( destructors ! = = null ) { destructors . push ( this . rawDestructor , ptr ) } return ptr } else { return 0 } } if ( ! handle . $ $ ) { throwBindingError ( ' Cannot pass " ' + _embind_repr ( handle ) + ' " as a ' + this . name ) } if ( ! handle . $ $ . ptr ) { throwBindingError ( " Cannot pass deleted object as a pointer of type " + this . name ) } if ( ! this . isConst & & handle . $ $ . ptrType . isConst ) { throwBindingError ( " Cannot convert argument of type " + ( handle . $ $ . smartPtrType ? handle . $ $ . smartPtrType . name : handle . $ $ . ptrType . name ) + " to parameter type " + this . name ) } var handleClass = handle . $ $ . ptrType . registeredClass ; ptr = upcastPointer ( handle . $ $ . ptr , handleClass , this . registeredClass ) ; if ( this . isSmartPointer ) { if ( undefined = = = handle . $ $ . smartPtr ) { throwBindingError ( " Passing raw pointer to smart pointer is illegal " ) } switch ( this . sharingPolicy ) { case 0 : if ( handle . $ $ . smartPtrType = = = this ) { ptr = handle . $ $ . smartPtr } else { throwBindingError ( " Cannot convert argument of type " + ( handle . $ $ . smartPtrType ? handle . $ $ . smartPtrType . name : handle . $ $ . ptrType . name ) + " to parameter type " + this . 
name ) } break ; case 1 : ptr = handle . $ $ . smartPtr ; break ; case 2 : if ( handle . $ $ . smartPtrType = = = this ) { ptr = handle . $ $ . smartPtr } else { var clonedHandle = handle [ " clone " ] ( ) ; ptr = this . rawShare ( ptr , __emval_register ( function ( ) { clonedHandle [ " delete " ] ( ) } ) ) ; if ( destructors ! = = null ) { destructors . push ( this . rawDestructor , ptr ) } } break ; default : throwBindingError ( " Unsupporting sharing policy " ) } } return ptr } function nonConstNoSmartPtrRawPointerToWireType ( destructors , handle ) { if ( handle = = = null ) { if ( this . isReference ) { throwBindingError ( " null is not a valid " + this . name ) } return 0 } if ( ! handle . $ $ ) { throwBindingError ( ' Cannot pass " ' + _embind_repr ( handle ) + ' " as a ' + this . name ) } if ( ! handle . $ $ . ptr ) { throwBindingError ( " Cannot pass deleted object as a pointer of type " + this . name ) } if ( handle . $ $ . ptrType . isConst ) { throwBindingError ( " Cannot convert argument of type " + handle . $ $ . ptrType . name + " to parameter type " + this . name ) } var handleClass = handle . $ $ . ptrType . registeredClass ; var ptr = upcastPointer ( handle . $ $ . ptr , handleClass , this . registeredClass ) ; return ptr } function RegisteredPointer_getPointee ( ptr ) { if ( this . rawGetPointee ) { ptr = this . rawGetPointee ( ptr ) } return ptr } function RegisteredPointer_destructor ( ptr ) { if ( this . rawDestructor ) { this . rawDestructor ( ptr ) } } function RegisteredPointer_deleteObject ( handle ) { if ( handle ! = = null ) { handle [ " delete " ] ( ) } } function downcastPointer ( ptr , ptrClass , desiredClass ) { if ( ptrClass = = = desiredClass ) { return ptr } if ( undefined = = = desiredClass . baseClass ) { return null } var rv = downcastPointer ( ptr , ptrClass , desiredClass . baseClass ) ; if ( rv = = = null ) { return null } return desiredClass . downcast ( rv ) } function getInheritedInstanceCount ( ) { return Object . keys ( registeredInstances ) . length } function getLiveInheritedInstances ( ) { var rv = [ ] ; for ( var k in registeredInstances ) { if ( registeredInstances . hasOwnProperty ( k ) ) { rv . push ( registeredInstances [ k ] ) } } return rv } function setDelayFunction ( fn ) { delayFunction = fn ; if ( deletionQueue . length & & delayFunction ) { delayFunction ( flushPendingDeletes ) } } function init_embind ( ) { Module [ " getInheritedInstanceCount " ] = getInheritedInstanceCount ; Module [ " getLiveInheritedInstances " ] = getLiveInheritedInstances ; Module [ " flushPendingDeletes " ] = flushPendingDeletes ; Module [ " setDelayFunction " ] = setDelayFunction } var registeredInstances = { } ; function getBasestPointer ( class_ , ptr ) { if ( ptr = = = undefined ) { throwBindingError ( " ptr should not be undefined " ) } while ( class_ . baseClass ) { ptr = class_ . upcast ( ptr ) ; class_ = class_ . baseClass } return ptr } function getInheritedInstance ( class_ , ptr ) { ptr = getBasestPointer ( class_ , ptr ) ; return registeredInstances [ ptr ] } function makeClassHandle ( prototype , record ) { if ( ! record . ptrType | | ! record . ptr ) { throwInternalError ( " makeClassHandle requires ptr and ptrType " ) } var hasSmartPtrType = ! ! record . smartPtrType ; var hasSmartPtr = ! ! record . smartPtr ; if ( hasSmartPtrType ! = = hasSmartPtr ) { throwInternalError ( " Both smartPtrType and smartPtr must be specified " ) } record . count = { value : 1 } ; return Object . 
create ( prototype , { $ $ : { value : record } } ) } function RegisteredPointer_fromWireType ( ptr ) { var rawPointer = this . getPointee ( ptr ) ; if ( ! rawPointer ) { this . destructor ( ptr ) ; return null } var registeredInstance = getInheritedInstance ( this . registeredClass , rawPointer ) ; if ( undefined ! = = registeredInstance ) { if ( 0 = = = registeredInstance . $ $ . count . value ) { registeredInstance . $ $ . ptr = rawPointer ; registeredInstance . $ $ . smartPtr = ptr ; return registeredInstance [ " clone " ] ( ) } else { var rv = registeredInstance [ " clone " ] ( ) ; this . destructor ( ptr ) ; return rv } } function makeDefaultHandle ( ) { if ( this . isSmartPointer ) { return makeClassHandle ( this . registeredClass . instancePrototype , { ptrType : this . pointeeType , ptr : rawPointer , smartPtrType : this , smartPtr : ptr } ) } else { return makeClassHandle ( this . registeredClass . instancePrototype , { ptrType : this , ptr : ptr } ) } } var actualType = this . registeredClass . getActualType ( rawPointer ) ; var registeredPointerRecord = registeredPointers [ actualType ] ; if ( ! registeredPointerRecord ) { return makeDefaultHandle . call ( this ) } var toType ; if ( this . isConst ) { toType = registeredPointerRecord . constPointerType } else { toType = registeredPointerRecord . pointerType } var dp = downcastPointer ( rawPointer , this . registeredClass , toType . registeredClass ) ; if ( dp = = = null ) { return makeDefaultHandle . call ( this ) } if ( this . isSmartPointer ) { return makeClassHandle ( toType . registeredClass . instancePrototype , { ptrType : toType , ptr : dp , smartPtrType : this , smartPtr : ptr } ) } else { return makeClassHandle ( toType . registeredClass . instancePrototype , { ptrType : toType , ptr : dp } ) } } function init_RegisteredPointer ( ) { RegisteredPointer . prototype . getPointee = RegisteredPointer_getPointee ; RegisteredPointer . prototype . destructor = RegisteredPointer_destructor ; RegisteredPointer . prototype [ " argPackAdvance " ] = 8 ; RegisteredPointer . prototype [ " readValueFromPointer " ] = simpleReadValueFromPointer ; RegisteredPointer . prototype [ " deleteObject " ] = RegisteredPointer_deleteObject ; RegisteredPointer . prototype [ " fromWireType " ] = RegisteredPointer_fromWireType } function RegisteredPointer ( name , registeredClass , isReference , isConst , isSmartPointer , pointeeType , sharingPolicy , rawGetPointee , rawConstructor , rawShare , rawDestructor ) { this . name = name ; this . registeredClass = registeredClass ; this . isReference = isReference ; this . isConst = isConst ; this . isSmartPointer = isSmartPointer ; this . pointeeType = pointeeType ; this . sharingPolicy = sharingPolicy ; this . rawGetPointee = rawGetPointee ; this . rawConstructor = rawConstructor ; this . rawShare = rawShare ; this . rawDestructor = rawDestructor ; if ( ! isSmartPointer & & registeredClass . baseClass = = = undefined ) { if ( isConst ) { this [ " toWireType " ] = constNoSmartPtrRawPointerToWireType ; this . destructorFunction = null } else { this [ " toWireType " ] = nonConstNoSmartPtrRawPointerToWireType ; this . destructorFunction = null } } else { this [ " toWireType " ] = genericPointerToWireType } } function replacePublicSymbol ( name , value , numArguments ) { if ( ! Module . hasOwnProperty ( name ) ) { throwInternalError ( " Replacing nonexistant public symbol " ) } if ( undefined ! = = Module [ name ] . overloadTable & & undefined ! = = numArguments ) { Module [ name ] . 
overloadTable [ numArguments ] = value } else { Module [ name ] = value ; Module [ name ] . argCount = numArguments } } function embind__requireFunction ( signature , rawFunction ) { signature = readLatin1String ( signature ) ; function makeDynCaller ( dynCall ) { var args = [ ] ; for ( var i = 1 ; i < signature . length ; + + i ) { args . push ( " a " + i ) } var name = " dynCall_ " + signature + " _ " + rawFunction ; var body = " return function " + name + " ( " + args . join ( " , " ) + " ) { \ n " ; body + = " return dynCall ( rawFunction " + ( args . length ? " , " : " " ) + args . join ( " , " ) + " ) ; \ n " ; body + = " } ; \ n " ; return new Function ( " dynCall " , " rawFunction " , body ) ( dynCall , rawFunction ) } var fp ; if ( Module [ " FUNCTION_TABLE_ " + signature ] ! = = undefined ) { fp = Module [ " FUNCTION_TABLE_ " + signature ] [ rawFunction ] } else if ( typeof FUNCTION_TABLE ! = = " undefined " ) { fp = FUNCTION_TABLE [ rawFunction ] } else { var dc = Module [ " dynCall_ " + signature ] ; if ( dc = = = undefined ) { dc = Module [ " dynCall_ " + signature . replace ( / f / g , " d " ) ] ; if ( dc = = = undefined ) { throwBindingError ( " No dynCall invoker for signature : " + signature ) } } fp = makeDynCaller ( dc ) } if ( typeof fp ! = = " function " ) { throwBindingError ( " unknown function pointer with signature " + signature + " : " + rawFunction ) } return fp } var UnboundTypeError = undefined ; function getTypeName ( type ) { var ptr = ___getTypeName ( type ) ; var rv = readLatin1String ( ptr ) ; _free ( ptr ) ; return rv } function throwUnboundTypeError ( message , types ) { var unboundTypes = [ ] ; var seen = { } ; function visit ( type ) { if ( seen [ type ] ) { return } if ( registeredTypes [ type ] ) { return } if ( typeDependencies [ type ] ) { typeDependencies [ type ] . forEach ( visit ) ; return } unboundTypes . push ( type ) ; seen [ type ] = true } types . forEach ( visit ) ; throw new UnboundTypeError ( message + " : " + unboundTypes . map ( getTypeName ) . join ( [ " , " ] ) ) } function __embind_register_class ( rawType , rawPointerType , rawConstPointerType , baseClassRawType , getActualTypeSignature , getActualType , upcastSignature , upcast , downcastSignature , downcast , name , destructorSignature , rawDestructor ) { name = readLatin1String ( name ) ; getActualType = embind__requireFunction ( getActualTypeSignature , getActualType ) ; if ( upcast ) { upcast = embind__requireFunction ( upcastSignature , upcast ) } if ( downcast ) { downcast = embind__requireFunction ( downcastSignature , downcast ) } rawDestructor = embind__requireFunction ( destructorSignature , rawDestructor ) ; var legalFunctionName = makeLegalFunctionName ( name ) ; exposePublicSymbol ( legalFunctionName , function ( ) { throwUnboundTypeError ( " Cannot construct " + name + " due to unbound types " , [ baseClassRawType ] ) } ) ; whenDependentTypesAreResolved ( [ rawType , rawPointerType , rawConstPointerType ] , baseClassRawType ? [ baseClassRawType ] : [ ] , function ( base ) { base = base [ 0 ] ; var baseClass ; var basePrototype ; if ( baseClassRawType ) { baseClass = base . registeredClass ; basePrototype = baseClass . instancePrototype } else { basePrototype = ClassHandle . prototype } var constructor = createNamedFunction ( legalFunctionName , function ( ) { if ( Object . getPrototypeOf ( this ) ! = = instancePrototype ) { throw new BindingError ( " Use ' new ' to construct " + name ) } if ( undefined = = = registeredClass . 
constructor_body ) { throw new BindingError ( name + " has no accessible constructor " ) } var body = registeredClass . constructor_body [ arguments . length ] ; if ( undefined = = = body ) { throw new BindingError ( " Tried to invoke ctor of " + name + " with invalid number of parameters ( " + arguments . length + " ) - expected ( " + Object . keys ( registeredClass . constructor_body ) . toString ( ) + " ) parameters instead ! " ) } return body . apply ( this , arguments ) } ) ; var instancePrototype = Object . create ( basePrototype , { constructor : { value : constructor } } ) ; constructor . prototype = instancePrototype ; var registeredClass = new RegisteredClass ( name , constructor , instancePrototype , rawDestructor , baseClass , getActualType , upcast , downcast ) ; var referenceConverter = new RegisteredPointer ( name , registeredClass , true , false , false ) ; var pointerConverter = new RegisteredPointer ( name + " * " , registeredClass , false , false , false ) ; var constPointerConverter = new RegisteredPointer ( name + " const * " , registeredClass , false , true , false ) ; registeredPointers [ rawType ] = { pointerType : pointerConverter , constPointerType : constPointerConverter } ; replacePublicSymbol ( legalFunctionName , constructor ) ; return [ referenceConverter , pointerConverter , constPointerConverter ] } ) } function new_ ( constructor , argumentList ) { if ( ! ( constructor instanceof Function ) ) { throw new TypeError ( " new_ called with constructor type " + typeof constructor + " which is not a function " ) } var dummy = createNamedFunction ( constructor . name | | " unknownFunctionName " , function ( ) { } ) ; dummy . prototype = constructor . prototype ; var obj = new dummy ; var r = constructor . apply ( obj , argumentList ) ; return r instanceof Object ? r : obj } function craftInvokerFunction ( humanName , argTypes , classType , cppInvokerFunc , cppTargetFunc ) { var argCount = argTypes . length ; if ( argCount < 2 ) { throwBindingError ( " argTypes array size mismatch ! Must at least get return value and ' this ' types ! " ) } var isClassMethodFunc = argTypes [ 1 ] ! = = null & & classType ! = = null ; var needsDestructorStack = false ; for ( var i = 1 ; i < argTypes . length ; + + i ) { if ( argTypes [ i ] ! = = null & & argTypes [ i ] . destructorFunction = = = undefined ) { needsDestructorStack = true ; break } } var returns = argTypes [ 0 ] . name ! = = " void " ; var argsList = " " ; var argsListWired = " " ; for ( var i = 0 ; i < argCount - 2 ; + + i ) { argsList + = ( i ! = = 0 ? " , " : " " ) + " arg " + i ; argsListWired + = ( i ! = = 0 ? " , " : " " ) + " arg " + i + " Wired " } var invokerFnBody = " return function " + makeLegalFunctionName ( humanName ) + " ( " + argsList + " ) { \ n " + " if ( arguments . length ! = = " + ( argCount - 2 ) + " ) { \ n " + " throwBindingError ( ' function " + humanName + " called with ' + arguments . length + ' arguments , expected " + ( argCount - 2 ) + " args ! ' ) ; \ n " + " } \ n " ; if ( needsDestructorStack ) { invokerFnBody + = " var destructors = [ ] ; \ n " } var dtorStack = needsDestructorStack ? " destructors " : " null " ; var args1 = [ " throwBindingError " , " invoker " , " fn " , " runDestructors " , " retType " , " classParam " ] ; var args2 = [ throwBindingError , cppInvokerFunc , cppTargetFunc , runDestructors , argTypes [ 0 ] , argTypes [ 1 ] ] ; if ( isClassMethodFunc ) { invokerFnBody + = " var thisWired = classParam . 
toWireType ( " + dtorStack + " , this ) ; \ n " } for ( var i = 0 ; i < argCount - 2 ; + + i ) { invokerFnBody + = " var arg " + i + " Wired = argType " + i + " . toWireType ( " + dtorStack + " , arg " + i + " ) ; / / " + argTypes [ i + 2 ] . name + " \ n " ; args1 . push ( " argType " + i ) ; args2 . push ( argTypes [ i + 2 ] ) } if ( isClassMethodFunc ) { argsListWired = " thisWired " + ( argsListWired . length > 0 ? " , " : " " ) + argsListWired } invokerFnBody + = ( returns ? " var rv = " : " " ) + " invoker ( fn " + ( argsListWired . length > 0 ? " , " : " " ) + argsListWired + " ) ; \ n " ; if ( needsDestructorStack ) { invokerFnBody + = " runDestructors ( destructors ) ; \ n " } else { for ( var i = isClassMethodFunc ? 1 : 2 ; i < argTypes . length ; + + i ) { var paramName = i = = = 1 ? " thisWired " : " arg " + ( i - 2 ) + " Wired " ; if ( argTypes [ i ] . destructorFunction ! = = null ) { invokerFnBody + = paramName + " _dtor ( " + paramName + " ) ; / / " + argTypes [ i ] . name + " \ n " ; args1 . push ( paramName + " _dtor " ) ; args2 . push ( argTypes [ i ] . destructorFunction ) } } } if ( returns ) { invokerFnBody + = " var ret = retType . fromWireType ( rv ) ; \ n " + " return ret ; \ n " } else { } invokerFnBody + = " } \ n " ; args1 . push ( invokerFnBody ) ; var invokerFunction = new_ ( Function , args1 ) . apply ( null , args2 ) ; return invokerFunction } function heap32VectorToArray ( count , firstElement ) { var array = [ ] ; for ( var i = 0 ; i < count ; i + + ) { array . push ( HEAP32 [ ( firstElement > > 2 ) + i ] ) } return array } function __embind_register_class_class_function ( rawClassType , methodName , argCount , rawArgTypesAddr , invokerSignature , rawInvoker , fn ) { var rawArgTypes = heap32VectorToArray ( argCount , rawArgTypesAddr ) ; methodName = readLatin1String ( methodName ) ; rawInvoker = embind__requireFunction ( invokerSignature , rawInvoker ) ; whenDependentTypesAreResolved ( [ ] , [ rawClassType ] , function ( classType ) { classType = classType [ 0 ] ; var humanName = classType . name + " . " + methodName ; function unboundTypesHandler ( ) { throwUnboundTypeError ( " Cannot call " + humanName + " due to unbound types " , rawArgTypes ) } var proto = classType . registeredClass . constructor ; if ( undefined = = = proto [ methodName ] ) { unboundTypesHandler . argCount = argCount - 1 ; proto [ methodName ] = unboundTypesHandler } else { ensureOverloadTable ( proto , methodName , humanName ) ; proto [ methodName ] . overloadTable [ argCount - 1 ] = unboundTypesHandler } whenDependentTypesAreResolved ( [ ] , rawArgTypes , function ( argTypes ) { var invokerArgsArray = [ argTypes [ 0 ] , null ] . concat ( argTypes . slice ( 1 ) ) ; var func = craftInvokerFunction ( humanName , invokerArgsArray , null , rawInvoker , fn ) ; if ( undefined = = = proto [ methodName ] . overloadTable ) { func . argCount = argCount - 1 ; proto [ methodName ] = func } else { proto [ methodName ] . overloadTable [ argCount - 1 ] = func } return [ ] } ) ; return [ ] } ) } function __embind_register_class_constructor ( rawClassType , argCount , rawArgTypesAddr , invokerSignature , invoker , rawConstructor ) { var rawArgTypes = heap32VectorToArray ( argCount , rawArgTypesAddr ) ; invoker = embind__requireFunction ( invokerSignature , invoker ) ; whenDependentTypesAreResolved ( [ ] , [ rawClassType ] , function ( classType ) { classType = classType [ 0 ] ; var humanName = " constructor " + classType . name ; if ( undefined = = = classType . registeredClass . 
constructor_body ) { classType . registeredClass . constructor_body = [ ] } if ( undefined ! = = classType . registeredClass . constructor_body [ argCount - 1 ] ) { throw new BindingError ( " Cannot register multiple constructors with identical number of parameters ( " + ( argCount - 1 ) + " ) for class ' " + classType . name + " ' ! Overload resolution is currently only performed using the parameter count , not actual type info ! " ) } classType . registeredClass . constructor_body [ argCount - 1 ] = function unboundTypeHandler ( ) { throwUnboundTypeError ( " Cannot construct " + classType . name + " due to unbound types " , rawArgTypes ) } ; whenDependentTypesAreResolved ( [ ] , rawArgTypes , function ( argTypes ) { classType . registeredClass . constructor_body [ argCount - 1 ] = function constructor_body ( ) { if ( arguments . length ! = = argCount - 1 ) { throwBindingError ( humanName + " called with " + arguments . length + " arguments , expected " + ( argCount - 1 ) ) } var destructors = [ ] ; var args = new Array ( argCount ) ; args [ 0 ] = rawConstructor ; for ( var i = 1 ; i < argCount ; + + i ) { args [ i ] = argTypes [ i ] [ " toWireType " ] ( destructors , arguments [ i - 1 ] ) } var ptr = invoker . apply ( null , args ) ; runDestructors ( destructors ) ; return argTypes [ 0 ] [ " fromWireType " ] ( ptr ) } ; return [ ] } ) ; return [ ] } ) } function __embind_register_class_function ( rawClassType , methodName , argCount , rawArgTypesAddr , invokerSignature , rawInvoker , context , isPureVirtual ) { var rawArgTypes = heap32VectorToArray ( argCount , rawArgTypesAddr ) ; methodName = readLatin1String ( methodName ) ; rawInvoker = embind__requireFunction ( invokerSignature , rawInvoker ) ; whenDependentTypesAreResolved ( [ ] , [ rawClassType ] , function ( classType ) { classType = classType [ 0 ] ; var humanName = classType . name + " . " + methodName ; if ( isPureVirtual ) { classType . registeredClass . pureVirtualFunctions . push ( methodName ) } function unboundTypesHandler ( ) { throwUnboundTypeError ( " Cannot call " + humanName + " due to unbound types " , rawArgTypes ) } var proto = classType . registeredClass . instancePrototype ; var method = proto [ methodName ] ; if ( undefined = = = method | | undefined = = = method . overloadTable & & method . className ! = = classType . name & & method . argCount = = = argCount - 2 ) { unboundTypesHandler . argCount = argCount - 2 ; unboundTypesHandler . className = classType . name ; proto [ methodName ] = unboundTypesHandler } else { ensureOverloadTable ( proto , methodName , humanName ) ; proto [ methodName ] . overloadTable [ argCount - 2 ] = unboundTypesHandler } whenDependentTypesAreResolved ( [ ] , rawArgTypes , function ( argTypes ) { var memberFunction = craftInvokerFunction ( humanName , argTypes , classType , rawInvoker , context ) ; if ( undefined = = = proto [ methodName ] . overloadTable ) { memberFunction . argCount = argCount - 2 ; proto [ methodName ] = memberFunction } else { proto [ methodName ] . overloadTable [ argCount - 2 ] = memberFunction } return [ ] } ) ; return [ ] } ) } function validateThis ( this_ , classType , humanName ) { if ( ! ( this_ instanceof Object ) ) { throwBindingError ( humanName + ' with invalid " this " : ' + this_ ) } if ( ! ( this_ instanceof classType . registeredClass . constructor ) ) { throwBindingError ( humanName + ' incompatible with " this " of type ' + this_ . constructor . name ) } if ( ! this_ . $ $ . 
ptr ) { throwBindingError ( " cannot call emscripten binding method " + humanName + " on deleted object " ) } return upcastPointer ( this_ . $ $ . ptr , this_ . $ $ . ptrType . registeredClass , classType . registeredClass ) } function __embind_register_class_property ( classType , fieldName , getterReturnType , getterSignature , getter , getterContext , setterArgumentType , setterSignature , setter , setterContext ) { fieldName = readLatin1String ( fieldName ) ; getter = embind__requireFunction ( getterSignature , getter ) ; whenDependentTypesAreResolved ( [ ] , [ classType ] , function ( classType ) { classType = classType [ 0 ] ; var humanName = classType . name + " . " + fieldName ; var desc = { get : function ( ) { throwUnboundTypeError ( " Cannot access " + humanName + " due to unbound types " , [ getterReturnType , setterArgumentType ] ) } , enumerable : true , configurable : true } ; if ( setter ) { desc . set = function ( ) { throwUnboundTypeError ( " Cannot access " + humanName + " due to unbound types " , [ getterReturnType , setterArgumentType ] ) } } else { desc . set = function ( v ) { throwBindingError ( humanName + " is a read - only property " ) } } Object . defineProperty ( classType . registeredClass . instancePrototype , fieldName , desc ) ; whenDependentTypesAreResolved ( [ ] , setter ? [ getterReturnType , setterArgumentType ] : [ getterReturnType ] , function ( types ) { var getterReturnType = types [ 0 ] ; var desc = { get : function ( ) { var ptr = validateThis ( this , classType , humanName + " getter " ) ; return getterReturnType [ " fromWireType " ] ( getter ( getterContext , ptr ) ) } , enumerable : true } ; if ( setter ) { setter = embind__requireFunction ( setterSignature , setter ) ; var setterArgumentType = types [ 1 ] ; desc . set = function ( v ) { var ptr = validateThis ( this , classType , humanName + " setter " ) ; var destructors = [ ] ; setter ( setterContext , ptr , setterArgumentType [ " toWireType " ] ( destructors , v ) ) ; runDestructors ( destructors ) } } Object . defineProperty ( classType . registeredClass . instancePrototype , fieldName , desc ) ; return [ ] } ) ; return [ ] } ) } var emval_free_list = [ ] ; var emval_handle_array = [ { } , { value : undefined } , { value : null } , { value : true } , { value : false } ] ; function __emval_decref ( handle ) { if ( handle > 4 & & 0 = = = - - emval_handle_array [ handle ] . refcount ) { emval_handle_array [ handle ] = undefined ; emval_free_list . push ( handle ) } } function count_emval_handles ( ) { var count = 0 ; for ( var i = 5 ; i < emval_handle_array . length ; + + i ) { if ( emval_handle_array [ i ] ! = = undefined ) { + + count } } return count } function get_first_emval ( ) { for ( var i = 5 ; i < emval_handle_array . length ; + + i ) { if ( emval_handle_array [ i ] ! = = undefined ) { return emval_handle_array [ i ] } } return null } function init_emval ( ) { Module [ " count_emval_handles " ] = count_emval_handles ; Module [ " get_first_emval " ] = get_first_emval } function __emval_register ( value ) { switch ( value ) { case undefined : { return 1 } case null : { return 2 } case true : { return 3 } case false : { return 4 } default : { var handle = emval_free_list . length ? emval_free_list . pop ( ) : emval_handle_array . 
length ; emval_handle_array [ handle ] = { refcount : 1 , value : value } ; return handle } } } function __embind_register_emval ( rawType , name ) { name = readLatin1String ( name ) ; registerType ( rawType , { name : name , " fromWireType " : function ( handle ) { var rv = emval_handle_array [ handle ] . value ; __emval_decref ( handle ) ; return rv } , " toWireType " : function ( destructors , value ) { return __emval_register ( value ) } , " argPackAdvance " : 8 , " readValueFromPointer " : simpleReadValueFromPointer , destructorFunction : null } ) } function enumReadValueFromPointer ( name , shift , signed ) { switch ( shift ) { case 0 : return function ( pointer ) { var heap = signed ? HEAP8 : HEAPU8 ; return this [ " fromWireType " ] ( heap [ pointer ] ) } ; case 1 : return function ( pointer ) { var heap = signed ? HEAP16 : HEAPU16 ; return this [ " fromWireType " ] ( heap [ pointer > > 1 ] ) } ; case 2 : return function ( pointer ) { var heap = signed ? HEAP32 : HEAPU32 ; return this [ " fromWireType " ] ( heap [ pointer > > 2 ] ) } ; default : throw new TypeError ( " Unknown integer type : " + name ) } } function __embind_register_enum ( rawType , name , size , isSigned ) { var shift = getShiftFromSize ( size ) ; name = readLatin1String ( name ) ; function ctor ( ) { } ctor . values = { } ; registerType ( rawType , { name : name , constructor : ctor , " fromWireType " : function ( c ) { return this . constructor . values [ c ] } , " toWireType " : function ( destructors , c ) { return c . value } , " argPackAdvance " : 8 , " readValueFromPointer " : enumReadValueFromPointer ( name , shift , isSigned ) , destructorFunction : null } ) ; exposePublicSymbol ( name , ctor ) } function requireRegisteredType ( rawType , humanName ) { var impl = registeredTypes [ rawType ] ; if ( undefined = = = impl ) { throwBindingError ( humanName + " has unknown type " + getTypeName ( rawType ) ) } return impl } function __embind_register_enum_value ( rawEnumType , name , enumValue ) { var enumType = requireRegisteredType ( rawEnumType , " enum " ) ; name = readLatin1String ( name ) ; var Enum = enumType . constructor ; var Value = Object . create ( enumType . constructor . prototype , { value : { value : enumValue } , constructor : { value : createNamedFunction ( enumType . name + " _ " + name , function ( ) { } ) } } ) ; Enum . values [ enumValue ] = Value ; Enum [ name ] = Value } function _embind_repr ( v ) { if ( v = = = null ) { return " null " } var t = typeof v ; if ( t = = = " object " | | t = = = " array " | | t = = = " function " ) { return v . toString ( ) } else { return " " + v } } function floatReadValueFromPointer ( name , shift ) { switch ( shift ) { case 2 : return function ( pointer ) { return this [ " fromWireType " ] ( HEAPF32 [ pointer > > 2 ] ) } ; case 3 : return function ( pointer ) { return this [ " fromWireType " ] ( HEAPF64 [ pointer > > 3 ] ) } ; default : throw new TypeError ( " Unknown float type : " + name ) } } function __embind_register_float ( rawType , name , size ) { var shift = getShiftFromSize ( size ) ; name = readLatin1String ( name ) ; registerType ( rawType , { name : name , " fromWireType " : function ( value ) { return value } , " toWireType " : function ( destructors , value ) { if ( typeof value ! = = " number " & & typeof value ! = = " boolean " ) { throw new TypeError ( ' Cannot convert " ' + _embind_repr ( value ) + ' " to ' + this . 
name ) } return value } , " argPackAdvance " : 8 , " readValueFromPointer " : floatReadValueFromPointer ( name , shift ) , destructorFunction : null } ) } function __embind_register_function ( name , argCount , rawArgTypesAddr , signature , rawInvoker , fn ) { var argTypes = heap32VectorToArray ( argCount , rawArgTypesAddr ) ; name = readLatin1String ( name ) ; rawInvoker = embind__requireFunction ( signature , rawInvoker ) ; exposePublicSymbol ( name , function ( ) { throwUnboundTypeError ( " Cannot call " + name + " due to unbound types " , argTypes ) } , argCount - 1 ) ; whenDependentTypesAreResolved ( [ ] , argTypes , function ( argTypes ) { var invokerArgsArray = [ argTypes [ 0 ] , null ] . concat ( argTypes . slice ( 1 ) ) ; replacePublicSymbol ( name , craftInvokerFunction ( name , invokerArgsArray , null , rawInvoker , fn ) , argCount - 1 ) ; return [ ] } ) } function integerReadValueFromPointer ( name , shift , signed ) { switch ( shift ) { case 0 : return signed ? function readS8FromPointer ( pointer ) { return HEAP8 [ pointer ] } : function readU8FromPointer ( pointer ) { return HEAPU8 [ pointer ] } ; case 1 : return signed ? function readS16FromPointer ( pointer ) { return HEAP16 [ pointer > > 1 ] } : function readU16FromPointer ( pointer ) { return HEAPU16 [ pointer > > 1 ] } ; case 2 : return signed ? function readS32FromPointer ( pointer ) { return HEAP32 [ pointer > > 2 ] } : function readU32FromPointer ( pointer ) { return HEAPU32 [ pointer > > 2 ] } ; default : throw new TypeError ( " Unknown integer type : " + name ) } } function __embind_register_integer ( primitiveType , name , size , minRange , maxRange ) { name = readLatin1String ( name ) ; if ( maxRange = = = - 1 ) { maxRange = 4294967295 } var shift = getShiftFromSize ( size ) ; var fromWireType = function ( value ) { return value } ; if ( minRange = = = 0 ) { var bitshift = 32 - 8 * size ; fromWireType = function ( value ) { return value < < bitshift > > > bitshift } } var isUnsignedType = name . indexOf ( " unsigned " ) ! = - 1 ; registerType ( primitiveType , { name : name , " fromWireType " : fromWireType , " toWireType " : function ( destructors , value ) { if ( typeof value ! = = " number " & & typeof value ! = = " boolean " ) { throw new TypeError ( ' Cannot convert " ' + _embind_repr ( value ) + ' " to ' + this . name ) } if ( value < minRange | | value > maxRange ) { throw new TypeError ( ' Passing a number " ' + _embind_repr ( value ) + ' " from JS side to C / C + + side to an argument of type " ' + name + ' " , which is outside the valid range [ ' + minRange + " , " + maxRange + " ] ! " ) } return isUnsignedType ? value > > > 0 : value | 0 } , " argPackAdvance " : 8 , " readValueFromPointer " : integerReadValueFromPointer ( name , shift , minRange ! 
= = 0 ) , destructorFunction : null } ) } function __embind_register_memory_view ( rawType , dataTypeIndex , name ) { var typeMapping = [ Int8Array , Uint8Array , Int16Array , Uint16Array , Int32Array , Uint32Array , Float32Array , Float64Array ] ; var TA = typeMapping [ dataTypeIndex ] ; function decodeMemoryView ( handle ) { handle = handle > > 2 ; var heap = HEAPU32 ; var size = heap [ handle ] ; var data = heap [ handle + 1 ] ; return new TA ( heap [ " buffer " ] , data , size ) } name = readLatin1String ( name ) ; registerType ( rawType , { name : name , " fromWireType " : decodeMemoryView , " argPackAdvance " : 8 , " readValueFromPointer " : decodeMemoryView } , { ignoreDuplicateRegistrations : true } ) } function __embind_register_std_string ( rawType , name ) { name = readLatin1String ( name ) ; var stdStringIsUTF8 = name = = = " std : : string " ; registerType ( rawType , { name : name , " fromWireType " : function ( value ) { var length = HEAPU32 [ value > > 2 ] ; var str ; if ( stdStringIsUTF8 ) { var endChar = HEAPU8 [ value + 4 + length ] ; var endCharSwap = 0 ; if ( endChar ! = 0 ) { endCharSwap = endChar ; HEAPU8 [ value + 4 + length ] = 0 } var decodeStartPtr = value + 4 ; for ( var i = 0 ; i < = length ; + + i ) { var currentBytePtr = value + 4 + i ; if ( HEAPU8 [ currentBytePtr ] = = 0 ) { var stringSegment = UTF8ToString ( decodeStartPtr ) ; if ( str = = = undefined ) str = stringSegment ; else { str + = String . fromCharCode ( 0 ) ; str + = stringSegment } decodeStartPtr = currentBytePtr + 1 } } if ( endCharSwap ! = 0 ) HEAPU8 [ value + 4 + length ] = endCharSwap } else { var a = new Array ( length ) ; for ( var i = 0 ; i < length ; + + i ) { a [ i ] = String . fromCharCode ( HEAPU8 [ value + 4 + i ] ) } str = a . join ( " " ) } _free ( value ) ; return str } , " toWireType " : function ( destructors , value ) { if ( value instanceof ArrayBuffer ) { value = new Uint8Array ( value ) } var getLength ; var valueIsOfTypeString = typeof value = = = " string " ; if ( ! ( valueIsOfTypeString | | value instanceof Uint8Array | | value instanceof Uint8ClampedArray | | value instanceof Int8Array ) ) { throwBindingError ( " Cannot pass non - string to std : : string " ) } if ( stdStringIsUTF8 & & valueIsOfTypeString ) { getLength = function ( ) { return lengthBytesUTF8 ( value ) } } else { getLength = function ( ) { return value . length } } var length = getLength ( ) ; var ptr = _malloc ( 4 + length + 1 ) ; HEAPU32 [ ptr > > 2 ] = length ; if ( stdStringIsUTF8 & & valueIsOfTypeString ) { stringToUTF8 ( value , ptr + 4 , length + 1 ) } else { if ( valueIsOfTypeString ) { for ( var i = 0 ; i < length ; + + i ) { var charCode = value . charCodeAt ( i ) ; if ( charCode > 255 ) { _free ( ptr ) ; throwBindingError ( " String has UTF - 16 code units that do not fit in 8 bits " ) } HEAPU8 [ ptr + 4 + i ] = charCode } } else { for ( var i = 0 ; i < length ; + + i ) { HEAPU8 [ ptr + 4 + i ] = value [ i ] } } } if ( destructors ! = = null ) { destructors . 
push ( _free , ptr ) } return ptr } , " argPackAdvance " : 8 , " readValueFromPointer " : simpleReadValueFromPointer , destructorFunction : function ( ptr ) { _free ( ptr ) } } ) } function __embind_register_std_wstring ( rawType , charSize , name ) { name = readLatin1String ( name ) ; var getHeap , shift ; if ( charSize = = = 2 ) { getHeap = function ( ) { return HEAPU16 } ; shift = 1 } else if ( charSize = = = 4 ) { getHeap = function ( ) { return HEAPU32 } ; shift = 2 } registerType ( rawType , { name : name , " fromWireType " : function ( value ) { var HEAP = getHeap ( ) ; var length = HEAPU32 [ value > > 2 ] ; var a = new Array ( length ) ; var start = value + 4 > > shift ; for ( var i = 0 ; i < length ; + + i ) { a [ i ] = String . fromCharCode ( HEAP [ start + i ] ) } _free ( value ) ; return a . join ( " " ) } , " toWireType " : function ( destructors , value ) { var HEAP = getHeap ( ) ; var length = value . length ; var ptr = _malloc ( 4 + length * charSize ) ; HEAPU32 [ ptr > > 2 ] = length ; var start = ptr + 4 > > shift ; for ( var i = 0 ; i < length ; + + i ) { HEAP [ start + i ] = value . charCodeAt ( i ) } if ( destructors ! = = null ) { destructors . push ( _free , ptr ) } return ptr } , " argPackAdvance " : 8 , " readValueFromPointer " : simpleReadValueFromPointer , destructorFunction : function ( ptr ) { _free ( ptr ) } } ) } function __embind_register_value_array ( rawType , name , constructorSignature , rawConstructor , destructorSignature , rawDestructor ) { tupleRegistrations [ rawType ] = { name : readLatin1String ( name ) , rawConstructor : embind__requireFunction ( constructorSignature , rawConstructor ) , rawDestructor : embind__requireFunction ( destructorSignature , rawDestructor ) , elements : [ ] } } function __embind_register_value_array_element ( rawTupleType , getterReturnType , getterSignature , getter , getterContext , setterArgumentType , setterSignature , setter , setterContext ) { tupleRegistrations [ rawTupleType ] . elements . push ( { getterReturnType : getterReturnType , getter : embind__requireFunction ( getterSignature , getter ) , getterContext : getterContext , setterArgumentType : setterArgumentType , setter : embind__requireFunction ( setterSignature , setter ) , setterContext : setterContext } ) } function __embind_register_value_object ( rawType , name , constructorSignature , rawConstructor , destructorSignature , rawDestructor ) { structRegistrations [ rawType ] = { name : readLatin1String ( name ) , rawConstructor : embind__requireFunction ( constructorSignature , rawConstructor ) , rawDestructor : embind__requireFunction ( destructorSignature , rawDestructor ) , fields : [ ] } } function __embind_register_value_object_field ( structType , fieldName , getterReturnType , getterSignature , getter , getterContext , setterArgumentType , setterSignature , setter , setterContext ) { structRegistrations [ structType ] . fields . 
push ( { fieldName : readLatin1String ( fieldName ) , getterReturnType : getterReturnType , getter : embind__requireFunction ( getterSignature , getter ) , getterContext : getterContext , setterArgumentType : setterArgumentType , setter : embind__requireFunction ( setterSignature , setter ) , setterContext : setterContext } ) } function __embind_register_void ( rawType , name ) { name = readLatin1String ( name ) ; registerType ( rawType , { isVoid : true , name : name , " argPackAdvance " : 0 , " fromWireType " : function ( ) { return undefined } , " toWireType " : function ( destructors , o ) { return undefined } } ) } function requireHandle ( handle ) { if ( ! handle ) { throwBindingError ( " Cannot use deleted val . handle = " + handle ) } return emval_handle_array [ handle ] . value } function __emval_as ( handle , returnType , destructorsRef ) { handle = requireHandle ( handle ) ; returnType = requireRegisteredType ( returnType , " emval : : as " ) ; var destructors = [ ] ; var rd = __emval_register ( destructors ) ; HEAP32 [ destructorsRef > > 2 ] = rd ; return returnType [ " toWireType " ] ( destructors , handle ) } function __emval_get_property ( handle , key ) { handle = requireHandle ( handle ) ; key = requireHandle ( key ) ; return __emval_register ( handle [ key ] ) } function __emval_incref ( handle ) { if ( handle > 4 ) { emval_handle_array [ handle ] . refcount + = 1 } } var emval_symbols = { } ; function getStringOrSymbol ( address ) { var symbol = emval_symbols [ address ] ; if ( symbol = = = undefined ) { return readLatin1String ( address ) } else { return symbol } } function __emval_new_cstring ( v ) { return __emval_register ( getStringOrSymbol ( v ) ) } function __emval_run_destructors ( handle ) { var destructors = emval_handle_array [ handle ] . value ; runDestructors ( destructors ) ; __emval_decref ( handle ) } function __emval_take_value ( type , argv ) { type = requireRegisteredType ( type , " _emval_take_value " ) ; var v = type [ " readValueFromPointer " ] ( argv ) ; return __emval_register ( v ) } function _abort ( ) { Module [ " abort " ] ( ) } function _emscripten_get_now ( ) { abort ( ) } function _emscripten_get_now_is_monotonic ( ) { return 0 | | ENVIRONMENT_IS_NODE | | typeof dateNow ! = = " undefined " | | ( ENVIRONMENT_IS_WEB | | ENVIRONMENT_IS_WORKER ) & & self [ " performance " ] & & self [ " performance " ] [ " now " ] } function _clock_gettime ( clk_id , tp ) { var now ; if ( clk_id = = = 0 ) { now = Date . now ( ) } else if ( clk_id = = = 1 & & _emscripten_get_now_is_monotonic ( ) ) { now = _emscripten_get_now ( ) } else { ___setErrNo ( 22 ) ; return - 1 } HEAP32 [ tp > > 2 ] = now / 1e3 | 0 ; HEAP32 [ tp + 4 > > 2 ] = now % 1e3 * 1e3 * 1e3 | 0 ; return 0 } function _emscripten_get_heap_size ( ) { return TOTAL_MEMORY } var GL = { counter : 1 , lastError : 0 , buffers : [ ] , mappedBuffers : { } , programs : [ ] , framebuffers : [ ] , renderbuffers : [ ] , textures : [ ] , uniforms : [ ] , shaders : [ ] , vaos : [ ] , contexts : { } , currentContext : null , offscreenCanvases : { } , timerQueriesEXT : [ ] , queries : [ ] , samplers : [ ] , transformFeedbacks : [ ] , syncs : [ ] , programInfos : { } , stringCache : { } , stringiCache : { } , unpackAlignment : 4 , init : function ( ) { GL . miniTempBuffer = new Float32Array ( GL . MINI_TEMP_BUFFER_SIZE ) ; for ( var i = 0 ; i < GL . MINI_TEMP_BUFFER_SIZE ; i + + ) { GL . miniTempBufferViews [ i ] = GL . miniTempBuffer . 
subarray ( 0 , i + 1 ) } } , recordError : function recordError ( errorCode ) { if ( ! GL . lastError ) { GL . lastError = errorCode } } , getNewId : function ( table ) { var ret = GL . counter + + ; for ( var i = table . length ; i < ret ; i + + ) { table [ i ] = null } return ret } , MINI_TEMP_BUFFER_SIZE : 256 , miniTempBuffer : null , miniTempBufferViews : [ 0 ] , getSource : function ( shader , count , string , length ) { var source = " " ; for ( var i = 0 ; i < count ; + + i ) { var len = length ? HEAP32 [ length + i * 4 > > 2 ] : - 1 ; source + = UTF8ToString ( HEAP32 [ string + i * 4 > > 2 ] , len < 0 ? undefined : len ) } return source } , createContext : function ( canvas , webGLContextAttributes ) { var ctx = webGLContextAttributes . majorVersion > 1 ? canvas . getContext ( " webgl2 " , webGLContextAttributes ) : canvas . getContext ( " webgl " , webGLContextAttributes ) | | canvas . getContext ( " experimental - webgl " , webGLContextAttributes ) ; return ctx & & GL . registerContext ( ctx , webGLContextAttributes ) } , registerContext : function ( ctx , webGLContextAttributes ) { var handle = _malloc ( 8 ) ; var context = { handle : handle , attributes : webGLContextAttributes , version : webGLContextAttributes . majorVersion , GLctx : ctx } ; function getChromeVersion ( ) { var raw = navigator . userAgent . match ( / Chrom ( e | ium ) \ / ( [ 0 - 9 ] + ) \ . / ) ; return raw ? parseInt ( raw [ 2 ] , 10 ) : false } context . supportsWebGL2EntryPoints = context . version > = 2 & & ( getChromeVersion ( ) = = = false | | getChromeVersion ( ) > = 58 ) ; if ( ctx . canvas ) ctx . canvas . GLctxObject = context ; GL . contexts [ handle ] = context ; if ( typeof webGLContextAttributes . enableExtensionsByDefault = = = " undefined " | | webGLContextAttributes . enableExtensionsByDefault ) { GL . initExtensions ( context ) } return handle } , makeContextCurrent : function ( contextHandle ) { GL . currentContext = GL . contexts [ contextHandle ] ; Module . ctx = GLctx = GL . currentContext & & GL . currentContext . GLctx ; return ! ( contextHandle & & ! GLctx ) } , getContext : function ( contextHandle ) { return GL . contexts [ contextHandle ] } , deleteContext : function ( contextHandle ) { if ( GL . currentContext = = = GL . contexts [ contextHandle ] ) GL . currentContext = null ; if ( typeof JSEvents = = = " object " ) JSEvents . removeAllHandlersOnTarget ( GL . contexts [ contextHandle ] . GLctx . canvas ) ; if ( GL . contexts [ contextHandle ] & & GL . contexts [ contextHandle ] . GLctx . canvas ) GL . contexts [ contextHandle ] . GLctx . canvas . GLctxObject = undefined ; _free ( GL . contexts [ contextHandle ] ) ; GL . contexts [ contextHandle ] = null } , initExtensions : function ( context ) { if ( ! context ) context = GL . currentContext ; if ( context . initExtensionsDone ) return ; context . initExtensionsDone = true ; var GLctx = context . GLctx ; if ( context . version < 2 ) { var instancedArraysExt = GLctx . 
getExtension ( " ANGLE_instanced_arrays " ) ; if ( instancedArraysExt ) { GLctx [ " vertexAttribDivisor " ] = function ( index , divisor ) { instancedArraysExt [ " vertexAttribDivisorANGLE " ] ( index , divisor ) } ; GLctx [ " drawArraysInstanced " ] = function ( mode , first , count , primcount ) { instancedArraysExt [ " drawArraysInstancedANGLE " ] ( mode , first , count , primcount ) } ; GLctx [ " drawElementsInstanced " ] = function ( mode , count , type , indices , primcount ) { instancedArraysExt [ " drawElementsInstancedANGLE " ] ( mode , count , type , indices , primcount ) } } var vaoExt = GLctx . getExtension ( " OES_vertex_array_object " ) ; if ( vaoExt ) { GLctx [ " createVertexArray " ] = function ( ) { return vaoExt [ " createVertexArrayOES " ] ( ) } ; GLctx [ " deleteVertexArray " ] = function ( vao ) { vaoExt [ " deleteVertexArrayOES " ] ( vao ) } ; GLctx [ " bindVertexArray " ] = function ( vao ) { vaoExt [ " bindVertexArrayOES " ] ( vao ) } ; GLctx [ " isVertexArray " ] = function ( vao ) { return vaoExt [ " isVertexArrayOES " ] ( vao ) } } var drawBuffersExt = GLctx . getExtension ( " WEBGL_draw_buffers " ) ; if ( drawBuffersExt ) { GLctx [ " drawBuffers " ] = function ( n , bufs ) { drawBuffersExt [ " drawBuffersWEBGL " ] ( n , bufs ) } } } GLctx . disjointTimerQueryExt = GLctx . getExtension ( " EXT_disjoint_timer_query " ) ; var automaticallyEnabledExtensions = [ " OES_texture_float " , " OES_texture_half_float " , " OES_standard_derivatives " , " OES_vertex_array_object " , " WEBGL_compressed_texture_s3tc " , " WEBGL_depth_texture " , " OES_element_index_uint " , " EXT_texture_filter_anisotropic " , " EXT_frag_depth " , " WEBGL_draw_buffers " , " ANGLE_instanced_arrays " , " OES_texture_float_linear " , " OES_texture_half_float_linear " , " EXT_blend_minmax " , " EXT_shader_texture_lod " , " WEBGL_compressed_texture_pvrtc " , " EXT_color_buffer_half_float " , " WEBGL_color_buffer_float " , " EXT_sRGB " , " WEBGL_compressed_texture_etc1 " , " EXT_disjoint_timer_query " , " WEBGL_compressed_texture_etc " , " WEBGL_compressed_texture_astc " , " EXT_color_buffer_float " , " WEBGL_compressed_texture_s3tc_srgb " , " EXT_disjoint_timer_query_webgl2 " ] ; var exts = GLctx . getSupportedExtensions ( ) ; if ( exts & & exts . length > 0 ) { GLctx . getSupportedExtensions ( ) . forEach ( function ( ext ) { if ( automaticallyEnabledExtensions . indexOf ( ext ) ! = - 1 ) { GLctx . getExtension ( ext ) } } ) } } , populateUniformTable : function ( program ) { var p = GL . programs [ program ] ; var ptable = GL . programInfos [ program ] = { uniforms : { } , maxUniformLength : 0 , maxAttributeLength : - 1 , maxUniformBlockNameLength : - 1 } ; var utable = ptable . uniforms ; var numUniforms = GLctx . getProgramParameter ( p , 35718 ) ; for ( var i = 0 ; i < numUniforms ; + + i ) { var u = GLctx . getActiveUniform ( p , i ) ; var name = u . name ; ptable . maxUniformLength = Math . max ( ptable . maxUniformLength , name . length + 1 ) ; if ( name . slice ( - 1 ) = = " ] " ) { name = name . slice ( 0 , name . lastIndexOf ( " [ " ) ) } var loc = GLctx . getUniformLocation ( p , name ) ; if ( loc ) { var id = GL . getNewId ( GL . uniforms ) ; utable [ name ] = [ u . size , id ] ; GL . uniforms [ id ] = loc ; for ( var j = 1 ; j < u . size ; + + j ) { var n = name + " [ " + j + " ] " ; loc = GLctx . getUniformLocation ( p , n ) ; id = GL . getNewId ( GL . uniforms ) ; GL . 
uniforms [ id ] = loc } } } } } ; function _emscripten_glActiveTexture ( x0 ) { GLctx [ " activeTexture " ] ( x0 ) } function _emscripten_glAttachShader ( program , shader ) { GLctx . attachShader ( GL . programs [ program ] , GL . shaders [ shader ] ) } function _emscripten_glBeginQuery ( target , id ) { GLctx [ " beginQuery " ] ( target , GL . queries [ id ] ) } function _emscripten_glBeginQueryEXT ( target , id ) { GLctx . disjointTimerQueryExt [ " beginQueryEXT " ] ( target , GL . timerQueriesEXT [ id ] ) } function _emscripten_glBeginTransformFeedback ( x0 ) { GLctx [ " beginTransformFeedback " ] ( x0 ) } function _emscripten_glBindAttribLocation ( program , index , name ) { GLctx . bindAttribLocation ( GL . programs [ program ] , index , UTF8ToString ( name ) ) } function _emscripten_glBindBuffer ( target , buffer ) { if ( target = = 35051 ) { GLctx . currentPixelPackBufferBinding = buffer } else if ( target = = 35052 ) { GLctx . currentPixelUnpackBufferBinding = buffer } GLctx . bindBuffer ( target , GL . buffers [ buffer ] ) } function _emscripten_glBindBufferBase ( target , index , buffer ) { GLctx [ " bindBufferBase " ] ( target , index , GL . buffers [ buffer ] ) } function _emscripten_glBindBufferRange ( target , index , buffer , offset , ptrsize ) { GLctx [ " bindBufferRange " ] ( target , index , GL . buffers [ buffer ] , offset , ptrsize ) } function _emscripten_glBindFramebuffer ( target , framebuffer ) { GLctx . bindFramebuffer ( target , GL . framebuffers [ framebuffer ] ) } function _emscripten_glBindRenderbuffer ( target , renderbuffer ) { GLctx . bindRenderbuffer ( target , GL . renderbuffers [ renderbuffer ] ) } function _emscripten_glBindSampler ( unit , sampler ) { GLctx [ " bindSampler " ] ( unit , GL . samplers [ sampler ] ) } function _emscripten_glBindTexture ( target , texture ) { GLctx . bindTexture ( target , GL . textures [ texture ] ) } function _emscripten_glBindTransformFeedback ( target , id ) { GLctx [ " bindTransformFeedback " ] ( target , GL . transformFeedbacks [ id ] ) } function _emscripten_glBindVertexArray ( vao ) { GLctx [ " bindVertexArray " ] ( GL . vaos [ vao ] ) } function _emscripten_glBindVertexArrayOES ( vao ) { GLctx [ " bindVertexArray " ] ( GL . vaos [ vao ] ) } function _emscripten_glBlendColor ( x0 , x1 , x2 , x3 ) { GLctx [ " blendColor " ] ( x0 , x1 , x2 , x3 ) } function _emscripten_glBlendEquation ( x0 ) { GLctx [ " blendEquation " ] ( x0 ) } function _emscripten_glBlendEquationSeparate ( x0 , x1 ) { GLctx [ " blendEquationSeparate " ] ( x0 , x1 ) } function _emscripten_glBlendFunc ( x0 , x1 ) { GLctx [ " blendFunc " ] ( x0 , x1 ) } function _emscripten_glBlendFuncSeparate ( x0 , x1 , x2 , x3 ) { GLctx [ " blendFuncSeparate " ] ( x0 , x1 , x2 , x3 ) } function _emscripten_glBlitFramebuffer ( x0 , x1 , x2 , x3 , x4 , x5 , x6 , x7 , x8 , x9 ) { GLctx [ " blitFramebuffer " ] ( x0 , x1 , x2 , x3 , x4 , x5 , x6 , x7 , x8 , x9 ) } function _emscripten_glBufferData ( target , size , data , usage ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { if ( data ) { GLctx . bufferData ( target , HEAPU8 , usage , data , size ) } else { GLctx . bufferData ( target , size , usage ) } } else { GLctx . bufferData ( target , data ? HEAPU8 . subarray ( data , data + size ) : size , usage ) } } function _emscripten_glBufferSubData ( target , offset , size , data ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . bufferSubData ( target , offset , HEAPU8 , data , size ) ; return } GLctx . 
bufferSubData ( target , offset , HEAPU8 . subarray ( data , data + size ) ) } function _emscripten_glCheckFramebufferStatus ( x0 ) { return GLctx [ " checkFramebufferStatus " ] ( x0 ) } function _emscripten_glClear ( x0 ) { GLctx [ " clear " ] ( x0 ) } function _emscripten_glClearBufferfi ( x0 , x1 , x2 , x3 ) { GLctx [ " clearBufferfi " ] ( x0 , x1 , x2 , x3 ) } function _emscripten_glClearBufferfv ( buffer , drawbuffer , value ) { GLctx [ " clearBufferfv " ] ( buffer , drawbuffer , HEAPF32 , value > > 2 ) } function _emscripten_glClearBufferiv ( buffer , drawbuffer , value ) { GLctx [ " clearBufferiv " ] ( buffer , drawbuffer , HEAP32 , value > > 2 ) } function _emscripten_glClearBufferuiv ( buffer , drawbuffer , value ) { GLctx [ " clearBufferuiv " ] ( buffer , drawbuffer , HEAPU32 , value > > 2 ) } function _emscripten_glClearColor ( x0 , x1 , x2 , x3 ) { GLctx [ " clearColor " ] ( x0 , x1 , x2 , x3 ) } function _emscripten_glClearDepthf ( x0 ) { GLctx [ " clearDepth " ] ( x0 ) } function _emscripten_glClearStencil ( x0 ) { GLctx [ " clearStencil " ] ( x0 ) } function _emscripten_glClientWaitSync ( sync , flags , timeoutLo , timeoutHi ) { timeoutLo = timeoutLo > > > 0 ; timeoutHi = timeoutHi > > > 0 ; var timeout = timeoutLo = = 4294967295 & & timeoutHi = = 4294967295 ? - 1 : makeBigInt ( timeoutLo , timeoutHi , true ) ; return GLctx . clientWaitSync ( GL . syncs [ sync ] , flags , timeout ) } function _emscripten_glColorMask ( red , green , blue , alpha ) { GLctx . colorMask ( ! ! red , ! ! green , ! ! blue , ! ! alpha ) } function _emscripten_glCompileShader ( shader ) { GLctx . compileShader ( GL . shaders [ shader ] ) } function _emscripten_glCompressedTexImage2D ( target , level , internalFormat , width , height , border , imageSize , data ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { if ( GLctx . currentPixelUnpackBufferBinding ) { GLctx [ " compressedTexImage2D " ] ( target , level , internalFormat , width , height , border , imageSize , data ) } else { GLctx [ " compressedTexImage2D " ] ( target , level , internalFormat , width , height , border , HEAPU8 , data , imageSize ) } return } GLctx [ " compressedTexImage2D " ] ( target , level , internalFormat , width , height , border , data ? HEAPU8 . subarray ( data , data + imageSize ) : null ) } function _emscripten_glCompressedTexImage3D ( target , level , internalFormat , width , height , depth , border , imageSize , data ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { if ( GLctx . currentPixelUnpackBufferBinding ) { GLctx [ " compressedTexImage3D " ] ( target , level , internalFormat , width , height , depth , border , imageSize , data ) } else { GLctx [ " compressedTexImage3D " ] ( target , level , internalFormat , width , height , depth , border , HEAPU8 , data , imageSize ) } } else { GLctx [ " compressedTexImage3D " ] ( target , level , internalFormat , width , height , depth , border , data ? HEAPU8 . subarray ( data , data + imageSize ) : null ) } } function _emscripten_glCompressedTexSubImage2D ( target , level , xoffset , yoffset , width , height , format , imageSize , data ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { if ( GLctx . 
currentPixelUnpackBufferBinding ) { GLctx [ " compressedTexSubImage2D " ] ( target , level , xoffset , yoffset , width , height , format , imageSize , data ) } else { GLctx [ " compressedTexSubImage2D " ] ( target , level , xoffset , yoffset , width , height , format , HEAPU8 , data , imageSize ) } return } GLctx [ " compressedTexSubImage2D " ] ( target , level , xoffset , yoffset , width , height , format , data ? HEAPU8 . subarray ( data , data + imageSize ) : null ) } function _emscripten_glCompressedTexSubImage3D ( target , level , xoffset , yoffset , zoffset , width , height , depth , format , imageSize , data ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { if ( GLctx . currentPixelUnpackBufferBinding ) { GLctx [ " compressedTexSubImage3D " ] ( target , level , xoffset , yoffset , zoffset , width , height , depth , format , imageSize , data ) } else { GLctx [ " compressedTexSubImage3D " ] ( target , level , xoffset , yoffset , zoffset , width , height , depth , format , HEAPU8 , data , imageSize ) } } else { GLctx [ " compressedTexSubImage3D " ] ( target , level , xoffset , yoffset , zoffset , width , height , depth , format , data ? HEAPU8 . subarray ( data , data + imageSize ) : null ) } } function _emscripten_glCopyBufferSubData ( x0 , x1 , x2 , x3 , x4 ) { GLctx [ " copyBufferSubData " ] ( x0 , x1 , x2 , x3 , x4 ) } function _emscripten_glCopyTexImage2D ( x0 , x1 , x2 , x3 , x4 , x5 , x6 , x7 ) { GLctx [ " copyTexImage2D " ] ( x0 , x1 , x2 , x3 , x4 , x5 , x6 , x7 ) } function _emscripten_glCopyTexSubImage2D ( x0 , x1 , x2 , x3 , x4 , x5 , x6 , x7 ) { GLctx [ " copyTexSubImage2D " ] ( x0 , x1 , x2 , x3 , x4 , x5 , x6 , x7 ) } function _emscripten_glCopyTexSubImage3D ( x0 , x1 , x2 , x3 , x4 , x5 , x6 , x7 , x8 ) { GLctx [ " copyTexSubImage3D " ] ( x0 , x1 , x2 , x3 , x4 , x5 , x6 , x7 , x8 ) } function _emscripten_glCreateProgram ( ) { var id = GL . getNewId ( GL . programs ) ; var program = GLctx . createProgram ( ) ; program . name = id ; GL . programs [ id ] = program ; return id } function _emscripten_glCreateShader ( shaderType ) { var id = GL . getNewId ( GL . shaders ) ; GL . shaders [ id ] = GLctx . createShader ( shaderType ) ; return id } function _emscripten_glCullFace ( x0 ) { GLctx [ " cullFace " ] ( x0 ) } function _emscripten_glDeleteBuffers ( n , buffers ) { for ( var i = 0 ; i < n ; i + + ) { var id = HEAP32 [ buffers + i * 4 > > 2 ] ; var buffer = GL . buffers [ id ] ; if ( ! buffer ) continue ; GLctx . deleteBuffer ( buffer ) ; buffer . name = 0 ; GL . buffers [ id ] = null ; if ( id = = GL . currArrayBuffer ) GL . currArrayBuffer = 0 ; if ( id = = GL . currElementArrayBuffer ) GL . currElementArrayBuffer = 0 ; if ( id = = GLctx . currentPixelPackBufferBinding ) GLctx . currentPixelPackBufferBinding = 0 ; if ( id = = GLctx . currentPixelUnpackBufferBinding ) GLctx . currentPixelUnpackBufferBinding = 0 } } function _emscripten_glDeleteFramebuffers ( n , framebuffers ) { for ( var i = 0 ; i < n ; + + i ) { var id = HEAP32 [ framebuffers + i * 4 > > 2 ] ; var framebuffer = GL . framebuffers [ id ] ; if ( ! framebuffer ) continue ; GLctx . deleteFramebuffer ( framebuffer ) ; framebuffer . name = 0 ; GL . framebuffers [ id ] = null } } function _emscripten_glDeleteProgram ( id ) { if ( ! id ) return ; var program = GL . programs [ id ] ; if ( ! program ) { GL . recordError ( 1281 ) ; return } GLctx . deleteProgram ( program ) ; program . name = 0 ; GL . programs [ id ] = null ; GL . 
programInfos [ id ] = null } function _emscripten_glDeleteQueries ( n , ids ) { for ( var i = 0 ; i < n ; i + + ) { var id = HEAP32 [ ids + i * 4 > > 2 ] ; var query = GL . queries [ id ] ; if ( ! query ) continue ; GLctx [ " deleteQuery " ] ( query ) ; GL . queries [ id ] = null } } function _emscripten_glDeleteQueriesEXT ( n , ids ) { for ( var i = 0 ; i < n ; i + + ) { var id = HEAP32 [ ids + i * 4 > > 2 ] ; var query = GL . timerQueriesEXT [ id ] ; if ( ! query ) continue ; GLctx . disjointTimerQueryExt [ " deleteQueryEXT " ] ( query ) ; GL . timerQueriesEXT [ id ] = null } } function _emscripten_glDeleteRenderbuffers ( n , renderbuffers ) { for ( var i = 0 ; i < n ; i + + ) { var id = HEAP32 [ renderbuffers + i * 4 > > 2 ] ; var renderbuffer = GL . renderbuffers [ id ] ; if ( ! renderbuffer ) continue ; GLctx . deleteRenderbuffer ( renderbuffer ) ; renderbuffer . name = 0 ; GL . renderbuffers [ id ] = null } } function _emscripten_glDeleteSamplers ( n , samplers ) { for ( var i = 0 ; i < n ; i + + ) { var id = HEAP32 [ samplers + i * 4 > > 2 ] ; var sampler = GL . samplers [ id ] ; if ( ! sampler ) continue ; GLctx [ " deleteSampler " ] ( sampler ) ; sampler . name = 0 ; GL . samplers [ id ] = null } } function _emscripten_glDeleteShader ( id ) { if ( ! id ) return ; var shader = GL . shaders [ id ] ; if ( ! shader ) { GL . recordError ( 1281 ) ; return } GLctx . deleteShader ( shader ) ; GL . shaders [ id ] = null } function _emscripten_glDeleteSync ( id ) { if ( ! id ) return ; var sync = GL . syncs [ id ] ; if ( ! sync ) { GL . recordError ( 1281 ) ; return } GLctx . deleteSync ( sync ) ; sync . name = 0 ; GL . syncs [ id ] = null } function _emscripten_glDeleteTextures ( n , textures ) { for ( var i = 0 ; i < n ; i + + ) { var id = HEAP32 [ textures + i * 4 > > 2 ] ; var texture = GL . textures [ id ] ; if ( ! texture ) continue ; GLctx . deleteTexture ( texture ) ; texture . name = 0 ; GL . textures [ id ] = null } } function _emscripten_glDeleteTransformFeedbacks ( n , ids ) { for ( var i = 0 ; i < n ; i + + ) { var id = HEAP32 [ ids + i * 4 > > 2 ] ; var transformFeedback = GL . transformFeedbacks [ id ] ; if ( ! transformFeedback ) continue ; GLctx [ " deleteTransformFeedback " ] ( transformFeedback ) ; transformFeedback . name = 0 ; GL . transformFeedbacks [ id ] = null } } function _emscripten_glDeleteVertexArrays ( n , vaos ) { for ( var i = 0 ; i < n ; i + + ) { var id = HEAP32 [ vaos + i * 4 > > 2 ] ; GLctx [ " deleteVertexArray " ] ( GL . vaos [ id ] ) ; GL . vaos [ id ] = null } } function _emscripten_glDeleteVertexArraysOES ( n , vaos ) { for ( var i = 0 ; i < n ; i + + ) { var id = HEAP32 [ vaos + i * 4 > > 2 ] ; GLctx [ " deleteVertexArray " ] ( GL . vaos [ id ] ) ; GL . vaos [ id ] = null } } function _emscripten_glDepthFunc ( x0 ) { GLctx [ " depthFunc " ] ( x0 ) } function _emscripten_glDepthMask ( flag ) { GLctx . depthMask ( ! ! flag ) } function _emscripten_glDepthRangef ( x0 , x1 ) { GLctx [ " depthRange " ] ( x0 , x1 ) } function _emscripten_glDetachShader ( program , shader ) { GLctx . detachShader ( GL . programs [ program ] , GL . shaders [ shader ] ) } function _emscripten_glDisable ( x0 ) { GLctx [ " disable " ] ( x0 ) } function _emscripten_glDisableVertexAttribArray ( index ) { GLctx . disableVertexAttribArray ( index ) } function _emscripten_glDrawArrays ( mode , first , count ) { GLctx . 
drawArrays ( mode , first , count ) } function _emscripten_glDrawArraysInstanced ( mode , first , count , primcount ) { GLctx [ " drawArraysInstanced " ] ( mode , first , count , primcount ) } function _emscripten_glDrawArraysInstancedANGLE ( mode , first , count , primcount ) { GLctx [ " drawArraysInstanced " ] ( mode , first , count , primcount ) } function _emscripten_glDrawArraysInstancedARB ( mode , first , count , primcount ) { GLctx [ " drawArraysInstanced " ] ( mode , first , count , primcount ) } function _emscripten_glDrawArraysInstancedEXT ( mode , first , count , primcount ) { GLctx [ " drawArraysInstanced " ] ( mode , first , count , primcount ) } function _emscripten_glDrawArraysInstancedNV ( mode , first , count , primcount ) { GLctx [ " drawArraysInstanced " ] ( mode , first , count , primcount ) } var __tempFixedLengthArray = [ ] ; function _emscripten_glDrawBuffers ( n , bufs ) { var bufArray = __tempFixedLengthArray [ n ] ; for ( var i = 0 ; i < n ; i + + ) { bufArray [ i ] = HEAP32 [ bufs + i * 4 > > 2 ] } GLctx [ " drawBuffers " ] ( bufArray ) } function _emscripten_glDrawBuffersEXT ( n , bufs ) { var bufArray = __tempFixedLengthArray [ n ] ; for ( var i = 0 ; i < n ; i + + ) { bufArray [ i ] = HEAP32 [ bufs + i * 4 > > 2 ] } GLctx [ " drawBuffers " ] ( bufArray ) } function _emscripten_glDrawBuffersWEBGL ( n , bufs ) { var bufArray = __tempFixedLengthArray [ n ] ; for ( var i = 0 ; i < n ; i + + ) { bufArray [ i ] = HEAP32 [ bufs + i * 4 > > 2 ] } GLctx [ " drawBuffers " ] ( bufArray ) } function _emscripten_glDrawElements ( mode , count , type , indices ) { GLctx . drawElements ( mode , count , type , indices ) } function _emscripten_glDrawElementsInstanced ( mode , count , type , indices , primcount ) { GLctx [ " drawElementsInstanced " ] ( mode , count , type , indices , primcount ) } function _emscripten_glDrawElementsInstancedANGLE ( mode , count , type , indices , primcount ) { GLctx [ " drawElementsInstanced " ] ( mode , count , type , indices , primcount ) } function _emscripten_glDrawElementsInstancedARB ( mode , count , type , indices , primcount ) { GLctx [ " drawElementsInstanced " ] ( mode , count , type , indices , primcount ) } function _emscripten_glDrawElementsInstancedEXT ( mode , count , type , indices , primcount ) { GLctx [ " drawElementsInstanced " ] ( mode , count , type , indices , primcount ) } function _emscripten_glDrawElementsInstancedNV ( mode , count , type , indices , primcount ) { GLctx [ " drawElementsInstanced " ] ( mode , count , type , indices , primcount ) } function _glDrawElements ( mode , count , type , indices ) { GLctx . drawElements ( mode , count , type , indices ) } function _emscripten_glDrawRangeElements ( mode , start , end , count , type , indices ) { _glDrawElements ( mode , count , type , indices ) } function _emscripten_glEnable ( x0 ) { GLctx [ " enable " ] ( x0 ) } function _emscripten_glEnableVertexAttribArray ( index ) { GLctx . enableVertexAttribArray ( index ) } function _emscripten_glEndQuery ( x0 ) { GLctx [ " endQuery " ] ( x0 ) } function _emscripten_glEndQueryEXT ( target ) { GLctx . disjointTimerQueryExt [ " endQueryEXT " ] ( target ) } function _emscripten_glEndTransformFeedback ( ) { GLctx [ " endTransformFeedback " ] ( ) } function _emscripten_glFenceSync ( condition , flags ) { var sync = GLctx . fenceSync ( condition , flags ) ; if ( sync ) { var id = GL . getNewId ( GL . syncs ) ; sync . name = id ; GL . 
syncs [ id ] = sync ; return id } else { return 0 } } function _emscripten_glFinish ( ) { GLctx [ " finish " ] ( ) } function _emscripten_glFlush ( ) { GLctx [ " flush " ] ( ) } function _emscripten_glFlushMappedBufferRange ( ) { err ( " missing function : emscripten_glFlushMappedBufferRange " ) ; abort ( - 1 ) } function _emscripten_glFramebufferRenderbuffer ( target , attachment , renderbuffertarget , renderbuffer ) { GLctx . framebufferRenderbuffer ( target , attachment , renderbuffertarget , GL . renderbuffers [ renderbuffer ] ) } function _emscripten_glFramebufferTexture2D ( target , attachment , textarget , texture , level ) { GLctx . framebufferTexture2D ( target , attachment , textarget , GL . textures [ texture ] , level ) } function _emscripten_glFramebufferTextureLayer ( target , attachment , texture , level , layer ) { GLctx . framebufferTextureLayer ( target , attachment , GL . textures [ texture ] , level , layer ) } function _emscripten_glFrontFace ( x0 ) { GLctx [ " frontFace " ] ( x0 ) } function __glGenObject ( n , buffers , createFunction , objectTable ) { for ( var i = 0 ; i < n ; i + + ) { var buffer = GLctx [ createFunction ] ( ) ; var id = buffer & & GL . getNewId ( objectTable ) ; if ( buffer ) { buffer . name = id ; objectTable [ id ] = buffer } else { GL . recordError ( 1282 ) } HEAP32 [ buffers + i * 4 > > 2 ] = id } } function _emscripten_glGenBuffers ( n , buffers ) { __glGenObject ( n , buffers , " createBuffer " , GL . buffers ) } function _emscripten_glGenFramebuffers ( n , ids ) { __glGenObject ( n , ids , " createFramebuffer " , GL . framebuffers ) } function _emscripten_glGenQueries ( n , ids ) { __glGenObject ( n , ids , " createQuery " , GL . queries ) } function _emscripten_glGenQueriesEXT ( n , ids ) { for ( var i = 0 ; i < n ; i + + ) { var query = GLctx . disjointTimerQueryExt [ " createQueryEXT " ] ( ) ; if ( ! query ) { GL . recordError ( 1282 ) ; while ( i < n ) HEAP32 [ ids + i + + * 4 > > 2 ] = 0 ; return } var id = GL . getNewId ( GL . timerQueriesEXT ) ; query . name = id ; GL . timerQueriesEXT [ id ] = query ; HEAP32 [ ids + i * 4 > > 2 ] = id } } function _emscripten_glGenRenderbuffers ( n , renderbuffers ) { __glGenObject ( n , renderbuffers , " createRenderbuffer " , GL . renderbuffers ) } function _emscripten_glGenSamplers ( n , samplers ) { __glGenObject ( n , samplers , " createSampler " , GL . samplers ) } function _emscripten_glGenTextures ( n , textures ) { __glGenObject ( n , textures , " createTexture " , GL . textures ) } function _emscripten_glGenTransformFeedbacks ( n , ids ) { __glGenObject ( n , ids , " createTransformFeedback " , GL . transformFeedbacks ) } function _emscripten_glGenVertexArrays ( n , arrays ) { __glGenObject ( n , arrays , " createVertexArray " , GL . vaos ) } function _emscripten_glGenVertexArraysOES ( n , arrays ) { __glGenObject ( n , arrays , " createVertexArray " , GL . vaos ) } function _emscripten_glGenerateMipmap ( x0 ) { GLctx [ " generateMipmap " ] ( x0 ) } function _emscripten_glGetActiveAttrib ( program , index , bufSize , length , size , type , name ) { program = GL . programs [ program ] ; var info = GLctx . getActiveAttrib ( program , index ) ; if ( ! info ) return ; if ( bufSize > 0 & & name ) { var numBytesWrittenExclNull = stringToUTF8 ( info . name , name , bufSize ) ; if ( length ) HEAP32 [ length > > 2 ] = numBytesWrittenExclNull } else { if ( length ) HEAP32 [ length > > 2 ] = 0 } if ( size ) HEAP32 [ size > > 2 ] = info . size ; if ( type ) HEAP32 [ type > > 2 ] = info . 
type } function _emscripten_glGetActiveUniform ( program , index , bufSize , length , size , type , name ) { program = GL . programs [ program ] ; var info = GLctx . getActiveUniform ( program , index ) ; if ( ! info ) return ; if ( bufSize > 0 & & name ) { var numBytesWrittenExclNull = stringToUTF8 ( info . name , name , bufSize ) ; if ( length ) HEAP32 [ length > > 2 ] = numBytesWrittenExclNull } else { if ( length ) HEAP32 [ length > > 2 ] = 0 } if ( size ) HEAP32 [ size > > 2 ] = info . size ; if ( type ) HEAP32 [ type > > 2 ] = info . type } function _emscripten_glGetActiveUniformBlockName ( program , uniformBlockIndex , bufSize , length , uniformBlockName ) { program = GL . programs [ program ] ; var result = GLctx [ " getActiveUniformBlockName " ] ( program , uniformBlockIndex ) ; if ( ! result ) return ; if ( uniformBlockName & & bufSize > 0 ) { var numBytesWrittenExclNull = stringToUTF8 ( result , uniformBlockName , bufSize ) ; if ( length ) HEAP32 [ length > > 2 ] = numBytesWrittenExclNull } else { if ( length ) HEAP32 [ length > > 2 ] = 0 } } function _emscripten_glGetActiveUniformBlockiv ( program , uniformBlockIndex , pname , params ) { if ( ! params ) { GL . recordError ( 1281 ) ; return } program = GL . programs [ program ] ; switch ( pname ) { case 35393 : var name = GLctx [ " getActiveUniformBlockName " ] ( program , uniformBlockIndex ) ; HEAP32 [ params > > 2 ] = name . length + 1 ; return ; default : var result = GLctx [ " getActiveUniformBlockParameter " ] ( program , uniformBlockIndex , pname ) ; if ( ! result ) return ; if ( typeof result = = " number " ) { HEAP32 [ params > > 2 ] = result } else { for ( var i = 0 ; i < result . length ; i + + ) { HEAP32 [ params + i * 4 > > 2 ] = result [ i ] } } } } function _emscripten_glGetActiveUniformsiv ( program , uniformCount , uniformIndices , pname , params ) { if ( ! params ) { GL . recordError ( 1281 ) ; return } if ( uniformCount > 0 & & uniformIndices = = 0 ) { GL . recordError ( 1281 ) ; return } program = GL . programs [ program ] ; var ids = [ ] ; for ( var i = 0 ; i < uniformCount ; i + + ) { ids . push ( HEAP32 [ uniformIndices + i * 4 > > 2 ] ) } var result = GLctx [ " getActiveUniforms " ] ( program , ids , pname ) ; if ( ! result ) return ; var len = result . length ; for ( var i = 0 ; i < len ; i + + ) { HEAP32 [ params + i * 4 > > 2 ] = result [ i ] } } function _emscripten_glGetAttachedShaders ( program , maxCount , count , shaders ) { var result = GLctx . getAttachedShaders ( GL . programs [ program ] ) ; var len = result . length ; if ( len > maxCount ) { len = maxCount } HEAP32 [ count > > 2 ] = len ; for ( var i = 0 ; i < len ; + + i ) { var id = GL . shaders . indexOf ( result [ i ] ) ; HEAP32 [ shaders + i * 4 > > 2 ] = id } } function _emscripten_glGetAttribLocation ( program , name ) { return GLctx . getAttribLocation ( GL . programs [ program ] , UTF8ToString ( name ) ) } function emscriptenWebGLGet ( name_ , p , type ) { if ( ! p ) { GL . recordError ( 1281 ) ; return } var ret = undefined ; switch ( name_ ) { case 36346 : ret = 1 ; break ; case 36344 : if ( type ! = = " Integer " & & type ! = = " Integer64 " ) { GL . recordError ( 1280 ) } return ; case 34814 : case 36345 : ret = 0 ; break ; case 34466 : var formats = GLctx . getParameter ( 34467 ) ; ret = formats ? formats . length : 0 ; break ; case 33309 : if ( GL . currentContext . version < 2 ) { GL . recordError ( 1282 ) ; return } var exts = GLctx . getSupportedExtensions ( ) ; ret = 2 * exts . 
length ; break ; case 33307 : case 33308 : if ( GL . currentContext . version < 2 ) { GL . recordError ( 1280 ) ; return } ret = name_ = = 33307 ? 3 : 0 ; break } if ( ret = = = undefined ) { var result = GLctx . getParameter ( name_ ) ; switch ( typeof result ) { case " number " : ret = result ; break ; case " boolean " : ret = result ? 1 : 0 ; break ; case " string " : GL . recordError ( 1280 ) ; return ; case " object " : if ( result = = = null ) { switch ( name_ ) { case 34964 : case 35725 : case 34965 : case 36006 : case 36007 : case 32873 : case 34229 : case 35097 : case 36389 : case 34068 : { ret = 0 ; break } default : { GL . recordError ( 1280 ) ; return } } } else if ( result instanceof Float32Array | | result instanceof Uint32Array | | result instanceof Int32Array | | result instanceof Array ) { for ( var i = 0 ; i < result . length ; + + i ) { switch ( type ) { case " Integer " : HEAP32 [ p + i * 4 > > 2 ] = result [ i ] ; break ; case " Float " : HEAPF32 [ p + i * 4 > > 2 ] = result [ i ] ; break ; case " Boolean " : HEAP8 [ p + i > > 0 ] = result [ i ] ? 1 : 0 ; break ; default : throw " internal glGet error , bad type : " + type } } return } else { try { ret = result . name | 0 } catch ( e ) { GL . recordError ( 1280 ) ; err ( " GL_INVALID_ENUM in glGet " + type + " v : Unknown object returned from WebGL getParameter ( " + name_ + " ) ! ( error : " + e + " ) " ) ; return } } break ; default : GL . recordError ( 1280 ) ; return } } switch ( type ) { case " Integer64 " : tempI64 = [ ret > > > 0 , ( tempDouble = ret , + Math_abs ( tempDouble ) > = 1 ? tempDouble > 0 ? ( Math_min ( + Math_floor ( tempDouble / 4294967296 ) , 4294967295 ) | 0 ) > > > 0 : ~ ~ + Math_ceil ( ( tempDouble - + ( ~ ~ tempDouble > > > 0 ) ) / 4294967296 ) > > > 0 : 0 ) ] , HEAP32 [ p > > 2 ] = tempI64 [ 0 ] , HEAP32 [ p + 4 > > 2 ] = tempI64 [ 1 ] ; break ; case " Integer " : HEAP32 [ p > > 2 ] = ret ; break ; case " Float " : HEAPF32 [ p > > 2 ] = ret ; break ; case " Boolean " : HEAP8 [ p > > 0 ] = ret ? 1 : 0 ; break ; default : throw " internal glGet error , bad type : " + type } } function _emscripten_glGetBooleanv ( name_ , p ) { emscriptenWebGLGet ( name_ , p , " Boolean " ) } function _emscripten_glGetBufferParameteri64v ( target , value , data ) { if ( ! data ) { GL . recordError ( 1281 ) ; return } tempI64 = [ GLctx . getBufferParameter ( target , value ) > > > 0 , ( tempDouble = GLctx . getBufferParameter ( target , value ) , + Math_abs ( tempDouble ) > = 1 ? tempDouble > 0 ? ( Math_min ( + Math_floor ( tempDouble / 4294967296 ) , 4294967295 ) | 0 ) > > > 0 : ~ ~ + Math_ceil ( ( tempDouble - + ( ~ ~ tempDouble > > > 0 ) ) / 4294967296 ) > > > 0 : 0 ) ] , HEAP32 [ data > > 2 ] = tempI64 [ 0 ] , HEAP32 [ data + 4 > > 2 ] = tempI64 [ 1 ] } function _emscripten_glGetBufferParameteriv ( target , value , data ) { if ( ! data ) { GL . recordError ( 1281 ) ; return } HEAP32 [ data > > 2 ] = GLctx . getBufferParameter ( target , value ) } function _emscripten_glGetBufferPointerv ( ) { err ( " missing function : emscripten_glGetBufferPointerv " ) ; abort ( - 1 ) } function _emscripten_glGetError ( ) { if ( GL . lastError ) { var error = GL . lastError ; GL . lastError = 0 ; return error } else { return GLctx . getError ( ) } } function _emscripten_glGetFloatv ( name_ , p ) { emscriptenWebGLGet ( name_ , p , " Float " ) } function _emscripten_glGetFragDataLocation ( program , name ) { return GLctx [ " getFragDataLocation " ] ( GL . 
programs [ program ] , UTF8ToString ( name ) ) } function _emscripten_glGetFramebufferAttachmentParameteriv ( target , attachment , pname , params ) { var result = GLctx . getFramebufferAttachmentParameter ( target , attachment , pname ) ; if ( result instanceof WebGLRenderbuffer | | result instanceof WebGLTexture ) { result = result . name | 0 } HEAP32 [ params > > 2 ] = result } function emscriptenWebGLGetIndexed ( target , index , data , type ) { if ( ! data ) { GL . recordError ( 1281 ) ; return } var result = GLctx [ " getIndexedParameter " ] ( target , index ) ; var ret ; switch ( typeof result ) { case " boolean " : ret = result ? 1 : 0 ; break ; case " number " : ret = result ; break ; case " object " : if ( result = = = null ) { switch ( target ) { case 35983 : case 35368 : ret = 0 ; break ; default : { GL . recordError ( 1280 ) ; return } } } else if ( result instanceof WebGLBuffer ) { ret = result . name | 0 } else { GL . recordError ( 1280 ) ; return } break ; default : GL . recordError ( 1280 ) ; return } switch ( type ) { case " Integer64 " : tempI64 = [ ret > > > 0 , ( tempDouble = ret , + Math_abs ( tempDouble ) > = 1 ? tempDouble > 0 ? ( Math_min ( + Math_floor ( tempDouble / 4294967296 ) , 4294967295 ) | 0 ) > > > 0 : ~ ~ + Math_ceil ( ( tempDouble - + ( ~ ~ tempDouble > > > 0 ) ) / 4294967296 ) > > > 0 : 0 ) ] , HEAP32 [ data > > 2 ] = tempI64 [ 0 ] , HEAP32 [ data + 4 > > 2 ] = tempI64 [ 1 ] ; break ; case " Integer " : HEAP32 [ data > > 2 ] = ret ; break ; case " Float " : HEAPF32 [ data > > 2 ] = ret ; break ; case " Boolean " : HEAP8 [ data > > 0 ] = ret ? 1 : 0 ; break ; default : throw " internal emscriptenWebGLGetIndexed ( ) error , bad type : " + type } } function _emscripten_glGetInteger64i_v ( target , index , data ) { emscriptenWebGLGetIndexed ( target , index , data , " Integer64 " ) } function _emscripten_glGetInteger64v ( name_ , p ) { emscriptenWebGLGet ( name_ , p , " Integer64 " ) } function _emscripten_glGetIntegeri_v ( target , index , data ) { emscriptenWebGLGetIndexed ( target , index , data , " Integer " ) } function _emscripten_glGetIntegerv ( name_ , p ) { emscriptenWebGLGet ( name_ , p , " Integer " ) } function _emscripten_glGetInternalformativ ( ) { err ( " missing function : emscripten_glGetInternalformativ " ) ; abort ( - 1 ) } function _emscripten_glGetProgramBinary ( program , bufSize , length , binaryFormat , binary ) { GL . recordError ( 1282 ) } function _emscripten_glGetProgramInfoLog ( program , maxLength , length , infoLog ) { var log = GLctx . getProgramInfoLog ( GL . programs [ program ] ) ; if ( log = = = null ) log = " ( unknown error ) " ; if ( maxLength > 0 & & infoLog ) { var numBytesWrittenExclNull = stringToUTF8 ( log , infoLog , maxLength ) ; if ( length ) HEAP32 [ length > > 2 ] = numBytesWrittenExclNull } else { if ( length ) HEAP32 [ length > > 2 ] = 0 } } function _emscripten_glGetProgramiv ( program , pname , p ) { if ( ! p ) { GL . recordError ( 1281 ) ; return } if ( program > = GL . counter ) { GL . recordError ( 1281 ) ; return } var ptable = GL . programInfos [ program ] ; if ( ! ptable ) { GL . recordError ( 1282 ) ; return } if ( pname = = 35716 ) { var log = GLctx . getProgramInfoLog ( GL . programs [ program ] ) ; if ( log = = = null ) log = " ( unknown error ) " ; HEAP32 [ p > > 2 ] = log . length + 1 } else if ( pname = = 35719 ) { HEAP32 [ p > > 2 ] = ptable . maxUniformLength } else if ( pname = = 35722 ) { if ( ptable . maxAttributeLength = = - 1 ) { program = GL . 
programs [ program ] ; var numAttribs = GLctx . getProgramParameter ( program , 35721 ) ; ptable . maxAttributeLength = 0 ; for ( var i = 0 ; i < numAttribs ; + + i ) { var activeAttrib = GLctx . getActiveAttrib ( program , i ) ; ptable . maxAttributeLength = Math . max ( ptable . maxAttributeLength , activeAttrib . name . length + 1 ) } } HEAP32 [ p > > 2 ] = ptable . maxAttributeLength } else if ( pname = = 35381 ) { if ( ptable . maxUniformBlockNameLength = = - 1 ) { program = GL . programs [ program ] ; var numBlocks = GLctx . getProgramParameter ( program , 35382 ) ; ptable . maxUniformBlockNameLength = 0 ; for ( var i = 0 ; i < numBlocks ; + + i ) { var activeBlockName = GLctx . getActiveUniformBlockName ( program , i ) ; ptable . maxUniformBlockNameLength = Math . max ( ptable . maxUniformBlockNameLength , activeBlockName . length + 1 ) } } HEAP32 [ p > > 2 ] = ptable . maxUniformBlockNameLength } else { HEAP32 [ p > > 2 ] = GLctx . getProgramParameter ( GL . programs [ program ] , pname ) } } function _emscripten_glGetQueryObjecti64vEXT ( id , pname , params ) { if ( ! params ) { GL . recordError ( 1281 ) ; return } var query = GL . timerQueriesEXT [ id ] ; var param = GLctx . disjointTimerQueryExt [ " getQueryObjectEXT " ] ( query , pname ) ; var ret ; if ( typeof param = = " boolean " ) { ret = param ? 1 : 0 } else { ret = param } tempI64 = [ ret > > > 0 , ( tempDouble = ret , + Math_abs ( tempDouble ) > = 1 ? tempDouble > 0 ? ( Math_min ( + Math_floor ( tempDouble / 4294967296 ) , 4294967295 ) | 0 ) > > > 0 : ~ ~ + Math_ceil ( ( tempDouble - + ( ~ ~ tempDouble > > > 0 ) ) / 4294967296 ) > > > 0 : 0 ) ] , HEAP32 [ params > > 2 ] = tempI64 [ 0 ] , HEAP32 [ params + 4 > > 2 ] = tempI64 [ 1 ] } function _emscripten_glGetQueryObjectivEXT ( id , pname , params ) { if ( ! params ) { GL . recordError ( 1281 ) ; return } var query = GL . timerQueriesEXT [ id ] ; var param = GLctx . disjointTimerQueryExt [ " getQueryObjectEXT " ] ( query , pname ) ; var ret ; if ( typeof param = = " boolean " ) { ret = param ? 1 : 0 } else { ret = param } HEAP32 [ params > > 2 ] = ret } function _emscripten_glGetQueryObjectui64vEXT ( id , pname , params ) { if ( ! params ) { GL . recordError ( 1281 ) ; return } var query = GL . timerQueriesEXT [ id ] ; var param = GLctx . disjointTimerQueryExt [ " getQueryObjectEXT " ] ( query , pname ) ; var ret ; if ( typeof param = = " boolean " ) { ret = param ? 1 : 0 } else { ret = param } tempI64 = [ ret > > > 0 , ( tempDouble = ret , + Math_abs ( tempDouble ) > = 1 ? tempDouble > 0 ? ( Math_min ( + Math_floor ( tempDouble / 4294967296 ) , 4294967295 ) | 0 ) > > > 0 : ~ ~ + Math_ceil ( ( tempDouble - + ( ~ ~ tempDouble > > > 0 ) ) / 4294967296 ) > > > 0 : 0 ) ] , HEAP32 [ params > > 2 ] = tempI64 [ 0 ] , HEAP32 [ params + 4 > > 2 ] = tempI64 [ 1 ] } function _emscripten_glGetQueryObjectuiv ( id , pname , params ) { if ( ! params ) { GL . recordError ( 1281 ) ; return } var query = GL . queries [ id ] ; var param = GLctx [ " getQueryParameter " ] ( query , pname ) ; var ret ; if ( typeof param = = " boolean " ) { ret = param ? 1 : 0 } else { ret = param } HEAP32 [ params > > 2 ] = ret } function _emscripten_glGetQueryObjectuivEXT ( id , pname , params ) { if ( ! params ) { GL . recordError ( 1281 ) ; return } var query = GL . timerQueriesEXT [ id ] ; var param = GLctx . disjointTimerQueryExt [ " getQueryObjectEXT " ] ( query , pname ) ; var ret ; if ( typeof param = = " boolean " ) { ret = param ? 
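// The tempI64/tempDouble sequences used by the 64-bit getters in this stretch split a JS number
// into low and high 32-bit words (low = value >>> 0, high = floor(value / 2^32)) and store them as
// two consecutive HEAP32 entries, since the Emscripten heap exposes no native i64 view.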
1 : 0 } else { ret = param } HEAP32 [ params > > 2 ] = ret } function _emscripten_glGetQueryiv ( target , pname , params ) { if ( ! params ) { GL . recordError ( 1281 ) ; return } HEAP32 [ params > > 2 ] = GLctx [ " getQuery " ] ( target , pname ) } function _emscripten_glGetQueryivEXT ( target , pname , params ) { if ( ! params ) { GL . recordError ( 1281 ) ; return } HEAP32 [ params > > 2 ] = GLctx . disjointTimerQueryExt [ " getQueryEXT " ] ( target , pname ) } function _emscripten_glGetRenderbufferParameteriv ( target , pname , params ) { if ( ! params ) { GL . recordError ( 1281 ) ; return } HEAP32 [ params > > 2 ] = GLctx . getRenderbufferParameter ( target , pname ) } function _emscripten_glGetSamplerParameterfv ( sampler , pname , params ) { if ( ! params ) { GL . recordError ( 1281 ) ; return } sampler = GL . samplers [ sampler ] ; HEAPF32 [ params > > 2 ] = GLctx [ " getSamplerParameter " ] ( sampler , pname ) } function _emscripten_glGetSamplerParameteriv ( sampler , pname , params ) { if ( ! params ) { GL . recordError ( 1281 ) ; return } sampler = GL . samplers [ sampler ] ; HEAP32 [ params > > 2 ] = GLctx [ " getSamplerParameter " ] ( sampler , pname ) } function _emscripten_glGetShaderInfoLog ( shader , maxLength , length , infoLog ) { var log = GLctx . getShaderInfoLog ( GL . shaders [ shader ] ) ; if ( log = = = null ) log = " ( unknown error ) " ; if ( maxLength > 0 & & infoLog ) { var numBytesWrittenExclNull = stringToUTF8 ( log , infoLog , maxLength ) ; if ( length ) HEAP32 [ length > > 2 ] = numBytesWrittenExclNull } else { if ( length ) HEAP32 [ length > > 2 ] = 0 } } function _emscripten_glGetShaderPrecisionFormat ( shaderType , precisionType , range , precision ) { var result = GLctx . getShaderPrecisionFormat ( shaderType , precisionType ) ; HEAP32 [ range > > 2 ] = result . rangeMin ; HEAP32 [ range + 4 > > 2 ] = result . rangeMax ; HEAP32 [ precision > > 2 ] = result . precision } function _emscripten_glGetShaderSource ( shader , bufSize , length , source ) { var result = GLctx . getShaderSource ( GL . shaders [ shader ] ) ; if ( ! result ) return ; if ( bufSize > 0 & & source ) { var numBytesWrittenExclNull = stringToUTF8 ( result , source , bufSize ) ; if ( length ) HEAP32 [ length > > 2 ] = numBytesWrittenExclNull } else { if ( length ) HEAP32 [ length > > 2 ] = 0 } } function _emscripten_glGetShaderiv ( shader , pname , p ) { if ( ! p ) { GL . recordError ( 1281 ) ; return } if ( pname = = 35716 ) { var log = GLctx . getShaderInfoLog ( GL . shaders [ shader ] ) ; if ( log = = = null ) log = " ( unknown error ) " ; HEAP32 [ p > > 2 ] = log . length + 1 } else if ( pname = = 35720 ) { var source = GLctx . getShaderSource ( GL . shaders [ shader ] ) ; var sourceLength = source = = = null | | source . length = = 0 ? 0 : source . length + 1 ; HEAP32 [ p > > 2 ] = sourceLength } else { HEAP32 [ p > > 2 ] = GLctx . getShaderParameter ( GL . shaders [ shader ] , pname ) } } function stringToNewUTF8 ( jsString ) { var length = lengthBytesUTF8 ( jsString ) + 1 ; var cString = _malloc ( length ) ; stringToUTF8 ( jsString , cString , length ) ; return cString } function _emscripten_glGetString ( name_ ) { if ( GL . stringCache [ name_ ] ) return GL . stringCache [ name_ ] ; var ret ; switch ( name_ ) { case 7939 : var exts = GLctx . getSupportedExtensions ( ) ; var gl_exts = [ ] ; for ( var i = 0 ; i < exts . length ; + + i ) { gl_exts . push ( exts [ i ] ) ; gl_exts . push ( " GL_ " + exts [ i ] ) } ret = stringToNewUTF8 ( gl_exts . 
join ( " " ) ) ; break ; case 7936 : case 7937 : case 37445 : case 37446 : var s = GLctx . getParameter ( name_ ) ; if ( ! s ) { GL . recordError ( 1280 ) } ret = stringToNewUTF8 ( s ) ; break ; case 7938 : var glVersion = GLctx . getParameter ( GLctx . VERSION ) ; if ( GL . currentContext . version > = 2 ) glVersion = " OpenGL ES 3 . 0 ( " + glVersion + " ) " ; else { glVersion = " OpenGL ES 2 . 0 ( " + glVersion + " ) " } ret = stringToNewUTF8 ( glVersion ) ; break ; case 35724 : var glslVersion = GLctx . getParameter ( GLctx . SHADING_LANGUAGE_VERSION ) ; var ver_re = / ^ WebGL GLSL ES ( [ 0 - 9 ] \ . [ 0 - 9 ] [ 0 - 9 ] ? ) ( ? : $ | . * ) / ; var ver_num = glslVersion . match ( ver_re ) ; if ( ver_num ! = = null ) { if ( ver_num [ 1 ] . length = = 3 ) ver_num [ 1 ] = ver_num [ 1 ] + " 0 " ; glslVersion = " OpenGL ES GLSL ES " + ver_num [ 1 ] + " ( " + glslVersion + " ) " } ret = stringToNewUTF8 ( glslVersion ) ; break ; default : GL . recordError ( 1280 ) ; return 0 } GL . stringCache [ name_ ] = ret ; return ret } function _emscripten_glGetStringi ( name , index ) { if ( GL . currentContext . version < 2 ) { GL . recordError ( 1282 ) ; return 0 } var stringiCache = GL . stringiCache [ name ] ; if ( stringiCache ) { if ( index < 0 | | index > = stringiCache . length ) { GL . recordError ( 1281 ) ; return 0 } return stringiCache [ index ] } switch ( name ) { case 7939 : var exts = GLctx . getSupportedExtensions ( ) ; var gl_exts = [ ] ; for ( var i = 0 ; i < exts . length ; + + i ) { gl_exts . push ( stringToNewUTF8 ( exts [ i ] ) ) ; gl_exts . push ( stringToNewUTF8 ( " GL_ " + exts [ i ] ) ) } stringiCache = GL . stringiCache [ name ] = gl_exts ; if ( index < 0 | | index > = stringiCache . length ) { GL . recordError ( 1281 ) ; return 0 } return stringiCache [ index ] ; default : GL . recordError ( 1280 ) ; return 0 } } function _emscripten_glGetSynciv ( sync , pname , bufSize , length , values ) { if ( bufSize < 0 ) { GL . recordError ( 1281 ) ; return } if ( ! values ) { GL . recordError ( 1281 ) ; return } var ret = GLctx . getSyncParameter ( GL . syncs [ sync ] , pname ) ; HEAP32 [ length > > 2 ] = ret ; if ( ret ! = = null & & length ) HEAP32 [ length > > 2 ] = 1 } function _emscripten_glGetTexParameterfv ( target , pname , params ) { if ( ! params ) { GL . recordError ( 1281 ) ; return } HEAPF32 [ params > > 2 ] = GLctx . getTexParameter ( target , pname ) } function _emscripten_glGetTexParameteriv ( target , pname , params ) { if ( ! params ) { GL . recordError ( 1281 ) ; return } HEAP32 [ params > > 2 ] = GLctx . getTexParameter ( target , pname ) } function _emscripten_glGetTransformFeedbackVarying ( program , index , bufSize , length , size , type , name ) { program = GL . programs [ program ] ; var info = GLctx [ " getTransformFeedbackVarying " ] ( program , index ) ; if ( ! info ) return ; if ( name & & bufSize > 0 ) { var numBytesWrittenExclNull = stringToUTF8 ( info . name , name , bufSize ) ; if ( length ) HEAP32 [ length > > 2 ] = numBytesWrittenExclNull } else { if ( length ) HEAP32 [ length > > 2 ] = 0 } if ( size ) HEAP32 [ size > > 2 ] = info . size ; if ( type ) HEAP32 [ type > > 2 ] = info . type } function _emscripten_glGetUniformBlockIndex ( program , uniformBlockName ) { return GLctx [ " getUniformBlockIndex " ] ( GL . programs [ program ] , UTF8ToString ( uniformBlockName ) ) } function _emscripten_glGetUniformIndices ( program , uniformCount , uniformNames , uniformIndices ) { if ( ! uniformIndices ) { GL . 
recordError ( 1281 ) ; return } if ( uniformCount > 0 & & ( uniformNames = = 0 | | uniformIndices = = 0 ) ) { GL . recordError ( 1281 ) ; return } program = GL . programs [ program ] ; var names = [ ] ; for ( var i = 0 ; i < uniformCount ; i + + ) names . push ( UTF8ToString ( HEAP32 [ uniformNames + i * 4 > > 2 ] ) ) ; var result = GLctx [ " getUniformIndices " ] ( program , names ) ; if ( ! result ) return ; var len = result . length ; for ( var i = 0 ; i < len ; i + + ) { HEAP32 [ uniformIndices + i * 4 > > 2 ] = result [ i ] } } function _emscripten_glGetUniformLocation ( program , name ) { name = UTF8ToString ( name ) ; var arrayIndex = 0 ; if ( name [ name . length - 1 ] = = " ] " ) { var leftBrace = name . lastIndexOf ( " [ " ) ; arrayIndex = name [ leftBrace + 1 ] ! = " ] " ? parseInt ( name . slice ( leftBrace + 1 ) ) : 0 ; name = name . slice ( 0 , leftBrace ) } var uniformInfo = GL . programInfos [ program ] & & GL . programInfos [ program ] . uniforms [ name ] ; if ( uniformInfo & & arrayIndex > = 0 & & arrayIndex < uniformInfo [ 0 ] ) { return uniformInfo [ 1 ] + arrayIndex } else { return - 1 } } function emscriptenWebGLGetUniform ( program , location , params , type ) { if ( ! params ) { GL . recordError ( 1281 ) ; return } var data = GLctx . getUniform ( GL . programs [ program ] , GL . uniforms [ location ] ) ; if ( typeof data = = " number " | | typeof data = = " boolean " ) { switch ( type ) { case " Integer " : HEAP32 [ params > > 2 ] = data ; break ; case " Float " : HEAPF32 [ params > > 2 ] = data ; break ; default : throw " internal emscriptenWebGLGetUniform ( ) error , bad type : " + type } } else { for ( var i = 0 ; i < data . length ; i + + ) { switch ( type ) { case " Integer " : HEAP32 [ params + i * 4 > > 2 ] = data [ i ] ; break ; case " Float " : HEAPF32 [ params + i * 4 > > 2 ] = data [ i ] ; break ; default : throw " internal emscriptenWebGLGetUniform ( ) error , bad type : " + type } } } } function _emscripten_glGetUniformfv ( program , location , params ) { emscriptenWebGLGetUniform ( program , location , params , " Float " ) } function _emscripten_glGetUniformiv ( program , location , params ) { emscriptenWebGLGetUniform ( program , location , params , " Integer " ) } function _emscripten_glGetUniformuiv ( program , location , params ) { emscriptenWebGLGetUniform ( program , location , params , " Integer " ) } function emscriptenWebGLGetVertexAttrib ( index , pname , params , type ) { if ( ! params ) { GL . recordError ( 1281 ) ; return } var data = GLctx . getVertexAttrib ( index , pname ) ; if ( pname = = 34975 ) { HEAP32 [ params > > 2 ] = data [ " name " ] } else if ( typeof data = = " number " | | typeof data = = " boolean " ) { switch ( type ) { case " Integer " : HEAP32 [ params > > 2 ] = data ; break ; case " Float " : HEAPF32 [ params > > 2 ] = data ; break ; case " FloatToInteger " : HEAP32 [ params > > 2 ] = Math . fround ( data ) ; break ; default : throw " internal emscriptenWebGLGetVertexAttrib ( ) error , bad type : " + type } } else { for ( var i = 0 ; i < data . length ; i + + ) { switch ( type ) { case " Integer " : HEAP32 [ params + i * 4 > > 2 ] = data [ i ] ; break ; case " Float " : HEAPF32 [ params + i * 4 > > 2 ] = data [ i ] ; break ; case " FloatToInteger " : HEAP32 [ params + i * 4 > > 2 ] = Math . 
fround ( data [ i ] ) ; break ; default : throw " internal emscriptenWebGLGetVertexAttrib ( ) error , bad type : " + type } } } } function _emscripten_glGetVertexAttribIiv ( index , pname , params ) { emscriptenWebGLGetVertexAttrib ( index , pname , params , " Integer " ) } function _emscripten_glGetVertexAttribIuiv ( index , pname , params ) { emscriptenWebGLGetVertexAttrib ( index , pname , params , " Integer " ) } function _emscripten_glGetVertexAttribPointerv ( index , pname , pointer ) { if ( ! pointer ) { GL . recordError ( 1281 ) ; return } HEAP32 [ pointer > > 2 ] = GLctx . getVertexAttribOffset ( index , pname ) } function _emscripten_glGetVertexAttribfv ( index , pname , params ) { emscriptenWebGLGetVertexAttrib ( index , pname , params , " Float " ) } function _emscripten_glGetVertexAttribiv ( index , pname , params ) { emscriptenWebGLGetVertexAttrib ( index , pname , params , " FloatToInteger " ) } function _emscripten_glHint ( x0 , x1 ) { GLctx [ " hint " ] ( x0 , x1 ) } function _emscripten_glInvalidateFramebuffer ( target , numAttachments , attachments ) { var list = __tempFixedLengthArray [ numAttachments ] ; for ( var i = 0 ; i < numAttachments ; i + + ) { list [ i ] = HEAP32 [ attachments + i * 4 > > 2 ] } GLctx [ " invalidateFramebuffer " ] ( target , list ) } function _emscripten_glInvalidateSubFramebuffer ( target , numAttachments , attachments , x , y , width , height ) { var list = __tempFixedLengthArray [ numAttachments ] ; for ( var i = 0 ; i < numAttachments ; i + + ) { list [ i ] = HEAP32 [ attachments + i * 4 > > 2 ] } GLctx [ " invalidateSubFramebuffer " ] ( target , list , x , y , width , height ) } function _emscripten_glIsBuffer ( buffer ) { var b = GL . buffers [ buffer ] ; if ( ! b ) return 0 ; return GLctx . isBuffer ( b ) } function _emscripten_glIsEnabled ( x0 ) { return GLctx [ " isEnabled " ] ( x0 ) } function _emscripten_glIsFramebuffer ( framebuffer ) { var fb = GL . framebuffers [ framebuffer ] ; if ( ! fb ) return 0 ; return GLctx . isFramebuffer ( fb ) } function _emscripten_glIsProgram ( program ) { program = GL . programs [ program ] ; if ( ! program ) return 0 ; return GLctx . isProgram ( program ) } function _emscripten_glIsQuery ( id ) { var query = GL . queries [ id ] ; if ( ! query ) return 0 ; return GLctx [ " isQuery " ] ( query ) } function _emscripten_glIsQueryEXT ( id ) { var query = GL . timerQueriesEXT [ id ] ; if ( ! query ) return 0 ; return GLctx . disjointTimerQueryExt [ " isQueryEXT " ] ( query ) } function _emscripten_glIsRenderbuffer ( renderbuffer ) { var rb = GL . renderbuffers [ renderbuffer ] ; if ( ! rb ) return 0 ; return GLctx . isRenderbuffer ( rb ) } function _emscripten_glIsSampler ( id ) { var sampler = GL . samplers [ id ] ; if ( ! sampler ) return 0 ; return GLctx [ " isSampler " ] ( sampler ) } function _emscripten_glIsShader ( shader ) { var s = GL . shaders [ shader ] ; if ( ! s ) return 0 ; return GLctx . isShader ( s ) } function _emscripten_glIsSync ( sync ) { var sync = GL . syncs [ sync ] ; if ( ! sync ) return 0 ; return GLctx . isSync ( sync ) } function _emscripten_glIsTexture ( id ) { var texture = GL . textures [ id ] ; if ( ! texture ) return 0 ; return GLctx . isTexture ( texture ) } function _emscripten_glIsTransformFeedback ( id ) { return GLctx [ " isTransformFeedback " ] ( GL . transformFeedbacks [ id ] ) } function _emscripten_glIsVertexArray ( array ) { var vao = GL . vaos [ array ] ; if ( ! 
vao ) return 0 ; return GLctx [ " isVertexArray " ] ( vao ) } function _emscripten_glIsVertexArrayOES ( array ) { var vao = GL . vaos [ array ] ; if ( ! vao ) return 0 ; return GLctx [ " isVertexArray " ] ( vao ) } function _emscripten_glLineWidth ( x0 ) { GLctx [ " lineWidth " ] ( x0 ) } function _emscripten_glLinkProgram ( program ) { GLctx . linkProgram ( GL . programs [ program ] ) ; GL . populateUniformTable ( program ) } function _emscripten_glMapBufferRange ( ) { err ( " missing function : emscripten_glMapBufferRange " ) ; abort ( - 1 ) } function _emscripten_glPauseTransformFeedback ( ) { GLctx [ " pauseTransformFeedback " ] ( ) } function _emscripten_glPixelStorei ( pname , param ) { if ( pname = = 3317 ) { GL . unpackAlignment = param } GLctx . pixelStorei ( pname , param ) } function _emscripten_glPolygonOffset ( x0 , x1 ) { GLctx [ " polygonOffset " ] ( x0 , x1 ) } function _emscripten_glProgramBinary ( program , binaryFormat , binary , length ) { GL . recordError ( 1280 ) } function _emscripten_glProgramParameteri ( program , pname , value ) { GL . recordError ( 1280 ) } function _emscripten_glQueryCounterEXT ( id , target ) { GLctx . disjointTimerQueryExt [ " queryCounterEXT " ] ( GL . timerQueriesEXT [ id ] , target ) } function _emscripten_glReadBuffer ( x0 ) { GLctx [ " readBuffer " ] ( x0 ) } function __computeUnpackAlignedImageSize ( width , height , sizePerPixel , alignment ) { function roundedToNextMultipleOf ( x , y ) { return x + y - 1 & - y } var plainRowSize = width * sizePerPixel ; var alignedRowSize = roundedToNextMultipleOf ( plainRowSize , alignment ) ; return height * alignedRowSize } var __colorChannelsInGlTextureFormat = { 6402 : 1 , 6403 : 1 , 6406 : 1 , 6407 : 3 , 6408 : 4 , 6409 : 1 , 6410 : 2 , 33319 : 2 , 33320 : 2 , 35904 : 3 , 35906 : 4 , 36244 : 1 , 36248 : 3 , 36249 : 4 } ; var __sizeOfGlTextureElementType = { 5120 : 1 , 5121 : 1 , 5122 : 2 , 5123 : 2 , 5124 : 4 , 5125 : 4 , 5126 : 4 , 5131 : 2 , 32819 : 2 , 32820 : 2 , 33635 : 2 , 33640 : 4 , 34042 : 4 , 35899 : 4 , 35902 : 4 , 36193 : 2 } ; function emscriptenWebGLGetTexPixelData ( type , format , width , height , pixels , internalFormat ) { var sizePerPixel = __colorChannelsInGlTextureFormat [ format ] * __sizeOfGlTextureElementType [ type ] ; if ( ! sizePerPixel ) { GL . recordError ( 1280 ) ; return } var bytes = __computeUnpackAlignedImageSize ( width , height , sizePerPixel , GL . unpackAlignment ) ; var end = pixels + bytes ; switch ( type ) { case 5120 : return HEAP8 . subarray ( pixels , end ) ; case 5121 : return HEAPU8 . subarray ( pixels , end ) ; case 5122 : return HEAP16 . subarray ( pixels > > 1 , end > > 1 ) ; case 5124 : return HEAP32 . subarray ( pixels > > 2 , end > > 2 ) ; case 5126 : return HEAPF32 . subarray ( pixels > > 2 , end > > 2 ) ; case 5125 : case 34042 : case 35902 : case 33640 : case 35899 : case 34042 : return HEAPU32 . subarray ( pixels > > 2 , end > > 2 ) ; case 5123 : case 33635 : case 32819 : case 32820 : case 36193 : case 5131 : return HEAPU16 . subarray ( pixels > > 1 , end > > 1 ) ; default : GL . 
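// __colorChannelsInGlTextureFormat and __sizeOfGlTextureElementType map GL format/type enums to
// channel counts and per-element byte sizes; emscriptenWebGLGetTexPixelData uses them to size a
// typed-array view over the heap, padding each row to GL.unpackAlignment (set by glPixelStorei
// with pname 3317, GL_UNPACK_ALIGNMENT) before the pixels are handed to WebGL.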
recordError ( 1280 ) } } function __heapObjectForWebGLType ( type ) { switch ( type ) { case 5120 : return HEAP8 ; case 5121 : return HEAPU8 ; case 5122 : return HEAP16 ; case 5123 : case 33635 : case 32819 : case 32820 : case 36193 : case 5131 : return HEAPU16 ; case 5124 : return HEAP32 ; case 5125 : case 34042 : case 35902 : case 33640 : case 35899 : case 34042 : return HEAPU32 ; case 5126 : return HEAPF32 } } var __heapAccessShiftForWebGLType = { 5122 : 1 , 5123 : 1 , 5124 : 2 , 5125 : 2 , 5126 : 2 , 5131 : 1 , 32819 : 1 , 32820 : 1 , 33635 : 1 , 33640 : 2 , 34042 : 2 , 35899 : 2 , 35902 : 2 , 36193 : 1 } ; function _emscripten_glReadPixels ( x , y , width , height , format , type , pixels ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { if ( GLctx . currentPixelPackBufferBinding ) { GLctx . readPixels ( x , y , width , height , format , type , pixels ) } else { GLctx . readPixels ( x , y , width , height , format , type , __heapObjectForWebGLType ( type ) , pixels > > ( __heapAccessShiftForWebGLType [ type ] | 0 ) ) } return } var pixelData = emscriptenWebGLGetTexPixelData ( type , format , width , height , pixels , format ) ; if ( ! pixelData ) { GL . recordError ( 1280 ) ; return } GLctx . readPixels ( x , y , width , height , format , type , pixelData ) } function _emscripten_glReleaseShaderCompiler ( ) { } function _emscripten_glRenderbufferStorage ( x0 , x1 , x2 , x3 ) { GLctx [ " renderbufferStorage " ] ( x0 , x1 , x2 , x3 ) } function _emscripten_glRenderbufferStorageMultisample ( x0 , x1 , x2 , x3 , x4 ) { GLctx [ " renderbufferStorageMultisample " ] ( x0 , x1 , x2 , x3 , x4 ) } function _emscripten_glResumeTransformFeedback ( ) { GLctx [ " resumeTransformFeedback " ] ( ) } function _emscripten_glSampleCoverage ( value , invert ) { GLctx . sampleCoverage ( value , ! ! invert ) } function _emscripten_glSamplerParameterf ( sampler , pname , param ) { GLctx [ " samplerParameterf " ] ( GL . samplers [ sampler ] , pname , param ) } function _emscripten_glSamplerParameterfv ( sampler , pname , params ) { var param = HEAPF32 [ params > > 2 ] ; GLctx [ " samplerParameterf " ] ( GL . samplers [ sampler ] , pname , param ) } function _emscripten_glSamplerParameteri ( sampler , pname , param ) { GLctx [ " samplerParameteri " ] ( GL . samplers [ sampler ] , pname , param ) } function _emscripten_glSamplerParameteriv ( sampler , pname , params ) { var param = HEAP32 [ params > > 2 ] ; GLctx [ " samplerParameteri " ] ( GL . samplers [ sampler ] , pname , param ) } function _emscripten_glScissor ( x0 , x1 , x2 , x3 ) { GLctx [ " scissor " ] ( x0 , x1 , x2 , x3 ) } function _emscripten_glShaderBinary ( ) { GL . recordError ( 1280 ) } function _emscripten_glShaderSource ( shader , count , string , length ) { var source = GL . getSource ( shader , count , string , length ) ; GLctx . shaderSource ( GL . 
shaders [ shader ] , source ) } function _emscripten_glStencilFunc ( x0 , x1 , x2 ) { GLctx [ " stencilFunc " ] ( x0 , x1 , x2 ) } function _emscripten_glStencilFuncSeparate ( x0 , x1 , x2 , x3 ) { GLctx [ " stencilFuncSeparate " ] ( x0 , x1 , x2 , x3 ) } function _emscripten_glStencilMask ( x0 ) { GLctx [ " stencilMask " ] ( x0 ) } function _emscripten_glStencilMaskSeparate ( x0 , x1 ) { GLctx [ " stencilMaskSeparate " ] ( x0 , x1 ) } function _emscripten_glStencilOp ( x0 , x1 , x2 ) { GLctx [ " stencilOp " ] ( x0 , x1 , x2 ) } function _emscripten_glStencilOpSeparate ( x0 , x1 , x2 , x3 ) { GLctx [ " stencilOpSeparate " ] ( x0 , x1 , x2 , x3 ) } function _emscripten_glTexImage2D ( target , level , internalFormat , width , height , border , format , type , pixels ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { if ( GLctx . currentPixelUnpackBufferBinding ) { GLctx . texImage2D ( target , level , internalFormat , width , height , border , format , type , pixels ) } else if ( pixels ! = 0 ) { GLctx . texImage2D ( target , level , internalFormat , width , height , border , format , type , __heapObjectForWebGLType ( type ) , pixels > > ( __heapAccessShiftForWebGLType [ type ] | 0 ) ) } else { GLctx . texImage2D ( target , level , internalFormat , width , height , border , format , type , null ) } return } GLctx . texImage2D ( target , level , internalFormat , width , height , border , format , type , pixels ? emscriptenWebGLGetTexPixelData ( type , format , width , height , pixels , internalFormat ) : null ) } function _emscripten_glTexImage3D ( target , level , internalFormat , width , height , depth , border , format , type , pixels ) { if ( GLctx . currentPixelUnpackBufferBinding ) { GLctx [ " texImage3D " ] ( target , level , internalFormat , width , height , depth , border , format , type , pixels ) } else if ( pixels ! = 0 ) { GLctx [ " texImage3D " ] ( target , level , internalFormat , width , height , depth , border , format , type , __heapObjectForWebGLType ( type ) , pixels > > ( __heapAccessShiftForWebGLType [ type ] | 0 ) ) } else { GLctx [ " texImage3D " ] ( target , level , internalFormat , width , height , depth , border , format , type , null ) } } function _emscripten_glTexParameterf ( x0 , x1 , x2 ) { GLctx [ " texParameterf " ] ( x0 , x1 , x2 ) } function _emscripten_glTexParameterfv ( target , pname , params ) { var param = HEAPF32 [ params > > 2 ] ; GLctx . texParameterf ( target , pname , param ) } function _emscripten_glTexParameteri ( x0 , x1 , x2 ) { GLctx [ " texParameteri " ] ( x0 , x1 , x2 ) } function _emscripten_glTexParameteriv ( target , pname , params ) { var param = HEAP32 [ params > > 2 ] ; GLctx . texParameteri ( target , pname , param ) } function _emscripten_glTexStorage2D ( x0 , x1 , x2 , x3 , x4 ) { GLctx [ " texStorage2D " ] ( x0 , x1 , x2 , x3 , x4 ) } function _emscripten_glTexStorage3D ( x0 , x1 , x2 , x3 , x4 , x5 ) { GLctx [ " texStorage3D " ] ( x0 , x1 , x2 , x3 , x4 , x5 ) } function _emscripten_glTexSubImage2D ( target , level , xoffset , yoffset , width , height , format , type , pixels ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { if ( GLctx . currentPixelUnpackBufferBinding ) { GLctx . texSubImage2D ( target , level , xoffset , yoffset , width , height , format , type , pixels ) } else if ( pixels ! = 0 ) { GLctx . texSubImage2D ( target , level , xoffset , yoffset , width , height , format , type , __heapObjectForWebGLType ( type ) , pixels > > ( __heapAccessShiftForWebGLType [ type ] | 0 ) ) } else { GLctx . 
texSubImage2D ( target , level , xoffset , yoffset , width , height , format , type , null ) } return } var pixelData = null ; if ( pixels ) pixelData = emscriptenWebGLGetTexPixelData ( type , format , width , height , pixels , 0 ) ; GLctx . texSubImage2D ( target , level , xoffset , yoffset , width , height , format , type , pixelData ) } function _emscripten_glTexSubImage3D ( target , level , xoffset , yoffset , zoffset , width , height , depth , format , type , pixels ) { if ( GLctx . currentPixelUnpackBufferBinding ) { GLctx [ " texSubImage3D " ] ( target , level , xoffset , yoffset , zoffset , width , height , depth , format , type , pixels ) } else if ( pixels ! = 0 ) { GLctx [ " texSubImage3D " ] ( target , level , xoffset , yoffset , zoffset , width , height , depth , format , type , __heapObjectForWebGLType ( type ) , pixels > > ( __heapAccessShiftForWebGLType [ type ] | 0 ) ) } else { GLctx [ " texSubImage3D " ] ( target , level , xoffset , yoffset , zoffset , width , height , depth , format , type , null ) } } function _emscripten_glTransformFeedbackVaryings ( program , count , varyings , bufferMode ) { program = GL . programs [ program ] ; var vars = [ ] ; for ( var i = 0 ; i < count ; i + + ) vars . push ( UTF8ToString ( HEAP32 [ varyings + i * 4 > > 2 ] ) ) ; GLctx [ " transformFeedbackVaryings " ] ( program , vars , bufferMode ) } function _emscripten_glUniform1f ( location , v0 ) { GLctx . uniform1f ( GL . uniforms [ location ] , v0 ) } function _emscripten_glUniform1fv ( location , count , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniform1fv ( GL . uniforms [ location ] , HEAPF32 , value > > 2 , count ) ; return } if ( count < = GL . MINI_TEMP_BUFFER_SIZE ) { var view = GL . miniTempBufferViews [ count - 1 ] ; for ( var i = 0 ; i < count ; + + i ) { view [ i ] = HEAPF32 [ value + 4 * i > > 2 ] } } else { var view = HEAPF32 . subarray ( value > > 2 , value + count * 4 > > 2 ) } GLctx . uniform1fv ( GL . uniforms [ location ] , view ) } function _emscripten_glUniform1i ( location , v0 ) { GLctx . uniform1i ( GL . uniforms [ location ] , v0 ) } function _emscripten_glUniform1iv ( location , count , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniform1iv ( GL . uniforms [ location ] , HEAP32 , value > > 2 , count ) ; return } GLctx . uniform1iv ( GL . uniforms [ location ] , HEAP32 . subarray ( value > > 2 , value + count * 4 > > 2 ) ) } function _emscripten_glUniform1ui ( location , v0 ) { GLctx . uniform1ui ( GL . uniforms [ location ] , v0 ) } function _emscripten_glUniform1uiv ( location , count , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniform1uiv ( GL . uniforms [ location ] , HEAPU32 , value > > 2 , count ) } else { GLctx . uniform1uiv ( GL . uniforms [ location ] , HEAPU32 . subarray ( value > > 2 , value + count * 4 > > 2 ) ) } } function _emscripten_glUniform2f ( location , v0 , v1 ) { GLctx . uniform2f ( GL . uniforms [ location ] , v0 , v1 ) } function _emscripten_glUniform2fv ( location , count , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniform2fv ( GL . uniforms [ location ] , HEAPF32 , value > > 2 , count * 2 ) ; return } if ( 2 * count < = GL . MINI_TEMP_BUFFER_SIZE ) { var view = GL . miniTempBufferViews [ 2 * count - 1 ] ; for ( var i = 0 ; i < 2 * count ; i + = 2 ) { view [ i ] = HEAPF32 [ value + 4 * i > > 2 ] ; view [ i + 1 ] = HEAPF32 [ value + ( 4 * i + 4 ) > > 2 ] } } else { var view = HEAPF32 . 
subarray ( value > > 2 , value + count * 8 > > 2 ) } GLctx . uniform2fv ( GL . uniforms [ location ] , view ) } function _emscripten_glUniform2i ( location , v0 , v1 ) { GLctx . uniform2i ( GL . uniforms [ location ] , v0 , v1 ) } function _emscripten_glUniform2iv ( location , count , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniform2iv ( GL . uniforms [ location ] , HEAP32 , value > > 2 , count * 2 ) ; return } GLctx . uniform2iv ( GL . uniforms [ location ] , HEAP32 . subarray ( value > > 2 , value + count * 8 > > 2 ) ) } function _emscripten_glUniform2ui ( location , v0 , v1 ) { GLctx . uniform2ui ( GL . uniforms [ location ] , v0 , v1 ) } function _emscripten_glUniform2uiv ( location , count , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniform2uiv ( GL . uniforms [ location ] , HEAPU32 , value > > 2 , count * 2 ) } else { GLctx . uniform2uiv ( GL . uniforms [ location ] , HEAPU32 . subarray ( value > > 2 , value + count * 8 > > 2 ) ) } } function _emscripten_glUniform3f ( location , v0 , v1 , v2 ) { GLctx . uniform3f ( GL . uniforms [ location ] , v0 , v1 , v2 ) } function _emscripten_glUniform3fv ( location , count , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniform3fv ( GL . uniforms [ location ] , HEAPF32 , value > > 2 , count * 3 ) ; return } if ( 3 * count < = GL . MINI_TEMP_BUFFER_SIZE ) { var view = GL . miniTempBufferViews [ 3 * count - 1 ] ; for ( var i = 0 ; i < 3 * count ; i + = 3 ) { view [ i ] = HEAPF32 [ value + 4 * i > > 2 ] ; view [ i + 1 ] = HEAPF32 [ value + ( 4 * i + 4 ) > > 2 ] ; view [ i + 2 ] = HEAPF32 [ value + ( 4 * i + 8 ) > > 2 ] } } else { var view = HEAPF32 . subarray ( value > > 2 , value + count * 12 > > 2 ) } GLctx . uniform3fv ( GL . uniforms [ location ] , view ) } function _emscripten_glUniform3i ( location , v0 , v1 , v2 ) { GLctx . uniform3i ( GL . uniforms [ location ] , v0 , v1 , v2 ) } function _emscripten_glUniform3iv ( location , count , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniform3iv ( GL . uniforms [ location ] , HEAP32 , value > > 2 , count * 3 ) ; return } GLctx . uniform3iv ( GL . uniforms [ location ] , HEAP32 . subarray ( value > > 2 , value + count * 12 > > 2 ) ) } function _emscripten_glUniform3ui ( location , v0 , v1 , v2 ) { GLctx . uniform3ui ( GL . uniforms [ location ] , v0 , v1 , v2 ) } function _emscripten_glUniform3uiv ( location , count , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniform3uiv ( GL . uniforms [ location ] , HEAPU32 , value > > 2 , count * 3 ) } else { GLctx . uniform3uiv ( GL . uniforms [ location ] , HEAPU32 . subarray ( value > > 2 , value + count * 12 > > 2 ) ) } } function _emscripten_glUniform4f ( location , v0 , v1 , v2 , v3 ) { GLctx . uniform4f ( GL . uniforms [ location ] , v0 , v1 , v2 , v3 ) } function _emscripten_glUniform4fv ( location , count , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniform4fv ( GL . uniforms [ location ] , HEAPF32 , value > > 2 , count * 4 ) ; return } if ( 4 * count < = GL . MINI_TEMP_BUFFER_SIZE ) { var view = GL . miniTempBufferViews [ 4 * count - 1 ] ; for ( var i = 0 ; i < 4 * count ; i + = 4 ) { view [ i ] = HEAPF32 [ value + 4 * i > > 2 ] ; view [ i + 1 ] = HEAPF32 [ value + ( 4 * i + 4 ) > > 2 ] ; view [ i + 2 ] = HEAPF32 [ value + ( 4 * i + 8 ) > > 2 ] ; view [ i + 3 ] = HEAPF32 [ value + ( 4 * i + 12 ) > > 2 ] } } else { var view = HEAPF32 . 
subarray ( value > > 2 , value + count * 16 > > 2 ) } GLctx . uniform4fv ( GL . uniforms [ location ] , view ) } function _emscripten_glUniform4i ( location , v0 , v1 , v2 , v3 ) { GLctx . uniform4i ( GL . uniforms [ location ] , v0 , v1 , v2 , v3 ) } function _emscripten_glUniform4iv ( location , count , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniform4iv ( GL . uniforms [ location ] , HEAP32 , value > > 2 , count * 4 ) ; return } GLctx . uniform4iv ( GL . uniforms [ location ] , HEAP32 . subarray ( value > > 2 , value + count * 16 > > 2 ) ) } function _emscripten_glUniform4ui ( location , v0 , v1 , v2 , v3 ) { GLctx . uniform4ui ( GL . uniforms [ location ] , v0 , v1 , v2 , v3 ) } function _emscripten_glUniform4uiv ( location , count , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniform4uiv ( GL . uniforms [ location ] , HEAPU32 , value > > 2 , count * 4 ) } else { GLctx . uniform4uiv ( GL . uniforms [ location ] , HEAPU32 . subarray ( value > > 2 , value + count * 16 > > 2 ) ) } } function _emscripten_glUniformBlockBinding ( program , uniformBlockIndex , uniformBlockBinding ) { program = GL . programs [ program ] ; GLctx [ " uniformBlockBinding " ] ( program , uniformBlockIndex , uniformBlockBinding ) } function _emscripten_glUniformMatrix2fv ( location , count , transpose , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniformMatrix2fv ( GL . uniforms [ location ] , ! ! transpose , HEAPF32 , value > > 2 , count * 4 ) ; return } if ( 4 * count < = GL . MINI_TEMP_BUFFER_SIZE ) { var view = GL . miniTempBufferViews [ 4 * count - 1 ] ; for ( var i = 0 ; i < 4 * count ; i + = 4 ) { view [ i ] = HEAPF32 [ value + 4 * i > > 2 ] ; view [ i + 1 ] = HEAPF32 [ value + ( 4 * i + 4 ) > > 2 ] ; view [ i + 2 ] = HEAPF32 [ value + ( 4 * i + 8 ) > > 2 ] ; view [ i + 3 ] = HEAPF32 [ value + ( 4 * i + 12 ) > > 2 ] } } else { var view = HEAPF32 . subarray ( value > > 2 , value + count * 16 > > 2 ) } GLctx . uniformMatrix2fv ( GL . uniforms [ location ] , ! ! transpose , view ) } function _emscripten_glUniformMatrix2x3fv ( location , count , transpose , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniformMatrix2x3fv ( GL . uniforms [ location ] , ! ! transpose , HEAPF32 , value > > 2 , count * 6 ) } else { GLctx . uniformMatrix2x3fv ( GL . uniforms [ location ] , ! ! transpose , HEAPF32 . subarray ( value > > 2 , value + count * 24 > > 2 ) ) } } function _emscripten_glUniformMatrix2x4fv ( location , count , transpose , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniformMatrix2x4fv ( GL . uniforms [ location ] , ! ! transpose , HEAPF32 , value > > 2 , count * 8 ) } else { GLctx . uniformMatrix2x4fv ( GL . uniforms [ location ] , ! ! transpose , HEAPF32 . subarray ( value > > 2 , value + count * 32 > > 2 ) ) } } function _emscripten_glUniformMatrix3fv ( location , count , transpose , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniformMatrix3fv ( GL . uniforms [ location ] , ! ! transpose , HEAPF32 , value > > 2 , count * 9 ) ; return } if ( 9 * count < = GL . MINI_TEMP_BUFFER_SIZE ) { var view = GL . 
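// On WebGL2 (supportsWebGL2EntryPoints) the uniform*fv / uniformMatrix*fv wrappers pass the heap
// plus an offset and element count straight through; on WebGL1, small uploads are copied into a
// preallocated GL.miniTempBufferViews slot to avoid building a new subarray per call, and larger
// counts fall back to HEAPF32.subarray over the source range.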
miniTempBufferViews [ 9 * count - 1 ] ; for ( var i = 0 ; i < 9 * count ; i + = 9 ) { view [ i ] = HEAPF32 [ value + 4 * i > > 2 ] ; view [ i + 1 ] = HEAPF32 [ value + ( 4 * i + 4 ) > > 2 ] ; view [ i + 2 ] = HEAPF32 [ value + ( 4 * i + 8 ) > > 2 ] ; view [ i + 3 ] = HEAPF32 [ value + ( 4 * i + 12 ) > > 2 ] ; view [ i + 4 ] = HEAPF32 [ value + ( 4 * i + 16 ) > > 2 ] ; view [ i + 5 ] = HEAPF32 [ value + ( 4 * i + 20 ) > > 2 ] ; view [ i + 6 ] = HEAPF32 [ value + ( 4 * i + 24 ) > > 2 ] ; view [ i + 7 ] = HEAPF32 [ value + ( 4 * i + 28 ) > > 2 ] ; view [ i + 8 ] = HEAPF32 [ value + ( 4 * i + 32 ) > > 2 ] } } else { var view = HEAPF32 . subarray ( value > > 2 , value + count * 36 > > 2 ) } GLctx . uniformMatrix3fv ( GL . uniforms [ location ] , ! ! transpose , view ) } function _emscripten_glUniformMatrix3x2fv ( location , count , transpose , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniformMatrix3x2fv ( GL . uniforms [ location ] , ! ! transpose , HEAPF32 , value > > 2 , count * 6 ) } else { GLctx . uniformMatrix3x2fv ( GL . uniforms [ location ] , ! ! transpose , HEAPF32 . subarray ( value > > 2 , value + count * 24 > > 2 ) ) } } function _emscripten_glUniformMatrix3x4fv ( location , count , transpose , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniformMatrix3x4fv ( GL . uniforms [ location ] , ! ! transpose , HEAPF32 , value > > 2 , count * 12 ) } else { GLctx . uniformMatrix3x4fv ( GL . uniforms [ location ] , ! ! transpose , HEAPF32 . subarray ( value > > 2 , value + count * 48 > > 2 ) ) } } function _emscripten_glUniformMatrix4fv ( location , count , transpose , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniformMatrix4fv ( GL . uniforms [ location ] , ! ! transpose , HEAPF32 , value > > 2 , count * 16 ) ; return } if ( 16 * count < = GL . MINI_TEMP_BUFFER_SIZE ) { var view = GL . miniTempBufferViews [ 16 * count - 1 ] ; for ( var i = 0 ; i < 16 * count ; i + = 16 ) { view [ i ] = HEAPF32 [ value + 4 * i > > 2 ] ; view [ i + 1 ] = HEAPF32 [ value + ( 4 * i + 4 ) > > 2 ] ; view [ i + 2 ] = HEAPF32 [ value + ( 4 * i + 8 ) > > 2 ] ; view [ i + 3 ] = HEAPF32 [ value + ( 4 * i + 12 ) > > 2 ] ; view [ i + 4 ] = HEAPF32 [ value + ( 4 * i + 16 ) > > 2 ] ; view [ i + 5 ] = HEAPF32 [ value + ( 4 * i + 20 ) > > 2 ] ; view [ i + 6 ] = HEAPF32 [ value + ( 4 * i + 24 ) > > 2 ] ; view [ i + 7 ] = HEAPF32 [ value + ( 4 * i + 28 ) > > 2 ] ; view [ i + 8 ] = HEAPF32 [ value + ( 4 * i + 32 ) > > 2 ] ; view [ i + 9 ] = HEAPF32 [ value + ( 4 * i + 36 ) > > 2 ] ; view [ i + 10 ] = HEAPF32 [ value + ( 4 * i + 40 ) > > 2 ] ; view [ i + 11 ] = HEAPF32 [ value + ( 4 * i + 44 ) > > 2 ] ; view [ i + 12 ] = HEAPF32 [ value + ( 4 * i + 48 ) > > 2 ] ; view [ i + 13 ] = HEAPF32 [ value + ( 4 * i + 52 ) > > 2 ] ; view [ i + 14 ] = HEAPF32 [ value + ( 4 * i + 56 ) > > 2 ] ; view [ i + 15 ] = HEAPF32 [ value + ( 4 * i + 60 ) > > 2 ] } } else { var view = HEAPF32 . subarray ( value > > 2 , value + count * 64 > > 2 ) } GLctx . uniformMatrix4fv ( GL . uniforms [ location ] , ! ! transpose , view ) } function _emscripten_glUniformMatrix4x2fv ( location , count , transpose , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniformMatrix4x2fv ( GL . uniforms [ location ] , ! ! transpose , HEAPF32 , value > > 2 , count * 8 ) } else { GLctx . uniformMatrix4x2fv ( GL . uniforms [ location ] , ! ! transpose , HEAPF32 . 
subarray ( value > > 2 , value + count * 32 > > 2 ) ) } } function _emscripten_glUniformMatrix4x3fv ( location , count , transpose , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniformMatrix4x3fv ( GL . uniforms [ location ] , ! ! transpose , HEAPF32 , value > > 2 , count * 12 ) } else { GLctx . uniformMatrix4x3fv ( GL . uniforms [ location ] , ! ! transpose , HEAPF32 . subarray ( value > > 2 , value + count * 48 > > 2 ) ) } } function _emscripten_glUnmapBuffer ( ) { err ( " missing function : emscripten_glUnmapBuffer " ) ; abort ( - 1 ) } function _emscripten_glUseProgram ( program ) { GLctx . useProgram ( GL . programs [ program ] ) } function _emscripten_glValidateProgram ( program ) { GLctx . validateProgram ( GL . programs [ program ] ) } function _emscripten_glVertexAttrib1f ( x0 , x1 ) { GLctx [ " vertexAttrib1f " ] ( x0 , x1 ) } function _emscripten_glVertexAttrib1fv ( index , v ) { GLctx . vertexAttrib1f ( index , HEAPF32 [ v > > 2 ] ) } function _emscripten_glVertexAttrib2f ( x0 , x1 , x2 ) { GLctx [ " vertexAttrib2f " ] ( x0 , x1 , x2 ) } function _emscripten_glVertexAttrib2fv ( index , v ) { GLctx . vertexAttrib2f ( index , HEAPF32 [ v > > 2 ] , HEAPF32 [ v + 4 > > 2 ] ) } function _emscripten_glVertexAttrib3f ( x0 , x1 , x2 , x3 ) { GLctx [ " vertexAttrib3f " ] ( x0 , x1 , x2 , x3 ) } function _emscripten_glVertexAttrib3fv ( index , v ) { GLctx . vertexAttrib3f ( index , HEAPF32 [ v > > 2 ] , HEAPF32 [ v + 4 > > 2 ] , HEAPF32 [ v + 8 > > 2 ] ) } function _emscripten_glVertexAttrib4f ( x0 , x1 , x2 , x3 , x4 ) { GLctx [ " vertexAttrib4f " ] ( x0 , x1 , x2 , x3 , x4 ) } function _emscripten_glVertexAttrib4fv ( index , v ) { GLctx . vertexAttrib4f ( index , HEAPF32 [ v > > 2 ] , HEAPF32 [ v + 4 > > 2 ] , HEAPF32 [ v + 8 > > 2 ] , HEAPF32 [ v + 12 > > 2 ] ) } function _emscripten_glVertexAttribDivisor ( index , divisor ) { GLctx [ " vertexAttribDivisor " ] ( index , divisor ) } function _emscripten_glVertexAttribDivisorANGLE ( index , divisor ) { GLctx [ " vertexAttribDivisor " ] ( index , divisor ) } function _emscripten_glVertexAttribDivisorARB ( index , divisor ) { GLctx [ " vertexAttribDivisor " ] ( index , divisor ) } function _emscripten_glVertexAttribDivisorEXT ( index , divisor ) { GLctx [ " vertexAttribDivisor " ] ( index , divisor ) } function _emscripten_glVertexAttribDivisorNV ( index , divisor ) { GLctx [ " vertexAttribDivisor " ] ( index , divisor ) } function _emscripten_glVertexAttribI4i ( x0 , x1 , x2 , x3 , x4 ) { GLctx [ " vertexAttribI4i " ] ( x0 , x1 , x2 , x3 , x4 ) } function _emscripten_glVertexAttribI4iv ( index , v ) { GLctx . vertexAttribI4i ( index , HEAP32 [ v > > 2 ] , HEAP32 [ v + 4 > > 2 ] , HEAP32 [ v + 8 > > 2 ] , HEAP32 [ v + 12 > > 2 ] ) } function _emscripten_glVertexAttribI4ui ( x0 , x1 , x2 , x3 , x4 ) { GLctx [ " vertexAttribI4ui " ] ( x0 , x1 , x2 , x3 , x4 ) } function _emscripten_glVertexAttribI4uiv ( index , v ) { GLctx . vertexAttribI4ui ( index , HEAPU32 [ v > > 2 ] , HEAPU32 [ v + 4 > > 2 ] , HEAPU32 [ v + 8 > > 2 ] , HEAPU32 [ v + 12 > > 2 ] ) } function _emscripten_glVertexAttribIPointer ( index , size , type , stride , ptr ) { GLctx [ " vertexAttribIPointer " ] ( index , size , type , stride , ptr ) } function _emscripten_glVertexAttribPointer ( index , size , type , normalized , stride , ptr ) { GLctx . vertexAttribPointer ( index , size , type , ! ! 
normalized , stride , ptr ) } function _emscripten_glViewport ( x0 , x1 , x2 , x3 ) { GLctx [ " viewport " ] ( x0 , x1 , x2 , x3 ) } function _emscripten_glWaitSync ( sync , flags , timeoutLo , timeoutHi ) { timeoutLo = timeoutLo > > > 0 ; timeoutHi = timeoutHi > > > 0 ; var timeout = timeoutLo = = 4294967295 & & timeoutHi = = 4294967295 ? - 1 : makeBigInt ( timeoutLo , timeoutHi , true ) ; GLctx . waitSync ( GL . syncs [ sync ] , flags , timeout ) } function abortOnCannotGrowMemory ( requestedSize ) { abort ( " OOM " ) } function emscripten_realloc_buffer ( size ) { var PAGE_MULTIPLE = 65536 ; size = alignUp ( size , PAGE_MULTIPLE ) ; var old = Module [ " buffer " ] ; var oldSize = old . byteLength ; try { var result = wasmMemory . grow ( ( size - oldSize ) / 65536 ) ; if ( result ! = = ( - 1 | 0 ) ) { return Module [ " buffer " ] = wasmMemory . buffer } else { return null } } catch ( e ) { return null } } function _emscripten_resize_heap ( requestedSize ) { var oldSize = _emscripten_get_heap_size ( ) ; var PAGE_MULTIPLE = 65536 ; var LIMIT = 2147483648 - PAGE_MULTIPLE ; if ( requestedSize > LIMIT ) { return false } var MIN_TOTAL_MEMORY = 16777216 ; var newSize = Math . max ( oldSize , MIN_TOTAL_MEMORY ) ; while ( newSize < requestedSize ) { if ( newSize < = 536870912 ) { newSize = alignUp ( 2 * newSize , PAGE_MULTIPLE ) } else { newSize = Math . min ( alignUp ( ( 3 * newSize + 2147483648 ) / 4 , PAGE_MULTIPLE ) , LIMIT ) } } var replacement = emscripten_realloc_buffer ( newSize ) ; if ( ! replacement | | replacement . byteLength ! = newSize ) { return false } updateGlobalBuffer ( replacement ) ; updateGlobalBufferViews ( ) ; TOTAL_MEMORY = newSize ; HEAPU32 [ DYNAMICTOP_PTR > > 2 ] = requestedSize ; return true } function _glActiveTexture ( x0 ) { GLctx [ " activeTexture " ] ( x0 ) } function _glAttachShader ( program , shader ) { GLctx . attachShader ( GL . programs [ program ] , GL . shaders [ shader ] ) } function _glBindBuffer ( target , buffer ) { if ( target = = 35051 ) { GLctx . currentPixelPackBufferBinding = buffer } else if ( target = = 35052 ) { GLctx . currentPixelUnpackBufferBinding = buffer } GLctx . bindBuffer ( target , GL . buffers [ buffer ] ) } function _glBindBufferRange ( target , index , buffer , offset , ptrsize ) { GLctx [ " bindBufferRange " ] ( target , index , GL . buffers [ buffer ] , offset , ptrsize ) } function _glBindFramebuffer ( target , framebuffer ) { GLctx . bindFramebuffer ( target , GL . framebuffers [ framebuffer ] ) } function _glBindRenderbuffer ( target , renderbuffer ) { GLctx . bindRenderbuffer ( target , GL . renderbuffers [ renderbuffer ] ) } function _glBindSampler ( unit , sampler ) { GLctx [ " bindSampler " ] ( unit , GL . samplers [ sampler ] ) } function _glBindTexture ( target , texture ) { GLctx . bindTexture ( target , GL . textures [ texture ] ) } function _glBindVertexArray ( vao ) { GLctx [ " bindVertexArray " ] ( GL . vaos [ vao ] ) } function _glBlendEquationSeparate ( x0 , x1 ) { GLctx [ " blendEquationSeparate " ] ( x0 , x1 ) } function _glBlendFuncSeparate ( x0 , x1 , x2 , x3 ) { GLctx [ " blendFuncSeparate " ] ( x0 , x1 , x2 , x3 ) } function _glBlitFramebuffer ( x0 , x1 , x2 , x3 , x4 , x5 , x6 , x7 , x8 , x9 ) { GLctx [ " blitFramebuffer " ] ( x0 , x1 , x2 , x3 , x4 , x5 , x6 , x7 , x8 , x9 ) } function _glBufferData ( target , size , data , usage ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { if ( data ) { GLctx . bufferData ( target , HEAPU8 , usage , data , size ) } else { GLctx . 
bufferData ( target , size , usage ) } } else { GLctx . bufferData ( target , data ? HEAPU8 . subarray ( data , data + size ) : size , usage ) } } function _glBufferSubData ( target , offset , size , data ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . bufferSubData ( target , offset , HEAPU8 , data , size ) ; return } GLctx . bufferSubData ( target , offset , HEAPU8 . subarray ( data , data + size ) ) } function _glClear ( x0 ) { GLctx [ " clear " ] ( x0 ) } function _glClearColor ( x0 , x1 , x2 , x3 ) { GLctx [ " clearColor " ] ( x0 , x1 , x2 , x3 ) } function _glClearDepthf ( x0 ) { GLctx [ " clearDepth " ] ( x0 ) } function _glClearStencil ( x0 ) { GLctx [ " clearStencil " ] ( x0 ) } function _glColorMask ( red , green , blue , alpha ) { GLctx . colorMask ( ! ! red , ! ! green , ! ! blue , ! ! alpha ) } function _glCompileShader ( shader ) { GLctx . compileShader ( GL . shaders [ shader ] ) } function _glCompressedTexSubImage2D ( target , level , xoffset , yoffset , width , height , format , imageSize , data ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { if ( GLctx . currentPixelUnpackBufferBinding ) { GLctx [ " compressedTexSubImage2D " ] ( target , level , xoffset , yoffset , width , height , format , imageSize , data ) } else { GLctx [ " compressedTexSubImage2D " ] ( target , level , xoffset , yoffset , width , height , format , HEAPU8 , data , imageSize ) } return } GLctx [ " compressedTexSubImage2D " ] ( target , level , xoffset , yoffset , width , height , format , data ? HEAPU8 . subarray ( data , data + imageSize ) : null ) } function _glCompressedTexSubImage3D ( target , level , xoffset , yoffset , zoffset , width , height , depth , format , imageSize , data ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { if ( GLctx . currentPixelUnpackBufferBinding ) { GLctx [ " compressedTexSubImage3D " ] ( target , level , xoffset , yoffset , zoffset , width , height , depth , format , imageSize , data ) } else { GLctx [ " compressedTexSubImage3D " ] ( target , level , xoffset , yoffset , zoffset , width , height , depth , format , HEAPU8 , data , imageSize ) } } else { GLctx [ " compressedTexSubImage3D " ] ( target , level , xoffset , yoffset , zoffset , width , height , depth , format , data ? HEAPU8 . subarray ( data , data + imageSize ) : null ) } } function _glCreateProgram ( ) { var id = GL . getNewId ( GL . programs ) ; var program = GLctx . createProgram ( ) ; program . name = id ; GL . programs [ id ] = program ; return id } function _glCreateShader ( shaderType ) { var id = GL . getNewId ( GL . shaders ) ; GL . shaders [ id ] = GLctx . createShader ( shaderType ) ; return id } function _glCullFace ( x0 ) { GLctx [ " cullFace " ] ( x0 ) } function _glDeleteBuffers ( n , buffers ) { for ( var i = 0 ; i < n ; i + + ) { var id = HEAP32 [ buffers + i * 4 > > 2 ] ; var buffer = GL . buffers [ id ] ; if ( ! buffer ) continue ; GLctx . deleteBuffer ( buffer ) ; buffer . name = 0 ; GL . buffers [ id ] = null ; if ( id = = GL . currArrayBuffer ) GL . currArrayBuffer = 0 ; if ( id = = GL . currElementArrayBuffer ) GL . currElementArrayBuffer = 0 ; if ( id = = GLctx . currentPixelPackBufferBinding ) GLctx . currentPixelPackBufferBinding = 0 ; if ( id = = GLctx . currentPixelUnpackBufferBinding ) GLctx . currentPixelUnpackBufferBinding = 0 } } function _glDeleteFramebuffers ( n , framebuffers ) { for ( var i = 0 ; i < n ; + + i ) { var id = HEAP32 [ framebuffers + i * 4 > > 2 ] ; var framebuffer = GL . framebuffers [ id ] ; if ( ! 
framebuffer ) continue ; GLctx . deleteFramebuffer ( framebuffer ) ; framebuffer . name = 0 ; GL . framebuffers [ id ] = null } } function _glDeleteProgram ( id ) { if ( ! id ) return ; var program = GL . programs [ id ] ; if ( ! program ) { GL . recordError ( 1281 ) ; return } GLctx . deleteProgram ( program ) ; program . name = 0 ; GL . programs [ id ] = null ; GL . programInfos [ id ] = null } function _glDeleteRenderbuffers ( n , renderbuffers ) { for ( var i = 0 ; i < n ; i + + ) { var id = HEAP32 [ renderbuffers + i * 4 > > 2 ] ; var renderbuffer = GL . renderbuffers [ id ] ; if ( ! renderbuffer ) continue ; GLctx . deleteRenderbuffer ( renderbuffer ) ; renderbuffer . name = 0 ; GL . renderbuffers [ id ] = null } } function _glDeleteSamplers ( n , samplers ) { for ( var i = 0 ; i < n ; i + + ) { var id = HEAP32 [ samplers + i * 4 > > 2 ] ; var sampler = GL . samplers [ id ] ; if ( ! sampler ) continue ; GLctx [ " deleteSampler " ] ( sampler ) ; sampler . name = 0 ; GL . samplers [ id ] = null } } function _glDeleteShader ( id ) { if ( ! id ) return ; var shader = GL . shaders [ id ] ; if ( ! shader ) { GL . recordError ( 1281 ) ; return } GLctx . deleteShader ( shader ) ; GL . shaders [ id ] = null } function _glDeleteSync ( id ) { if ( ! id ) return ; var sync = GL . syncs [ id ] ; if ( ! sync ) { GL . recordError ( 1281 ) ; return } GLctx . deleteSync ( sync ) ; sync . name = 0 ; GL . syncs [ id ] = null } function _glDeleteTextures ( n , textures ) { for ( var i = 0 ; i < n ; i + + ) { var id = HEAP32 [ textures + i * 4 > > 2 ] ; var texture = GL . textures [ id ] ; if ( ! texture ) continue ; GLctx . deleteTexture ( texture ) ; texture . name = 0 ; GL . textures [ id ] = null } } function _glDeleteVertexArrays ( n , vaos ) { for ( var i = 0 ; i < n ; i + + ) { var id = HEAP32 [ vaos + i * 4 > > 2 ] ; GLctx [ " deleteVertexArray " ] ( GL . vaos [ id ] ) ; GL . vaos [ id ] = null } } function _glDepthFunc ( x0 ) { GLctx [ " depthFunc " ] ( x0 ) } function _glDepthMask ( flag ) { GLctx . depthMask ( ! ! flag ) } function _glDetachShader ( program , shader ) { GLctx . detachShader ( GL . programs [ program ] , GL . shaders [ shader ] ) } function _glDisable ( x0 ) { GLctx [ " disable " ] ( x0 ) } function _glDisableVertexAttribArray ( index ) { GLctx . disableVertexAttribArray ( index ) } function _glDrawArrays ( mode , first , count ) { GLctx . drawArrays ( mode , first , count ) } function _glDrawRangeElements ( mode , start , end , count , type , indices ) { _glDrawElements ( mode , count , type , indices ) } function _glEnable ( x0 ) { GLctx [ " enable " ] ( x0 ) } function _glEnableVertexAttribArray ( index ) { GLctx . enableVertexAttribArray ( index ) } function _glFenceSync ( condition , flags ) { var sync = GLctx . fenceSync ( condition , flags ) ; if ( sync ) { var id = GL . getNewId ( GL . syncs ) ; sync . name = id ; GL . syncs [ id ] = sync ; return id } else { return 0 } } function _glFlush ( ) { GLctx [ " flush " ] ( ) } function _glFramebufferRenderbuffer ( target , attachment , renderbuffertarget , renderbuffer ) { GLctx . framebufferRenderbuffer ( target , attachment , renderbuffertarget , GL . renderbuffers [ renderbuffer ] ) } function _glFramebufferTexture2D ( target , attachment , textarget , texture , level ) { GLctx . framebufferTexture2D ( target , attachment , textarget , GL . textures [ texture ] , level ) } function _glFramebufferTextureLayer ( target , attachment , texture , level , layer ) { GLctx . framebufferTextureLayer ( target , attachment , GL . 
textures [ texture ] , level , layer ) } function _glFrontFace ( x0 ) { GLctx [ " frontFace " ] ( x0 ) } function _glGenBuffers ( n , buffers ) { __glGenObject ( n , buffers , " createBuffer " , GL . buffers ) } function _glGenFramebuffers ( n , ids ) { __glGenObject ( n , ids , " createFramebuffer " , GL . framebuffers ) } function _glGenRenderbuffers ( n , renderbuffers ) { __glGenObject ( n , renderbuffers , " createRenderbuffer " , GL . renderbuffers ) } function _glGenSamplers ( n , samplers ) { __glGenObject ( n , samplers , " createSampler " , GL . samplers ) } function _glGenTextures ( n , textures ) { __glGenObject ( n , textures , " createTexture " , GL . textures ) } function _glGenVertexArrays ( n , arrays ) { __glGenObject ( n , arrays , " createVertexArray " , GL . vaos ) } function _glGenerateMipmap ( x0 ) { GLctx [ " generateMipmap " ] ( x0 ) } function _glGetError ( ) { if ( GL . lastError ) { var error = GL . lastError ; GL . lastError = 0 ; return error } else { return GLctx . getError ( ) } } function _glGetFloatv ( name_ , p ) { emscriptenWebGLGet ( name_ , p , " Float " ) } function _glGetIntegerv ( name_ , p ) { emscriptenWebGLGet ( name_ , p , " Integer " ) } function _glGetProgramInfoLog ( program , maxLength , length , infoLog ) { var log = GLctx . getProgramInfoLog ( GL . programs [ program ] ) ; if ( log = = = null ) log = " ( unknown error ) " ; if ( maxLength > 0 & & infoLog ) { var numBytesWrittenExclNull = stringToUTF8 ( log , infoLog , maxLength ) ; if ( length ) HEAP32 [ length > > 2 ] = numBytesWrittenExclNull } else { if ( length ) HEAP32 [ length > > 2 ] = 0 } } function _glGetProgramiv ( program , pname , p ) { if ( ! p ) { GL . recordError ( 1281 ) ; return } if ( program > = GL . counter ) { GL . recordError ( 1281 ) ; return } var ptable = GL . programInfos [ program ] ; if ( ! ptable ) { GL . recordError ( 1282 ) ; return } if ( pname = = 35716 ) { var log = GLctx . getProgramInfoLog ( GL . programs [ program ] ) ; if ( log = = = null ) log = " ( unknown error ) " ; HEAP32 [ p > > 2 ] = log . length + 1 } else if ( pname = = 35719 ) { HEAP32 [ p > > 2 ] = ptable . maxUniformLength } else if ( pname = = 35722 ) { if ( ptable . maxAttributeLength = = - 1 ) { program = GL . programs [ program ] ; var numAttribs = GLctx . getProgramParameter ( program , 35721 ) ; ptable . maxAttributeLength = 0 ; for ( var i = 0 ; i < numAttribs ; + + i ) { var activeAttrib = GLctx . getActiveAttrib ( program , i ) ; ptable . maxAttributeLength = Math . max ( ptable . maxAttributeLength , activeAttrib . name . length + 1 ) } } HEAP32 [ p > > 2 ] = ptable . maxAttributeLength } else if ( pname = = 35381 ) { if ( ptable . maxUniformBlockNameLength = = - 1 ) { program = GL . programs [ program ] ; var numBlocks = GLctx . getProgramParameter ( program , 35382 ) ; ptable . maxUniformBlockNameLength = 0 ; for ( var i = 0 ; i < numBlocks ; + + i ) { var activeBlockName = GLctx . getActiveUniformBlockName ( program , i ) ; ptable . maxUniformBlockNameLength = Math . max ( ptable . maxUniformBlockNameLength , activeBlockName . length + 1 ) } } HEAP32 [ p > > 2 ] = ptable . maxUniformBlockNameLength } else { HEAP32 [ p > > 2 ] = GLctx . getProgramParameter ( GL . programs [ program ] , pname ) } } function _glGetShaderInfoLog ( shader , maxLength , length , infoLog ) { var log = GLctx . getShaderInfoLog ( GL . 
shaders [ shader ] ) ; if ( log = = = null ) log = " ( unknown error ) " ; if ( maxLength > 0 & & infoLog ) { var numBytesWrittenExclNull = stringToUTF8 ( log , infoLog , maxLength ) ; if ( length ) HEAP32 [ length > > 2 ] = numBytesWrittenExclNull } else { if ( length ) HEAP32 [ length > > 2 ] = 0 } } function _glGetShaderiv ( shader , pname , p ) { if ( ! p ) { GL . recordError ( 1281 ) ; return } if ( pname = = 35716 ) { var log = GLctx . getShaderInfoLog ( GL . shaders [ shader ] ) ; if ( log = = = null ) log = " ( unknown error ) " ; HEAP32 [ p > > 2 ] = log . length + 1 } else if ( pname = = 35720 ) { var source = GLctx . getShaderSource ( GL . shaders [ shader ] ) ; var sourceLength = source = = = null | | source . length = = 0 ? 0 : source . length + 1 ; HEAP32 [ p > > 2 ] = sourceLength } else { HEAP32 [ p > > 2 ] = GLctx . getShaderParameter ( GL . shaders [ shader ] , pname ) } } function _glGetString ( name_ ) { if ( GL . stringCache [ name_ ] ) return GL . stringCache [ name_ ] ; var ret ; switch ( name_ ) { case 7939 : var exts = GLctx . getSupportedExtensions ( ) ; var gl_exts = [ ] ; for ( var i = 0 ; i < exts . length ; + + i ) { gl_exts . push ( exts [ i ] ) ; gl_exts . push ( " GL_ " + exts [ i ] ) } ret = stringToNewUTF8 ( gl_exts . join ( " " ) ) ; break ; case 7936 : case 7937 : case 37445 : case 37446 : var s = GLctx . getParameter ( name_ ) ; if ( ! s ) { GL . recordError ( 1280 ) } ret = stringToNewUTF8 ( s ) ; break ; case 7938 : var glVersion = GLctx . getParameter ( GLctx . VERSION ) ; if ( GL . currentContext . version > = 2 ) glVersion = " OpenGL ES 3 . 0 ( " + glVersion + " ) " ; else { glVersion = " OpenGL ES 2 . 0 ( " + glVersion + " ) " } ret = stringToNewUTF8 ( glVersion ) ; break ; case 35724 : var glslVersion = GLctx . getParameter ( GLctx . SHADING_LANGUAGE_VERSION ) ; var ver_re = / ^ WebGL GLSL ES ( [ 0 - 9 ] \ . [ 0 - 9 ] [ 0 - 9 ] ? ) ( ? : $ | . * ) / ; var ver_num = glslVersion . match ( ver_re ) ; if ( ver_num ! = = null ) { if ( ver_num [ 1 ] . length = = 3 ) ver_num [ 1 ] = ver_num [ 1 ] + " 0 " ; glslVersion = " OpenGL ES GLSL ES " + ver_num [ 1 ] + " ( " + glslVersion + " ) " } ret = stringToNewUTF8 ( glslVersion ) ; break ; default : GL . recordError ( 1280 ) ; return 0 } GL . stringCache [ name_ ] = ret ; return ret } function _glGetStringi ( name , index ) { if ( GL . currentContext . version < 2 ) { GL . recordError ( 1282 ) ; return 0 } var stringiCache = GL . stringiCache [ name ] ; if ( stringiCache ) { if ( index < 0 | | index > = stringiCache . length ) { GL . recordError ( 1281 ) ; return 0 } return stringiCache [ index ] } switch ( name ) { case 7939 : var exts = GLctx . getSupportedExtensions ( ) ; var gl_exts = [ ] ; for ( var i = 0 ; i < exts . length ; + + i ) { gl_exts . push ( stringToNewUTF8 ( exts [ i ] ) ) ; gl_exts . push ( stringToNewUTF8 ( " GL_ " + exts [ i ] ) ) } stringiCache = GL . stringiCache [ name ] = gl_exts ; if ( index < 0 | | index > = stringiCache . length ) { GL . recordError ( 1281 ) ; return 0 } return stringiCache [ index ] ; default : GL . recordError ( 1280 ) ; return 0 } } function _glGetUniformBlockIndex ( program , uniformBlockName ) { return GLctx [ " getUniformBlockIndex " ] ( GL . programs [ program ] , UTF8ToString ( uniformBlockName ) ) } function _glGetUniformLocation ( program , name ) { name = UTF8ToString ( name ) ; var arrayIndex = 0 ; if ( name [ name . length - 1 ] = = " ] " ) { var leftBrace = name . lastIndexOf ( " [ " ) ; arrayIndex = name [ leftBrace + 1 ] ! = " ] " ? 
parseInt ( name . slice ( leftBrace + 1 ) ) : 0 ; name = name . slice ( 0 , leftBrace ) } var uniformInfo = GL . programInfos [ program ] & & GL . programInfos [ program ] . uniforms [ name ] ; if ( uniformInfo & & arrayIndex > = 0 & & arrayIndex < uniformInfo [ 0 ] ) { return uniformInfo [ 1 ] + arrayIndex } else { return - 1 } } function _glGetVertexAttribiv ( index , pname , params ) { emscriptenWebGLGetVertexAttrib ( index , pname , params , " FloatToInteger " ) } function _glHint ( x0 , x1 ) { GLctx [ " hint " ] ( x0 , x1 ) } function _glInvalidateFramebuffer ( target , numAttachments , attachments ) { var list = __tempFixedLengthArray [ numAttachments ] ; for ( var i = 0 ; i < numAttachments ; i + + ) { list [ i ] = HEAP32 [ attachments + i * 4 > > 2 ] } GLctx [ " invalidateFramebuffer " ] ( target , list ) } function _glInvalidateSubFramebuffer ( target , numAttachments , attachments , x , y , width , height ) { var list = __tempFixedLengthArray [ numAttachments ] ; for ( var i = 0 ; i < numAttachments ; i + + ) { list [ i ] = HEAP32 [ attachments + i * 4 > > 2 ] } GLctx [ " invalidateSubFramebuffer " ] ( target , list , x , y , width , height ) } function _glIsEnabled ( x0 ) { return GLctx [ " isEnabled " ] ( x0 ) } function _glLinkProgram ( program ) { GLctx . linkProgram ( GL . programs [ program ] ) ; GL . populateUniformTable ( program ) } function _glPixelStorei ( pname , param ) { if ( pname = = 3317 ) { GL . unpackAlignment = param } GLctx . pixelStorei ( pname , param ) } function _glPolygonOffset ( x0 , x1 ) { GLctx [ " polygonOffset " ] ( x0 , x1 ) } function _glReadPixels ( x , y , width , height , format , type , pixels ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { if ( GLctx . currentPixelPackBufferBinding ) { GLctx . readPixels ( x , y , width , height , format , type , pixels ) } else { GLctx . readPixels ( x , y , width , height , format , type , __heapObjectForWebGLType ( type ) , pixels > > ( __heapAccessShiftForWebGLType [ type ] | 0 ) ) } return } var pixelData = emscriptenWebGLGetTexPixelData ( type , format , width , height , pixels , format ) ; if ( ! pixelData ) { GL . recordError ( 1280 ) ; return } GLctx . readPixels ( x , y , width , height , format , type , pixelData ) } function _glRenderbufferStorage ( x0 , x1 , x2 , x3 ) { GLctx [ " renderbufferStorage " ] ( x0 , x1 , x2 , x3 ) } function _glRenderbufferStorageMultisample ( x0 , x1 , x2 , x3 , x4 ) { GLctx [ " renderbufferStorageMultisample " ] ( x0 , x1 , x2 , x3 , x4 ) } function _glSamplerParameteri ( sampler , pname , param ) { GLctx [ " samplerParameteri " ] ( GL . samplers [ sampler ] , pname , param ) } function _glScissor ( x0 , x1 , x2 , x3 ) { GLctx [ " scissor " ] ( x0 , x1 , x2 , x3 ) } function _glShaderSource ( shader , count , string , length ) { var source = GL . getSource ( shader , count , string , length ) ; GLctx . shaderSource ( GL . shaders [ shader ] , source ) } function _glTexParameteri ( x0 , x1 , x2 ) { GLctx [ " texParameteri " ] ( x0 , x1 , x2 ) } function _glTexStorage2D ( x0 , x1 , x2 , x3 , x4 ) { GLctx [ " texStorage2D " ] ( x0 , x1 , x2 , x3 , x4 ) } function _glTexStorage2DMultisample ( ) { err ( " missing function : glTexStorage2DMultisample " ) ; abort ( - 1 ) } function _glTexStorage3D ( x0 , x1 , x2 , x3 , x4 , x5 ) { GLctx [ " texStorage3D " ] ( x0 , x1 , x2 , x3 , x4 , x5 ) } function _glTexSubImage2D ( target , level , xoffset , yoffset , width , height , format , type , pixels ) { if ( GL . currentContext . 
supportsWebGL2EntryPoints ) { if ( GLctx . currentPixelUnpackBufferBinding ) { GLctx . texSubImage2D ( target , level , xoffset , yoffset , width , height , format , type , pixels ) } else if ( pixels ! = 0 ) { GLctx . texSubImage2D ( target , level , xoffset , yoffset , width , height , format , type , __heapObjectForWebGLType ( type ) , pixels > > ( __heapAccessShiftForWebGLType [ type ] | 0 ) ) } else { GLctx . texSubImage2D ( target , level , xoffset , yoffset , width , height , format , type , null ) } return } var pixelData = null ; if ( pixels ) pixelData = emscriptenWebGLGetTexPixelData ( type , format , width , height , pixels , 0 ) ; GLctx . texSubImage2D ( target , level , xoffset , yoffset , width , height , format , type , pixelData ) } function _glTexSubImage3D ( target , level , xoffset , yoffset , zoffset , width , height , depth , format , type , pixels ) { if ( GLctx . currentPixelUnpackBufferBinding ) { GLctx [ " texSubImage3D " ] ( target , level , xoffset , yoffset , zoffset , width , height , depth , format , type , pixels ) } else if ( pixels ! = 0 ) { GLctx [ " texSubImage3D " ] ( target , level , xoffset , yoffset , zoffset , width , height , depth , format , type , __heapObjectForWebGLType ( type ) , pixels > > ( __heapAccessShiftForWebGLType [ type ] | 0 ) ) } else { GLctx [ " texSubImage3D " ] ( target , level , xoffset , yoffset , zoffset , width , height , depth , format , type , null ) } } function _glUniform1f ( location , v0 ) { GLctx . uniform1f ( GL . uniforms [ location ] , v0 ) } function _glUniform1i ( location , v0 ) { GLctx . uniform1i ( GL . uniforms [ location ] , v0 ) } function _glUniform4f ( location , v0 , v1 , v2 , v3 ) { GLctx . uniform4f ( GL . uniforms [ location ] , v0 , v1 , v2 , v3 ) } function _glUniformBlockBinding ( program , uniformBlockIndex , uniformBlockBinding ) { program = GL . programs [ program ] ; GLctx [ " uniformBlockBinding " ] ( program , uniformBlockIndex , uniformBlockBinding ) } function _glUseProgram ( program ) { GLctx . useProgram ( GL . programs [ program ] ) } function _glVertexAttribIPointer ( index , size , type , stride , ptr ) { GLctx [ " vertexAttribIPointer " ] ( index , size , type , stride , ptr ) } function _glVertexAttribPointer ( index , size , type , normalized , stride , ptr ) { GLctx . vertexAttribPointer ( index , size , type , ! ! normalized , stride , ptr ) } function _glViewport ( x0 , x1 , x2 , x3 ) { GLctx [ " viewport " ] ( x0 , x1 , x2 , x3 ) } function _glWaitSync ( sync , flags , timeoutLo , timeoutHi ) { timeoutLo = timeoutLo > > > 0 ; timeoutHi = timeoutHi > > > 0 ; var timeout = timeoutLo = = 4294967295 & & timeoutHi = = 4294967295 ? - 1 : makeBigInt ( timeoutLo , timeoutHi , true ) ; GLctx . waitSync ( GL . syncs [ sync ] , flags , timeout ) } function _llvm_exp2_f32 ( x ) { return Math . pow ( 2 , x ) } function _llvm_exp2_f64 ( a0 ) { return _llvm_exp2_f32 ( a0 ) } function _llvm_log2_f32 ( x ) { return Math . log ( x ) / Math . LN2 } function _llvm_trap ( ) { abort ( " trap ! " ) } function _emscripten_memcpy_big ( dest , src , num ) { HEAPU8 . set ( HEAPU8 . 
subarray ( src , src + num ) , dest ) } function _pthread_cond_destroy ( ) { return 0 } function _pthread_cond_signal ( ) { return 0 } function _pthread_cond_timedwait ( ) { return 0 } function _pthread_cond_wait ( ) { return 0 } function _pthread_create ( ) { return 11 } function _pthread_join ( ) { } function _sysconf ( name ) { switch ( name ) { case 30 : return PAGE_SIZE ; case 85 : var maxHeapSize = 2 * 1024 * 1024 * 1024 - 65536 ; return maxHeapSize / PAGE_SIZE ; case 132 : case 133 : case 12 : case 137 : case 138 : case 15 : case 235 : case 16 : case 17 : case 18 : case 19 : case 20 : case 149 : case 13 : case 10 : case 236 : case 153 : case 9 : case 21 : case 22 : case 159 : case 154 : case 14 : case 77 : case 78 : case 139 : case 80 : case 81 : case 82 : case 68 : case 67 : case 164 : case 11 : case 29 : case 47 : case 48 : case 95 : case 52 : case 51 : case 46 : return 200809 ; case 79 : return 0 ; case 27 : case 246 : case 127 : case 128 : case 23 : case 24 : case 160 : case 161 : case 181 : case 182 : case 242 : case 183 : case 184 : case 243 : case 244 : case 245 : case 165 : case 178 : case 179 : case 49 : case 50 : case 168 : case 169 : case 175 : case 170 : case 171 : case 172 : case 97 : case 76 : case 32 : case 173 : case 35 : return - 1 ; case 176 : case 177 : case 7 : case 155 : case 8 : case 157 : case 125 : case 126 : case 92 : case 93 : case 129 : case 130 : case 131 : case 94 : case 91 : return 1 ; case 74 : case 60 : case 69 : case 70 : case 4 : return 1024 ; case 31 : case 42 : case 72 : return 32 ; case 87 : case 26 : case 33 : return 2147483647 ; case 34 : case 1 : return 47839 ; case 38 : case 36 : return 99 ; case 43 : case 37 : return 2048 ; case 0 : return 2097152 ; case 3 : return 65536 ; case 28 : return 32768 ; case 44 : return 32767 ; case 75 : return 16384 ; case 39 : return 1e3 ; case 89 : return 700 ; case 71 : return 256 ; case 40 : return 255 ; case 2 : return 100 ; case 180 : return 64 ; case 25 : return 20 ; case 5 : return 16 ; case 6 : return 6 ; case 73 : return 4 ; case 84 : { if ( typeof navigator = = = " object " ) return navigator [ " hardwareConcurrency " ] | | 1 ; return 1 } } ___setErrNo ( 22 ) ; return - 1 } FS . staticInit ( ) ; if ( ENVIRONMENT_IS_NODE ) { var fs = require ( " fs " ) ; var NODEJS_PATH = require ( " path " ) ; NODEFS . staticInit ( ) } InternalError = Module [ " InternalError " ] = extendError ( Error , " InternalError " ) ; embind_init_charCodes ( ) ; BindingError = Module [ " BindingError " ] = extendError ( Error , " BindingError " ) ; init_ClassHandle ( ) ; init_RegisteredPointer ( ) ; init_embind ( ) ; UnboundTypeError = Module [ " UnboundTypeError " ] = extendError ( Error , " UnboundTypeError " ) ; init_emval ( ) ; if ( ENVIRONMENT_IS_NODE ) { _emscripten_get_now = function _emscripten_get_now_actual ( ) { var t = process [ " hrtime " ] ( ) ; return t [ 0 ] * 1e3 + t [ 1 ] / 1e6 } } else if ( typeof dateNow ! = = " undefined " ) { _emscripten_get_now = dateNow } else if ( typeof self = = = " object " & & self [ " performance " ] & & typeof self [ " performance " ] [ " now " ] = = = " function " ) { _emscripten_get_now = function ( ) { return self [ " performance " ] [ " now " ] ( ) } } else if ( typeof performance = = = " object " & & typeof performance [ " now " ] = = = " function " ) { _emscripten_get_now = function ( ) { return performance [ " now " ] ( ) } } else { _emscripten_get_now = Date . now } var GLctx ; GL . init ( ) ; for ( var i = 0 ; i < 32 ; i + + ) __tempFixedLengthArray . 
push ( new Array ( i ) ) ; var ASSERTIONS = false ; function intArrayFromString ( stringy , dontAddNull , length ) { var len = length > 0 ? length : lengthBytesUTF8 ( stringy ) + 1 ; var u8array = new Array ( len ) ; var numBytesWritten = stringToUTF8Array ( stringy , u8array , 0 , u8array . length ) ; if ( dontAddNull ) u8array . length = numBytesWritten ; return u8array } var asmGlobalArg = { } ; var asmLibraryArg = { " abort " : abort , " setTempRet0 " : setTempRet0 , " getTempRet0 " : getTempRet0 , " ClassHandle " : ClassHandle , " ClassHandle_clone " : ClassHandle_clone , " ClassHandle_delete " : ClassHandle_delete , " ClassHandle_deleteLater " : ClassHandle_deleteLater , " ClassHandle_isAliasOf " : ClassHandle_isAliasOf , " ClassHandle_isDeleted " : ClassHandle_isDeleted , " RegisteredClass " : RegisteredClass , " RegisteredPointer " : RegisteredPointer , " RegisteredPointer_deleteObject " : RegisteredPointer_deleteObject , " RegisteredPointer_destructor " : RegisteredPointer_destructor , " RegisteredPointer_fromWireType " : RegisteredPointer_fromWireType , " RegisteredPointer_getPointee " : RegisteredPointer_getPointee , " __ZSt18uncaught_exceptionv " : __ZSt18uncaught_exceptionv , " ___atomic_compare_exchange_8 " : ___atomic_compare_exchange_8 , " ___cxa_begin_catch " : ___cxa_begin_catch , " ___cxa_find_matching_catch " : ___cxa_find_matching_catch , " ___cxa_free_exception " : ___cxa_free_exception , " ___cxa_pure_virtual " : ___cxa_pure_virtual , " ___gxx_personality_v0 " : ___gxx_personality_v0 , " ___resumeException " : ___resumeException , " ___setErrNo " : ___setErrNo , " ___syscall140 " : ___syscall140 , " ___syscall146 " : ___syscall146 , " ___syscall221 " : ___syscall221 , " ___syscall3 " : ___syscall3 , " ___syscall5 " : ___syscall5 , " ___syscall54 " : ___syscall54 , " ___syscall6 " : ___syscall6 , " __computeUnpackAlignedImageSize " : __computeUnpackAlignedImageSize , " __embind_finalize_value_array " : __embind_finalize_value_array , " __embind_finalize_value_object " : __embind_finalize_value_object , " __embind_register_bool " : __embind_register_bool , " __embind_register_class " : __embind_register_class , " __embind_register_class_class_function " : __embind_register_class_class_function , " __embind_register_class_constructor " : __embind_register_class_constructor , " __embind_register_class_function " : __embind_register_class_function , " __embind_register_class_property " : __embind_register_class_property , " __embind_register_emval " : __embind_register_emval , " __embind_register_enum " : __embind_register_enum , " __embind_register_enum_value " : __embind_register_enum_value , " __embind_register_float " : __embind_register_float , " __embind_register_function " : __embind_register_function , " __embind_register_integer " : __embind_register_integer , " __embind_register_memory_view " : __embind_register_memory_view , " __embind_register_std_string " : __embind_register_std_string , " __embind_register_std_wstring " : __embind_register_std_wstring , " __embind_register_value_array " : __embind_register_value_array , " __embind_register_value_array_element " : __embind_register_value_array_element , " __embind_register_value_object " : __embind_register_value_object , " __embind_register_value_object_field " : __embind_register_value_object_field , " __embind_register_void " : __embind_register_void , " __emval_as " : __emval_as , " __emval_decref " : __emval_decref , " __emval_get_property " : __emval_get_property , " __emval_incref " : __emval_incref , 
" __emval_new_cstring " : __emval_new_cstring , " __emval_register " : __emval_register , " __emval_run_destructors " : __emval_run_destructors , " __emval_take_value " : __emval_take_value , " __glGenObject " : __glGenObject , " __heapObjectForWebGLType " : __heapObjectForWebGLType , " _abort " : _abort , " _clock_gettime " : _clock_gettime , " _embind_repr " : _embind_repr , " _emscripten_get_heap_size " : _emscripten_get_heap_size , " _emscripten_get_now " : _emscripten_get_now , " _emscripten_get_now_is_monotonic " : _emscripten_get_now_is_monotonic , " _emscripten_glActiveTexture " : _emscripten_glActiveTexture , " _emscripten_glAttachShader " : _emscripten_glAttachShader , " _emscripten_glBeginQuery " : _emscripten_glBeginQuery , " _emscripten_glBeginQueryEXT " : _emscripten_glBeginQueryEXT , " _emscripten_glBeginTransformFeedback " : _emscripten_glBeginTransformFeedback , " _emscripten_glBindAttribLocation " : _emscripten_glBindAttribLocation , " _emscripten_glBindBuffer " : _emscripten_glBindBuffer , " _emscripten_glBindBufferBase " : _emscripten_glBindBufferBase , " _emscripten_glBindBufferRange " : _emscripten_glBindBufferRange , " _emscripten_glBindFramebuffer " : _emscripten_glBindFramebuffer , " _emscripten_glBindRenderbuffer " : _emscripten_glBindRenderbuffer , " _emscripten_glBindSampler " : _emscripten_glBindSampler , " _emscripten_glBindTexture " : _emscripten_glBindTexture , " _emscripten_glBindTransformFeedback " : _emscripten_glBindTransformFeedback , " _emscripten_glBindVertexArray " : _emscripten_glBindVertexArray , " _emscripten_glBindVertexArrayOES " : _emscripten_glBindVertexArrayOES , " _emscripten_glBlendColor " : _emscripten_glBlendColor , " _emscripten_glBlendEquation " : _emscripten_glBlendEquation , " _emscripten_glBlendEquationSeparate " : _emscripten_glBlendEquationSeparate , " _emscripten_glBlendFunc " : _emscripten_glBlendFunc , " _emscripten_glBlendFuncSeparate " : _emscripten_glBlendFuncSeparate , " _emscripten_glBlitFramebuffer " : _emscripten_glBlitFramebuffer , " _emscripten_glBufferData " : _emscripten_glBufferData , " _emscripten_glBufferSubData " : _emscripten_glBufferSubData , " _emscripten_glCheckFramebufferStatus " : _emscripten_glCheckFramebufferStatus , " _emscripten_glClear " : _emscripten_glClear , " _emscripten_glClearBufferfi " : _emscripten_glClearBufferfi , " _emscripten_glClearBufferfv " : _emscripten_glClearBufferfv , " _emscripten_glClearBufferiv " : _emscripten_glClearBufferiv , " _emscripten_glClearBufferuiv " : _emscripten_glClearBufferuiv , " _emscripten_glClearColor " : _emscripten_glClearColor , " _emscripten_glClearDepthf " : _emscripten_glClearDepthf , " _emscripten_glClearStencil " : _emscripten_glClearStencil , " _emscripten_glClientWaitSync " : _emscripten_glClientWaitSync , " _emscripten_glColorMask " : _emscripten_glColorMask , " _emscripten_glCompileShader " : _emscripten_glCompileShader , " _emscripten_glCompressedTexImage2D " : _emscripten_glCompressedTexImage2D , " _emscripten_glCompressedTexImage3D " : _emscripten_glCompressedTexImage3D , " _emscripten_glCompressedTexSubImage2D " : _emscripten_glCompressedTexSubImage2D , " _emscripten_glCompressedTexSubImage3D " : _emscripten_glCompressedTexSubImage3D , " _emscripten_glCopyBufferSubData " : _emscripten_glCopyBufferSubData , " _emscripten_glCopyTexImage2D " : _emscripten_glCopyTexImage2D , " _emscripten_glCopyTexSubImage2D " : _emscripten_glCopyTexSubImage2D , " _emscripten_glCopyTexSubImage3D " : _emscripten_glCopyTexSubImage3D , " _emscripten_glCreateProgram " : 
_emscripten_glCreateProgram , " _emscripten_glCreateShader " : _emscripten_glCreateShader , " _emscripten_glCullFace " : _emscripten_glCullFace , " _emscripten_glDeleteBuffers " : _emscripten_glDeleteBuffers , " _emscripten_glDeleteFramebuffers " : _emscripten_glDeleteFramebuffers , " _emscripten_glDeleteProgram " : _emscripten_glDeleteProgram , " _emscripten_glDeleteQueries " : _emscripten_glDeleteQueries , " _emscripten_glDeleteQueriesEXT " : _emscripten_glDeleteQueriesEXT , " _emscripten_glDeleteRenderbuffers " : _emscripten_glDeleteRenderbuffers , " _emscripten_glDeleteSamplers " : _emscripten_glDeleteSamplers , " _emscripten_glDeleteShader " : _emscripten_glDeleteShader , " _emscripten_glDeleteSync " : _emscripten_glDeleteSync , " _emscripten_glDeleteTextures " : _emscripten_glDeleteTextures , " _emscripten_glDeleteTransformFeedbacks " : _emscripten_glDeleteTransformFeedbacks , " _emscripten_glDeleteVertexArrays " : _emscripten_glDeleteVertexArrays , " _emscripten_glDeleteVertexArraysOES " : _emscripten_glDeleteVertexArraysOES , " _emscripten_glDepthFunc " : _emscripten_glDepthFunc , " _emscripten_glDepthMask " : _emscripten_glDepthMask , " _emscripten_glDepthRangef " : _emscripten_glDepthRangef , " _emscripten_glDetachShader " : _emscripten_glDetachShader , " _emscripten_glDisable " : _emscripten_glDisable , " _emscripten_glDisableVertexAttribArray " : _emscripten_glDisableVertexAttribArray , " _emscripten_glDrawArrays " : _emscripten_glDrawArrays , " _emscripten_glDrawArraysInstanced " : _emscripten_glDrawArraysInstanced , " _emscripten_glDrawArraysInstancedANGLE " : _emscripten_glDrawArraysInstancedANGLE , " _emscripten_glDrawArraysInstancedARB " : _emscripten_glDrawArraysInstancedARB , " _emscripten_glDrawArraysInstancedEXT " : _emscripten_glDrawArraysInstancedEXT , " _emscripten_glDrawArraysInstancedNV " : _emscripten_glDrawArraysInstancedNV , " _emscripten_glDrawBuffers " : _emscripten_glDrawBuffers , " _emscripten_glDrawBuffersEXT " : _emscripten_glDrawBuffersEXT , " _emscripten_glDrawBuffersWEBGL " : _emscripten_glDrawBuffersWEBGL , " _emscripten_glDrawElements " : _emscripten_glDrawElements , " _emscripten_glDrawElementsInstanced " : _emscripten_glDrawElementsInstanced , " _emscripten_glDrawElementsInstancedANGLE " : _emscripten_glDrawElementsInstancedANGLE , " _emscripten_glDrawElementsInstancedARB " : _emscripten_glDrawElementsInstancedARB , " _emscripten_glDrawElementsInstancedEXT " : _emscripten_glDrawElementsInstancedEXT , " _emscripten_glDrawElementsInstancedNV " : _emscripten_glDrawElementsInstancedNV , " _emscripten_glDrawRangeElements " : _emscripten_glDrawRangeElements , " _emscripten_glEnable " : _emscripten_glEnable , " _emscripten_glEnableVertexAttribArray " : _emscripten_glEnableVertexAttribArray , " _emscripten_glEndQuery " : _emscripten_glEndQuery , " _emscripten_glEndQueryEXT " : _emscripten_glEndQueryEXT , " _emscripten_glEndTransformFeedback " : _emscripten_glEndTransformFeedback , " _emscripten_glFenceSync " : _emscripten_glFenceSync , " _emscripten_glFinish " : _emscripten_glFinish , " _emscripten_glFlush " : _emscripten_glFlush , " _emscripten_glFlushMappedBufferRange " : _emscripten_glFlushMappedBufferRange , " _emscripten_glFramebufferRenderbuffer " : _emscripten_glFramebufferRenderbuffer , " _emscripten_glFramebufferTexture2D " : _emscripten_glFramebufferTexture2D , " _emscripten_glFramebufferTextureLayer " : _emscripten_glFramebufferTextureLayer , " _emscripten_glFrontFace " : _emscripten_glFrontFace , " _emscripten_glGenBuffers " : 
_emscripten_glGenBuffers , " _emscripten_glGenFramebuffers " : _emscripten_glGenFramebuffers , " _emscripten_glGenQueries " : _emscripten_glGenQueries , " _emscripten_glGenQueriesEXT " : _emscripten_glGenQueriesEXT , " _emscripten_glGenRenderbuffers " : _emscripten_glGenRenderbuffers , " _emscripten_glGenSamplers " : _emscripten_glGenSamplers , " _emscripten_glGenTextures " : _emscripten_glGenTextures , " _emscripten_glGenTransformFeedbacks " : _emscripten_glGenTransformFeedbacks , " _emscripten_glGenVertexArrays " : _emscripten_glGenVertexArrays , " _emscripten_glGenVertexArraysOES " : _emscripten_glGenVertexArraysOES , " _emscripten_glGenerateMipmap " : _emscripten_glGenerateMipmap , " _emscripten_glGetActiveAttrib " : _emscripten_glGetActiveAttrib , " _emscripten_glGetActiveUniform " : _emscripten_glGetActiveUniform , " _emscripten_glGetActiveUniformBlockName " : _emscripten_glGetActiveUniformBlockName , " _emscripten_glGetActiveUniformBlockiv " : _emscripten_glGetActiveUniformBlockiv , " _emscripten_glGetActiveUniformsiv " : _emscripten_glGetActiveUniformsiv , " _emscripten_glGetAttachedShaders " : _emscripten_glGetAttachedShaders , " _emscripten_glGetAttribLocation " : _emscripten_glGetAttribLocation , " _emscripten_glGetBooleanv " : _emscripten_glGetBooleanv , " _emscripten_glGetBufferParameteri64v " : _emscripten_glGetBufferParameteri64v , " _emscripten_glGetBufferParameteriv " : _emscripten_glGetBufferParameteriv , " _emscripten_glGetBufferPointerv " : _emscripten_glGetBufferPointerv , " _emscripten_glGetError " : _emscripten_glGetError , " _emscripten_glGetFloatv " : _emscripten_glGetFloatv , " _emscripten_glGetFragDataLocation " : _emscripten_glGetFragDataLocation , " _emscripten_glGetFramebufferAttachmentParameteriv " : _emscripten_glGetFramebufferAttachmentParameteriv , " _emscripten_glGetInteger64i_v " : _emscripten_glGetInteger64i_v , " _emscripten_glGetInteger64v " : _emscripten_glGetInteger64v , " _emscripten_glGetIntegeri_v " : _emscripten_glGetIntegeri_v , " _emscripten_glGetIntegerv " : _emscripten_glGetIntegerv , " _emscripten_glGetInternalformativ " : _emscripten_glGetInternalformativ , " _emscripten_glGetProgramBinary " : _emscripten_glGetProgramBinary , " _emscripten_glGetProgramInfoLog " : _emscripten_glGetProgramInfoLog , " _emscripten_glGetProgramiv " : _emscripten_glGetProgramiv , " _emscripten_glGetQueryObjecti64vEXT " : _emscripten_glGetQueryObjecti64vEXT , " _emscripten_glGetQueryObjectivEXT " : _emscripten_glGetQueryObjectivEXT , " _emscripten_glGetQueryObjectui64vEXT " : _emscripten_glGetQueryObjectui64vEXT , " _emscripten_glGetQueryObjectuiv " : _emscripten_glGetQueryObjectuiv , " _emscripten_glGetQueryObjectuivEXT " : _emscripten_glGetQueryObjectuivEXT , " _emscripten_glGetQueryiv " : _emscripten_glGetQueryiv , " _emscripten_glGetQueryivEXT " : _emscripten_glGetQueryivEXT , " _emscripten_glGetRenderbufferParameteriv " : _emscripten_glGetRenderbufferParameteriv , " _emscripten_glGetSamplerParameterfv " : _emscripten_glGetSamplerParameterfv , " _emscripten_glGetSamplerParameteriv " : _emscripten_glGetSamplerParameteriv , " _emscripten_glGetShaderInfoLog " : _emscripten_glGetShaderInfoLog , " _emscripten_glGetShaderPrecisionFormat " : _emscripten_glGetShaderPrecisionFormat , " _emscripten_glGetShaderSource " : _emscripten_glGetShaderSource , " _emscripten_glGetShaderiv " : _emscripten_glGetShaderiv , " _emscripten_glGetString " : _emscripten_glGetString , " _emscripten_glGetStringi " : _emscripten_glGetStringi , " _emscripten_glGetSynciv " : 
_emscripten_glGetSynciv , " _emscripten_glGetTexParameterfv " : _emscripten_glGetTexParameterfv , " _emscripten_glGetTexParameteriv " : _emscripten_glGetTexParameteriv , " _emscripten_glGetTransformFeedbackVarying " : _emscripten_glGetTransformFeedbackVarying , " _emscripten_glGetUniformBlockIndex " : _emscripten_glGetUniformBlockIndex , " _emscripten_glGetUniformIndices " : _emscripten_glGetUniformIndices , " _emscripten_glGetUniformLocation " : _emscripten_glGetUniformLocation , " _emscripten_glGetUniformfv " : _emscripten_glGetUniformfv , " _emscripten_glGetUniformiv " : _emscripten_glGetUniformiv , " _emscripten_glGetUniformuiv " : _emscripten_glGetUniformuiv , " _emscripten_glGetVertexAttribIiv " : _emscripten_glGetVertexAttribIiv , " _emscripten_glGetVertexAttribIuiv " : _emscripten_glGetVertexAttribIuiv , " _emscripten_glGetVertexAttribPointerv " : _emscripten_glGetVertexAttribPointerv , " _emscripten_glGetVertexAttribfv " : _emscripten_glGetVertexAttribfv , " _emscripten_glGetVertexAttribiv " : _emscripten_glGetVertexAttribiv , " _emscripten_glHint " : _emscripten_glHint , " _emscripten_glInvalidateFramebuffer " : _emscripten_glInvalidateFramebuffer , " _emscripten_glInvalidateSubFramebuffer " : _emscripten_glInvalidateSubFramebuffer , " _emscripten_glIsBuffer " : _emscripten_glIsBuffer , " _emscripten_glIsEnabled " : _emscripten_glIsEnabled , " _emscripten_glIsFramebuffer " : _emscripten_glIsFramebuffer , " _emscripten_glIsProgram " : _emscripten_glIsProgram , " _emscripten_glIsQuery " : _emscripten_glIsQuery , " _emscripten_glIsQueryEXT " : _emscripten_glIsQueryEXT , " _emscripten_glIsRenderbuffer " : _emscripten_glIsRenderbuffer , " _emscripten_glIsSampler " : _emscripten_glIsSampler , " _emscripten_glIsShader " : _emscripten_glIsShader , " _emscripten_glIsSync " : _emscripten_glIsSync , " _emscripten_glIsTexture " : _emscripten_glIsTexture , " _emscripten_glIsTransformFeedback " : _emscripten_glIsTransformFeedback , " _emscripten_glIsVertexArray " : _emscripten_glIsVertexArray , " _emscripten_glIsVertexArrayOES " : _emscripten_glIsVertexArrayOES , " _emscripten_glLineWidth " : _emscripten_glLineWidth , " _emscripten_glLinkProgram " : _emscripten_glLinkProgram , " _emscripten_glMapBufferRange " : _emscripten_glMapBufferRange , " _emscripten_glPauseTransformFeedback " : _emscripten_glPauseTransformFeedback , " _emscripten_glPixelStorei " : _emscripten_glPixelStorei , " _emscripten_glPolygonOffset " : _emscripten_glPolygonOffset , " _emscripten_glProgramBinary " : _emscripten_glProgramBinary , " _emscripten_glProgramParameteri " : _emscripten_glProgramParameteri , " _emscripten_glQueryCounterEXT " : _emscripten_glQueryCounterEXT , " _emscripten_glReadBuffer " : _emscripten_glReadBuffer , " _emscripten_glReadPixels " : _emscripten_glReadPixels , " _emscripten_glReleaseShaderCompiler " : _emscripten_glReleaseShaderCompiler , " _emscripten_glRenderbufferStorage " : _emscripten_glRenderbufferStorage , " _emscripten_glRenderbufferStorageMultisample " : _emscripten_glRenderbufferStorageMultisample , " _emscripten_glResumeTransformFeedback " : _emscripten_glResumeTransformFeedback , " _emscripten_glSampleCoverage " : _emscripten_glSampleCoverage , " _emscripten_glSamplerParameterf " : _emscripten_glSamplerParameterf , " _emscripten_glSamplerParameterfv " : _emscripten_glSamplerParameterfv , " _emscripten_glSamplerParameteri " : _emscripten_glSamplerParameteri , " _emscripten_glSamplerParameteriv " : _emscripten_glSamplerParameteriv , " _emscripten_glScissor " : _emscripten_glScissor , " 
_emscripten_glShaderBinary " : _emscripten_glShaderBinary , " _emscripten_glShaderSource " : _emscripten_glShaderSource , " _emscripten_glStencilFunc " : _emscripten_glStencilFunc , " _emscripten_glStencilFuncSeparate " : _emscripten_glStencilFuncSeparate , " _emscripten_glStencilMask " : _emscripten_glStencilMask , " _emscripten_glStencilMaskSeparate " : _emscripten_glStencilMaskSeparate , " _emscripten_glStencilOp " : _emscripten_glStencilOp , " _emscripten_glStencilOpSeparate " : _emscripten_glStencilOpSeparate , " _emscripten_glTexImage2D " : _emscripten_glTexImage2D , " _emscripten_glTexImage3D " : _emscripten_glTexImage3D , " _emscripten_glTexParameterf " : _emscripten_glTexParameterf , " _emscripten_glTexParameterfv " : _emscripten_glTexParameterfv , " _emscripten_glTexParameteri " : _emscripten_glTexParameteri , " _emscripten_glTexParameteriv " : _emscripten_glTexParameteriv , " _emscripten_glTexStorage2D " : _emscripten_glTexStorage2D , " _emscripten_glTexStorage3D " : _emscripten_glTexStorage3D , " _emscripten_glTexSubImage2D " : _emscripten_glTexSubImage2D , " _emscripten_glTexSubImage3D " : _emscripten_glTexSubImage3D , " _emscripten_glTransformFeedbackVaryings " : _emscripten_glTransformFeedbackVaryings , " _emscripten_glUniform1f " : _emscripten_glUniform1f , " _emscripten_glUniform1fv " : _emscripten_glUniform1fv , " _emscripten_glUniform1i " : _emscripten_glUniform1i , " _emscripten_glUniform1iv " : _emscripten_glUniform1iv , " _emscripten_glUniform1ui " : _emscripten_glUniform1ui , " _emscripten_glUniform1uiv " : _emscripten_glUniform1uiv , " _emscripten_glUniform2f " : _emscripten_glUniform2f , " _emscripten_glUniform2fv " : _emscripten_glUniform2fv , " _emscripten_glUniform2i " : _emscripten_glUniform2i , " _emscripten_glUniform2iv " : _emscripten_glUniform2iv , " _emscripten_glUniform2ui " : _emscripten_glUniform2ui , " _emscripten_glUniform2uiv " : _emscripten_glUniform2uiv , " _emscripten_glUniform3f " : _emscripten_glUniform3f , " _emscripten_glUniform3fv " : _emscripten_glUniform3fv , " _emscripten_glUniform3i " : _emscripten_glUniform3i , " _emscripten_glUniform3iv " : _emscripten_glUniform3iv , " _emscripten_glUniform3ui " : _emscripten_glUniform3ui , " _emscripten_glUniform3uiv " : _emscripten_glUniform3uiv , " _emscripten_glUniform4f " : _emscripten_glUniform4f , " _emscripten_glUniform4fv " : _emscripten_glUniform4fv , " _emscripten_glUniform4i " : _emscripten_glUniform4i , " _emscripten_glUniform4iv " : _emscripten_glUniform4iv , " _emscripten_glUniform4ui " : _emscripten_glUniform4ui , " _emscripten_glUniform4uiv " : _emscripten_glUniform4uiv , " _emscripten_glUniformBlockBinding " : _emscripten_glUniformBlockBinding , " _emscripten_glUniformMatrix2fv " : _emscripten_glUniformMatrix2fv , " _emscripten_glUniformMatrix2x3fv " : _emscripten_glUniformMatrix2x3fv , " _emscripten_glUniformMatrix2x4fv " : _emscripten_glUniformMatrix2x4fv , " _emscripten_glUniformMatrix3fv " : _emscripten_glUniformMatrix3fv , " _emscripten_glUniformMatrix3x2fv " : _emscripten_glUniformMatrix3x2fv , " _emscripten_glUniformMatrix3x4fv " : _emscripten_glUniformMatrix3x4fv , " _emscripten_glUniformMatrix4fv " : _emscripten_glUniformMatrix4fv , " _emscripten_glUniformMatrix4x2fv " : _emscripten_glUniformMatrix4x2fv , " _emscripten_glUniformMatrix4x3fv " : _emscripten_glUniformMatrix4x3fv , " _emscripten_glUnmapBuffer " : _emscripten_glUnmapBuffer , " _emscripten_glUseProgram " : _emscripten_glUseProgram , " _emscripten_glValidateProgram " : _emscripten_glValidateProgram , " 
_emscripten_glVertexAttrib1f " : _emscripten_glVertexAttrib1f , " _emscripten_glVertexAttrib1fv " : _emscripten_glVertexAttrib1fv , " _emscripten_glVertexAttrib2f " : _emscripten_glVertexAttrib2f , " _emscripten_glVertexAttrib2fv " : _emscripten_glVertexAttrib2fv , " _emscripten_glVertexAttrib3f " : _emscripten_glVertexAttrib3f , " _emscripten_glVertexAttrib3fv " : _emscripten_glVertexAttrib3fv , " _emscripten_glVertexAttrib4f " : _emscripten_glVertexAttrib4f , " _emscripten_glVertexAttrib4fv " : _emscripten_glVertexAttrib4fv , " _emscripten_glVertexAttribDivisor " : _emscripten_glVertexAttribDivisor , " _emscripten_glVertexAttribDivisorANGLE " : _emscripten_glVertexAttribDivisorANGLE , " _emscripten_glVertexAttribDivisorARB " : _emscripten_glVertexAttribDivisorARB , " _emscripten_glVertexAttribDivisorEXT " : _emscripten_glVertexAttribDivisorEXT , " _emscripten_glVertexAttribDivisorNV " : _emscripten_glVertexAttribDivisorNV , " _emscripten_glVertexAttribI4i " : _emscripten_glVertexAttribI4i , " _emscripten_glVertexAttribI4iv " : _emscripten_glVertexAttribI4iv , " _emscripten_glVertexAttribI4ui " : _emscripten_glVertexAttribI4ui , " _emscripten_glVertexAttribI4uiv " : _emscripten_glVertexAttribI4uiv , " _emscripten_glVertexAttribIPointer " : _emscripten_glVertexAttribIPointer , " _emscripten_glVertexAttribPointer " : _emscripten_glVertexAttribPointer , " _emscripten_glViewport " : _emscripten_glViewport , " _emscripten_glWaitSync " : _emscripten_glWaitSync , " _emscripten_memcpy_big " : _emscripten_memcpy_big , " _emscripten_resize_heap " : _emscripten_resize_heap , " _glActiveTexture " : _glActiveTexture , " _glAttachShader " : _glAttachShader , " _glBindBuffer " : _glBindBuffer , " _glBindBufferRange " : _glBindBufferRange , " _glBindFramebuffer " : _glBindFramebuffer , " _glBindRenderbuffer " : _glBindRenderbuffer , " _glBindSampler " : _glBindSampler , " _glBindTexture " : _glBindTexture , " _glBindVertexArray " : _glBindVertexArray , " _glBlendEquationSeparate " : _glBlendEquationSeparate , " _glBlendFuncSeparate " : _glBlendFuncSeparate , " _glBlitFramebuffer " : _glBlitFramebuffer , " _glBufferData " : _glBufferData , " _glBufferSubData " : _glBufferSubData , " _glClear " : _glClear , " _glClearColor " : _glClearColor , " _glClearDepthf " : _glClearDepthf , " _glClearStencil " : _glClearStencil , " _glColorMask " : _glColorMask , " _glCompileShader " : _glCompileShader , " _glCompressedTexSubImage2D " : _glCompressedTexSubImage2D , " _glCompressedTexSubImage3D " : _glCompressedTexSubImage3D , " _glCreateProgram " : _glCreateProgram , " _glCreateShader " : _glCreateShader , " _glCullFace " : _glCullFace , " _glDeleteBuffers " : _glDeleteBuffers , " _glDeleteFramebuffers " : _glDeleteFramebuffers , " _glDeleteProgram " : _glDeleteProgram , " _glDeleteRenderbuffers " : _glDeleteRenderbuffers , " _glDeleteSamplers " : _glDeleteSamplers , " _glDeleteShader " : _glDeleteShader , " _glDeleteSync " : _glDeleteSync , " _glDeleteTextures " : _glDeleteTextures , " _glDeleteVertexArrays " : _glDeleteVertexArrays , " _glDepthFunc " : _glDepthFunc , " _glDepthMask " : _glDepthMask , " _glDetachShader " : _glDetachShader , " _glDisable " : _glDisable , " _glDisableVertexAttribArray " : _glDisableVertexAttribArray , " _glDrawArrays " : _glDrawArrays , " _glDrawElements " : _glDrawElements , " _glDrawRangeElements " : _glDrawRangeElements , " _glEnable " : _glEnable , " _glEnableVertexAttribArray " : _glEnableVertexAttribArray , " _glFenceSync " : _glFenceSync , " _glFlush " : _glFlush , " 
_glFramebufferRenderbuffer " : _glFramebufferRenderbuffer , " _glFramebufferTexture2D " : _glFramebufferTexture2D , " _glFramebufferTextureLayer " : _glFramebufferTextureLayer , " _glFrontFace " : _glFrontFace , " _glGenBuffers " : _glGenBuffers , " _glGenFramebuffers " : _glGenFramebuffers , " _glGenRenderbuffers " : _glGenRenderbuffers , " _glGenSamplers " : _glGenSamplers , " _glGenTextures " : _glGenTextures , " _glGenVertexArrays " : _glGenVertexArrays , " _glGenerateMipmap " : _glGenerateMipmap , " _glGetError " : _glGetError , " _glGetFloatv " : _glGetFloatv , " _glGetIntegerv " : _glGetIntegerv , " _glGetProgramInfoLog " : _glGetProgramInfoLog , " _glGetProgramiv " : _glGetProgramiv , " _glGetShaderInfoLog " : _glGetShaderInfoLog , " _glGetShaderiv " : _glGetShaderiv , " _glGetString " : _glGetString , " _glGetStringi " : _glGetStringi , " _glGetUniformBlockIndex " : _glGetUniformBlockIndex , " _glGetUniformLocation " : _glGetUniformLocation , " _glGetVertexAttribiv " : _glGetVertexAttribiv , " _glHint " : _glHint , " _glInvalidateFramebuffer " : _glInvalidateFramebuffer , " _glInvalidateSubFramebuffer " : _glInvalidateSubFramebuffer , " _glIsEnabled " : _glIsEnabled , " _glLinkProgram " : _glLinkProgram , " _glPixelStorei " : _glPixelStorei , " _glPolygonOffset " : _glPolygonOffset , " _glReadPixels " : _glReadPixels , " _glRenderbufferStorage " : _glRenderbufferStorage , " _glRenderbufferStorageMultisample " : _glRenderbufferStorageMultisample , " _glSamplerParameteri " : _glSamplerParameteri , " _glScissor " : _glScissor , " _glShaderSource " : _glShaderSource , " _glTexParameteri " : _glTexParameteri , " _glTexStorage2D " : _glTexStorage2D , " _glTexStorage2DMultisample " : _glTexStorage2DMultisample , " _glTexStorage3D " : _glTexStorage3D , " _glTexSubImage2D " : _glTexSubImage2D , " _glTexSubImage3D " : _glTexSubImage3D , " _glUniform1f " : _glUniform1f , " _glUniform1i " : _glUniform1i , " _glUniform4f " : _glUniform4f , " _glUniformBlockBinding " : _glUniformBlockBinding , " _glUseProgram " : _glUseProgram , " _glVertexAttribIPointer " : _glVertexAttribIPointer , " _glVertexAttribPointer " : _glVertexAttribPointer , " _glViewport " : _glViewport , " _glWaitSync " : _glWaitSync , " _llvm_exp2_f32 " : _llvm_exp2_f32 , " _llvm_exp2_f64 " : _llvm_exp2_f64 , " _llvm_log2_f32 " : _llvm_log2_f32 , " _llvm_trap " : _llvm_trap , " _pthread_cond_destroy " : _pthread_cond_destroy , " _pthread_cond_signal " : _pthread_cond_signal , " _pthread_cond_timedwait " : _pthread_cond_timedwait , " _pthread_cond_wait " : _pthread_cond_wait , " _pthread_create " : _pthread_create , " _pthread_join " : _pthread_join , " _sysconf " : _sysconf , " abortOnCannotGrowMemory " : abortOnCannotGrowMemory , " constNoSmartPtrRawPointerToWireType " : constNoSmartPtrRawPointerToWireType , " count_emval_handles " : count_emval_handles , " craftInvokerFunction " : craftInvokerFunction , " createNamedFunction " : createNamedFunction , " downcastPointer " : downcastPointer , " embind__requireFunction " : embind__requireFunction , " embind_init_charCodes " : embind_init_charCodes , " emscriptenWebGLGet " : emscriptenWebGLGet , " emscriptenWebGLGetIndexed " : emscriptenWebGLGetIndexed , " emscriptenWebGLGetTexPixelData " : emscriptenWebGLGetTexPixelData , " emscriptenWebGLGetUniform " : emscriptenWebGLGetUniform , " emscriptenWebGLGetVertexAttrib " : emscriptenWebGLGetVertexAttrib , " emscripten_realloc_buffer " : emscripten_realloc_buffer , " ensureOverloadTable " : ensureOverloadTable , " enumReadValueFromPointer 
" : enumReadValueFromPointer , " exposePublicSymbol " : exposePublicSymbol , " extendError " : extendError , " floatReadValueFromPointer " : floatReadValueFromPointer , " flushPendingDeletes " : flushPendingDeletes , " genericPointerToWireType " : genericPointerToWireType , " getBasestPointer " : getBasestPointer , " getInheritedInstance " : getInheritedInstance , " getInheritedInstanceCount " : getInheritedInstanceCount , " getLiveInheritedInstances " : getLiveInheritedInstances , " getShiftFromSize " : getShiftFromSize , " getStringOrSymbol " : getStringOrSymbol , " getTypeName " : getTypeName , " get_first_emval " : get_first_emval , " heap32VectorToArray " : heap32VectorToArray , " init_ClassHandle " : init_ClassHandle , " init_RegisteredPointer " : init_RegisteredPointer , " init_embind " : init_embind , " init_emval " : init_emval , " integerReadValueFromPointer " : integerReadValueFromPointer , " makeClassHandle " : makeClassHandle , " makeLegalFunctionName " : makeLegalFunctionName , " new_ " : new_ , " nonConstNoSmartPtrRawPointerToWireType " : nonConstNoSmartPtrRawPointerToWireType , " readLatin1String " : readLatin1String , " registerType " : registerType , " replacePublicSymbol " : replacePublicSymbol , " requireHandle " : requireHandle , " requireRegisteredType " : requireRegisteredType , " runDestructor " : runDestructor , " runDestructors " : runDestructors , " setDelayFunction " : setDelayFunction , " shallowCopyInternalPointer " : shallowCopyInternalPointer , " simpleReadValueFromPointer " : simpleReadValueFromPointer , " stringToNewUTF8 " : stringToNewUTF8 , " throwBindingError " : throwBindingError , " throwInstanceAlreadyDeleted " : throwInstanceAlreadyDeleted , " throwInternalError " : throwInternalError , " throwUnboundTypeError " : throwUnboundTypeError , " upcastPointer " : upcastPointer , " validateThis " : validateThis , " whenDependentTypesAreResolved " : whenDependentTypesAreResolved , " tempDoublePtr " : tempDoublePtr , " DYNAMICTOP_PTR " : DYNAMICTOP_PTR } ; var asm = Module [ " asm " ] ( asmGlobalArg , asmLibraryArg , buffer ) ; Module [ " asm " ] = asm ; var ___errno_location = Module [ " ___errno_location " ] = function ( ) { return Module [ " asm " ] [ " ___errno_location " ] . apply ( null , arguments ) } ; var ___getTypeName = Module [ " ___getTypeName " ] = function ( ) { return Module [ " asm " ] [ " ___getTypeName " ] . apply ( null , arguments ) } ; var _emscripten_GetProcAddress = Module [ " _emscripten_GetProcAddress " ] = function ( ) { return Module [ " asm " ] [ " _emscripten_GetProcAddress " ] . apply ( null , arguments ) } ; var _emscripten_replace_memory = Module [ " _emscripten_replace_memory " ] = function ( ) { return Module [ " asm " ] [ " _emscripten_replace_memory " ] . apply ( null , arguments ) } ; var _free = Module [ " _free " ] = function ( ) { return Module [ " asm " ] [ " _free " ] . apply ( null , arguments ) } ; var _llvm_round_f32 = Module [ " _llvm_round_f32 " ] = function ( ) { return Module [ " asm " ] [ " _llvm_round_f32 " ] . apply ( null , arguments ) } ; var _malloc = Module [ " _malloc " ] = function ( ) { return Module [ " asm " ] [ " _malloc " ] . apply ( null , arguments ) } ; var _memcpy = Module [ " _memcpy " ] = function ( ) { return Module [ " asm " ] [ " _memcpy " ] . apply ( null , arguments ) } ; var _memmove = Module [ " _memmove " ] = function ( ) { return Module [ " asm " ] [ " _memmove " ] . 
apply ( null , arguments ) } ; var _memset = Module [ " _memset " ] = function ( ) { return Module [ " asm " ] [ " _memset " ] . apply ( null , arguments ) } ; var _pthread_cond_broadcast = Module [ " _pthread_cond_broadcast " ] = function ( ) { return Module [ " asm " ] [ " _pthread_cond_broadcast " ] . apply ( null , arguments ) } ; var _sbrk = Module [ " _sbrk " ] = function ( ) { return Module [ " asm " ] [ " _sbrk " ] . apply ( null , arguments ) } ; var _strstr = Module [ " _strstr " ] = function ( ) { return Module [ " asm " ] [ " _strstr " ] . apply ( null , arguments ) } ; var establishStackSpace = Module [ " establishStackSpace " ] = function ( ) { return Module [ " asm " ] [ " establishStackSpace " ] . apply ( null , arguments ) } ; var globalCtors = Module [ " globalCtors " ] = function ( ) { return Module [ " asm " ] [ " globalCtors " ] . apply ( null , arguments ) } ; var stackAlloc = Module [ " stackAlloc " ] = function ( ) { return Module [ " asm " ] [ " stackAlloc " ] . apply ( null , arguments ) } ; var stackRestore = Module [ " stackRestore " ] = function ( ) { return Module [ " asm " ] [ " stackRestore " ] . apply ( null , arguments ) } ; var stackSave = Module [ " stackSave " ] = function ( ) { return Module [ " asm " ] [ " stackSave " ] . apply ( null , arguments ) } ; var dynCall_dii = Module [ " dynCall_dii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_dii " ] . apply ( null , arguments ) } ; var dynCall_fi = Module [ " dynCall_fi " ] = function ( ) { return Module [ " asm " ] [ " dynCall_fi " ] . apply ( null , arguments ) } ; var dynCall_fii = Module [ " dynCall_fii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_fii " ] . apply ( null , arguments ) } ; var dynCall_fiii = Module [ " dynCall_fiii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_fiii " ] . apply ( null , arguments ) } ; var dynCall_i = Module [ " dynCall_i " ] = function ( ) { return Module [ " asm " ] [ " dynCall_i " ] . apply ( null , arguments ) } ; var dynCall_ii = Module [ " dynCall_ii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_ii " ] . apply ( null , arguments ) } ; var dynCall_iif = Module [ " dynCall_iif " ] = function ( ) { return Module [ " asm " ] [ " dynCall_iif " ] . apply ( null , arguments ) } ; var dynCall_iiff = Module [ " dynCall_iiff " ] = function ( ) { return Module [ " asm " ] [ " dynCall_iiff " ] . apply ( null , arguments ) } ; var dynCall_iii = Module [ " dynCall_iii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_iii " ] . apply ( null , arguments ) } ; var dynCall_iiif = Module [ " dynCall_iiif " ] = function ( ) { return Module [ " asm " ] [ " dynCall_iiif " ] . apply ( null , arguments ) } ; var dynCall_iiiff = Module [ " dynCall_iiiff " ] = function ( ) { return Module [ " asm " ] [ " dynCall_iiiff " ] . apply ( null , arguments ) } ; var dynCall_iiii = Module [ " dynCall_iiii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_iiii " ] . apply ( null , arguments ) } ; var dynCall_iiiii = Module [ " dynCall_iiiii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_iiiii " ] . apply ( null , arguments ) } ; var dynCall_iiiiii = Module [ " dynCall_iiiiii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_iiiiii " ] . apply ( null , arguments ) } ; var dynCall_iiiiiii = Module [ " dynCall_iiiiiii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_iiiiiii " ] . 
apply ( null , arguments ) } ; var dynCall_iiiiiiii = Module [ " dynCall_iiiiiiii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_iiiiiiii " ] . apply ( null , arguments ) } ; var dynCall_iiij = Module [ " dynCall_iiij " ] = function ( ) { return Module [ " asm " ] [ " dynCall_iiij " ] . apply ( null , arguments ) } ; var dynCall_jii = Module [ " dynCall_jii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_jii " ] . apply ( null , arguments ) } ; var dynCall_v = Module [ " dynCall_v " ] = function ( ) { return Module [ " asm " ] [ " dynCall_v " ] . apply ( null , arguments ) } ; var dynCall_vf = Module [ " dynCall_vf " ] = function ( ) { return Module [ " asm " ] [ " dynCall_vf " ] . apply ( null , arguments ) } ; var dynCall_vff = Module [ " dynCall_vff " ] = function ( ) { return Module [ " asm " ] [ " dynCall_vff " ] . apply ( null , arguments ) } ; var dynCall_vffff = Module [ " dynCall_vffff " ] = function ( ) { return Module [ " asm " ] [ " dynCall_vffff " ] . apply ( null , arguments ) } ; var dynCall_vfi = Module [ " dynCall_vfi " ] = function ( ) { return Module [ " asm " ] [ " dynCall_vfi " ] . apply ( null , arguments ) } ; var dynCall_vi = Module [ " dynCall_vi " ] = function ( ) { return Module [ " asm " ] [ " dynCall_vi " ] . apply ( null , arguments ) } ; var dynCall_viddd = Module [ " dynCall_viddd " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viddd " ] . apply ( null , arguments ) } ; var dynCall_viddddi = Module [ " dynCall_viddddi " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viddddi " ] . apply ( null , arguments ) } ; var dynCall_vif = Module [ " dynCall_vif " ] = function ( ) { return Module [ " asm " ] [ " dynCall_vif " ] . apply ( null , arguments ) } ; var dynCall_viff = Module [ " dynCall_viff " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viff " ] . apply ( null , arguments ) } ; var dynCall_vifff = Module [ " dynCall_vifff " ] = function ( ) { return Module [ " asm " ] [ " dynCall_vifff " ] . apply ( null , arguments ) } ; var dynCall_viffff = Module [ " dynCall_viffff " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viffff " ] . apply ( null , arguments ) } ; var dynCall_vii = Module [ " dynCall_vii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_vii " ] . apply ( null , arguments ) } ; var dynCall_viid = Module [ " dynCall_viid " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viid " ] . apply ( null , arguments ) } ; var dynCall_viidd = Module [ " dynCall_viidd " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viidd " ] . apply ( null , arguments ) } ; var dynCall_viiddd = Module [ " dynCall_viiddd " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viiddd " ] . apply ( null , arguments ) } ; var dynCall_viidddddd = Module [ " dynCall_viidddddd " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viidddddd " ] . apply ( null , arguments ) } ; var dynCall_viiddddi = Module [ " dynCall_viiddddi " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viiddddi " ] . apply ( null , arguments ) } ; var dynCall_viif = Module [ " dynCall_viif " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viif " ] . apply ( null , arguments ) } ; var dynCall_viiff = Module [ " dynCall_viiff " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viiff " ] . apply ( null , arguments ) } ; var dynCall_viifff = Module [ " dynCall_viifff " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viifff " ] . 
apply ( null , arguments ) } ; var dynCall_viifi = Module [ " dynCall_viifi " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viifi " ] . apply ( null , arguments ) } ; var dynCall_viii = Module [ " dynCall_viii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viii " ] . apply ( null , arguments ) } ; var dynCall_viiidd = Module [ " dynCall_viiidd " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viiidd " ] . apply ( null , arguments ) } ; var dynCall_viiidddddd = Module [ " dynCall_viiidddddd " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viiidddddd " ] . apply ( null , arguments ) } ; var dynCall_viiif = Module [ " dynCall_viiif " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viiif " ] . apply ( null , arguments ) } ; var dynCall_viiii = Module [ " dynCall_viiii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viiii " ] . apply ( null , arguments ) } ; var dynCall_viiiii = Module [ " dynCall_viiiii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viiiii " ] . apply ( null , arguments ) } ; var dynCall_viiiiii = Module [ " dynCall_viiiiii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viiiiii " ] . apply ( null , arguments ) } ; var dynCall_viiiiiii = Module [ " dynCall_viiiiiii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viiiiiii " ] . apply ( null , arguments ) } ; var dynCall_viiiiiiii = Module [ " dynCall_viiiiiiii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viiiiiiii " ] . apply ( null , arguments ) } ; var dynCall_viiiiiiiii = Module [ " dynCall_viiiiiiiii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viiiiiiiii " ] . apply ( null , arguments ) } ; var dynCall_viiiiiiiiii = Module [ " dynCall_viiiiiiiiii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viiiiiiiiii " ] . apply ( null , arguments ) } ; var dynCall_viiiiiiiiiii = Module [ " dynCall_viiiiiiiiiii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viiiiiiiiiii " ] . apply ( null , arguments ) } ; var dynCall_viij = Module [ " dynCall_viij " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viij " ] . apply ( null , arguments ) } ; var dynCall_vij = Module [ " dynCall_vij " ] = function ( ) { return Module [ " asm " ] [ " dynCall_vij " ] . apply ( null , arguments ) } ; Module [ " asm " ] = asm ; function ExitStatus ( status ) { this . name = " ExitStatus " ; this . message = " Program terminated with exit ( " + status + " ) " ; this . status = status } ExitStatus . prototype = new Error ; ExitStatus . prototype . constructor = ExitStatus ; dependenciesFulfilled = function runCaller ( ) { if ( ! Module [ " calledRun " ] ) run ( ) ; if ( ! Module [ " calledRun " ] ) dependenciesFulfilled = runCaller } ; function run ( args ) { args = args | | Module [ " arguments " ] ; if ( runDependencies > 0 ) { return } preRun ( ) ; if ( runDependencies > 0 ) return ; if ( Module [ " calledRun " ] ) return ; function doRun ( ) { if ( Module [ " calledRun " ] ) return ; Module [ " calledRun " ] = true ; if ( ABORT ) return ; ensureInitRuntime ( ) ; preMain ( ) ; if ( Module [ " onRuntimeInitialized " ] ) Module [ " onRuntimeInitialized " ] ( ) ; postRun ( ) } if ( Module [ " setStatus " ] ) { Module [ " setStatus " ] ( " Running . . . 
" ) ; setTimeout ( function ( ) { setTimeout ( function ( ) { Module [ " setStatus " ] ( " " ) } , 1 ) ; doRun ( ) } , 1 ) } else { doRun ( ) } } Module [ " run " ] = run ; function abort ( what ) { if ( Module [ " onAbort " ] ) { Module [ " onAbort " ] ( what ) } if ( what ! = = undefined ) { out ( what ) ; err ( what ) ; what = JSON . stringify ( what ) } else { what = " " } ABORT = true ; EXITSTATUS = 1 ; throw " abort ( " + what + " ) . Build with - s ASSERTIONS = 1 for more info . " } Module [ " abort " ] = abort ; if ( Module [ " preInit " ] ) { if ( typeof Module [ " preInit " ] = = " function " ) Module [ " preInit " ] = [ Module [ " preInit " ] ] ; while ( Module [ " preInit " ] . length > 0 ) { Module [ " preInit " ] . pop ( ) ( ) } } Module [ " noExitRuntime " ] = true ; run ( ) ; Filament . remainingInitializationTasks = 1 ; Filament . init = function ( assets , onready ) { Filament . onReady = onready ; Filament . remainingInitializationTasks + = assets . length ; Filament . assets = { } ; if ( typeof glMatrix ! = = " undefined " ) { Filament . loadMathExtensions ( ) } Filament . fetch ( assets , null , function ( name ) { if ( - - Filament . remainingInitializationTasks = = 0 & & Filament . onReady ) { Filament . onReady ( ) } } ) } ; Filament . postRun = function ( ) { Filament . loadClassExtensions ( ) ; if ( - - Filament . remainingInitializationTasks = = 0 & & Filament . onReady ) { Filament . onReady ( ) } } ; Filament . fetch = function ( assets , onDone , onFetched ) { var remainingAssets = assets . length ; assets . forEach ( function ( name ) { const lower = name . toLowerCase ( ) ; if ( lower . endsWith ( " . jpeg " ) | | lower . endsWith ( " . jpg " ) ) { var img = new Image ; img . src = name ; img . decoding = " async " ; img . onload = function ( ) { Filament . assets [ name ] = img ; if ( onFetched ) { onFetched ( name ) } if ( - - remainingAssets = = = 0 & & onDone ) { onDone ( ) } } ; return } fetch ( name ) . then ( function ( response ) { if ( ! response . ok ) { throw new Error ( name ) } return response . arrayBuffer ( ) } ) . then ( function ( arrayBuffer ) { Filament . assets [ name ] = new Uint8Array ( arrayBuffer ) ; if ( onFetched ) { onFetched ( name ) } if ( - - remainingAssets = = = 0 & & onDone ) { onDone ( ) } } ) } ) } ; function getBufferDescriptor ( buffer ) { if ( " string " = = typeof buffer | | buffer instanceof String ) { buffer = Filament . assets [ buffer ] } if ( buffer . buffer instanceof ArrayBuffer ) { buffer = Filament . Buffer ( buffer ) } return buffer } Filament . loadClassExtensions = function ( ) { Filament . Engine . create = function ( canvas , options ) { const defaults = { majorVersion : 2 , minorVersion : 0 , antialias : false , depth : false , alpha : false } ; options = Object . assign ( defaults , options ) ; const ctx = canvas . getContext ( " webgl2 " , options ) ; const handle = GL . registerContext ( ctx , options ) ; GL . makeContextCurrent ( handle ) ; ctx . getExtension ( " WEBGL_compressed_texture_s3tc " ) ; ctx . getExtension ( " WEBGL_compressed_texture_astc " ) ; ctx . getExtension ( " WEBGL_compressed_texture_etc " ) ; return Filament . Engine . _create ( ) } ; Filament . Engine . prototype . createMaterial = function ( buffer ) { buffer = getBufferDescriptor ( buffer ) ; const result = this . _createMaterial ( buffer ) ; buffer . delete ( ) ; return result } ; Filament . Engine . prototype . 
createTextureFromKtx = function ( buffer , options ) { buffer = getBufferDescriptor ( buffer ) ; const result = Filament . _createTextureFromKtx ( buffer , this , options ) ; buffer . delete ( ) ; return result } ; Filament . Engine . prototype . createIblFromKtx = function ( buffer , options ) { buffer = getBufferDescriptor ( buffer ) ; const result = Filament . _createIblFromKtx ( buffer , this , options ) ; buffer . delete ( ) ; return result } ; Filament . Engine . prototype . createSkyFromKtx = function ( buffer , options ) { options = options | | { " rgbm " : true } ; const skytex = this . createTextureFromKtx ( buffer , options ) ; return Filament . Skybox . Builder ( ) . environment ( skytex ) . build ( this ) } ; Filament . Engine . prototype . createTextureFromPng = function ( buffer , options ) { buffer = getBufferDescriptor ( buffer ) ; const result = Filament . _createTextureFromPng ( buffer , this , options ) ; buffer . delete ( ) ; return result } ; Filament . Engine . prototype . createTextureFromJpeg = function ( image , options ) { if ( " string " = = typeof image | | image instanceof String ) { image = Filament . assets [ image ] } return Filament . _createTextureFromJpeg ( image , this , options ) } ; Filament . Engine . prototype . loadFilamesh = function ( buffer , definstance , matinstances ) { buffer = getBufferDescriptor ( buffer ) ; const result = Filament . _loadFilamesh ( this , buffer , definstance , matinstances ) ; buffer . delete ( ) ; return result } ; Filament . Engine . prototype . createAssetLoader = function ( ) { const materials = new Filament . gltfio $ UbershaderLoader ( this ) ; return new Filament . gltfio $ AssetLoader ( this , materials ) } ; Filament . VertexBuffer . prototype . setBufferAt = function ( engine , bufferIndex , buffer ) { buffer = getBufferDescriptor ( buffer ) ; this . _setBufferAt ( engine , bufferIndex , buffer ) ; buffer . delete ( ) } ; Filament . IndexBuffer . prototype . setBuffer = function ( engine , buffer ) { buffer = getBufferDescriptor ( buffer ) ; this . _setBuffer ( engine , buffer ) ; buffer . delete ( ) } ; Filament . RenderableManager $ Builder . prototype . build = Filament . LightManager $ Builder . prototype . build = function ( engine , entity ) { const result = this . _build ( engine , entity ) ; this . delete ( ) ; return result } ; Filament . VertexBuffer $ Builder . prototype . build = Filament . IndexBuffer $ Builder . prototype . build = Filament . Texture $ Builder . prototype . build = Filament . IndirectLight $ Builder . prototype . build = Filament . Skybox $ Builder . prototype . build = function ( engine ) { const result = this . _build ( engine ) ; this . delete ( ) ; return result } ; Filament . KtxBundle . prototype . getBlob = function ( index ) { const blob = this . _getBlob ( index ) ; const result = blob . getBytes ( ) ; blob . delete ( ) ; return result } ; Filament . KtxBundle . prototype . getCubeBlob = function ( miplevel ) { const blob = this . _getCubeBlob ( miplevel ) ; const result = blob . getBytes ( ) ; blob . delete ( ) ; return result } ; Filament . Texture . prototype . setImage = function ( engine , level , pbd ) { this . _setImage ( engine , level , pbd ) ; pbd . delete ( ) } ; Filament . Texture . prototype . setImageCube = function ( engine , level , pbd ) { this . _setImageCube ( engine , level , pbd ) ; pbd . delete ( ) } ; Filament . SurfaceOrientation $ Builder . prototype . build = function ( ) { const result = this . _build ( ) ; this . 
delete ( ) ; return result } ; Filament . gltfio $ AssetLoader . prototype . createAssetFromJson = function ( buffer ) { buffer = getBufferDescriptor ( buffer ) ; const result = this . _createAssetFromJson ( buffer ) ; buffer . delete ( ) ; return result } ; Filament . gltfio $ AssetLoader . prototype . createAssetFromBinary = function ( buffer ) { buffer = getBufferDescriptor ( buffer ) ; const result = this . _createAssetFromBinary ( buffer ) ; buffer . delete ( ) ; return result } ; Filament . gltfio $ FilamentAsset . prototype . loadResources = function ( onDone , onFetched ) { const asset = this ; const engine = this . getEngine ( ) ; const urlkeys = this . getResourceUrls ( ) ; const urlset = new Set ; for ( var i = 0 ; i < urlkeys . size ( ) ; i + + ) { const url = urlkeys . get ( i ) ; if ( url ) { urlset . add ( url ) } } const resourceLoader = new Filament . gltfio $ ResourceLoader ( engine ) ; Filament . fetch ( Array . from ( urlset ) , function ( ) { const finalize = function ( ) { resourceLoader . loadResources ( asset ) ; window . requestAnimationFrame ( function ( ) { window . requestAnimationFrame ( function ( ) { resourceLoader . delete ( ) } ) } ) } ; if ( onDone ) { onDone ( finalize ) } else { finalize ( ) } } , function ( name ) { var buffer = getBufferDescriptor ( name ) ; resourceLoader . addResourceData ( name , buffer ) ; buffer . delete ( ) ; if ( onFetched ) { onFetched ( name ) } } ) } } ; Filament . Buffer = function ( typedarray ) { console . assert ( typedarray . buffer instanceof ArrayBuffer ) ; console . assert ( typedarray . byteLength > 0 ) ; if ( Filament . HEAPU32 . buffer = = typedarray . buffer ) { typedarray = new Uint8Array ( typedarray ) } const ta = typedarray ; const bd = new Filament . driver $ BufferDescriptor ( ta ) ; const uint8array = new Uint8Array ( ta . buffer , ta . byteOffset , ta . byteLength ) ; bd . getBytes ( ) . set ( uint8array ) ; return bd } ; Filament . PixelBuffer = function ( typedarray , format , datatype ) { console . assert ( typedarray . buffer instanceof ArrayBuffer ) ; console . assert ( typedarray . byteLength > 0 ) ; if ( Filament . HEAPU32 . buffer = = typedarray . buffer ) { typedarray = new Uint8Array ( typedarray ) } const ta = typedarray ; const bd = new Filament . driver $ PixelBufferDescriptor ( ta , format , datatype ) ; const uint8array = new Uint8Array ( ta . buffer , ta . byteOffset , ta . byteLength ) ; bd . getBytes ( ) . set ( uint8array ) ; return bd } ; Filament . CompressedPixelBuffer = function ( typedarray , cdatatype , faceSize ) { console . assert ( typedarray . buffer instanceof ArrayBuffer ) ; console . assert ( typedarray . byteLength > 0 ) ; faceSize = faceSize | | typedarray . byteLength ; if ( Filament . HEAPU32 . buffer = = typedarray . buffer ) { typedarray = new Uint8Array ( typedarray ) } const ta = typedarray ; const bd = new Filament . driver $ PixelBufferDescriptor ( ta , cdatatype , faceSize , true ) ; const uint8array = new Uint8Array ( ta . buffer , ta . byteOffset , ta . byteLength ) ; bd . getBytes ( ) . set ( uint8array ) ; return bd } ; Filament . _loadFilamesh = function ( engine , buffer , definstance , matinstances ) { matinstances = matinstances | | { } ; const registry = new Filament . MeshReader $ MaterialRegistry ; for ( var key in matinstances ) { registry . set ( key , matinstances [ key ] ) } if ( definstance ) { registry . set ( " DefaultMaterial " , definstance ) } const mesh = Filament . MeshReader . 
loadMeshFromBuffer ( engine , buffer , registry ) ; const keys = registry . keys ( ) ; for ( var i = 0 ; i < keys . size ( ) ; i + + ) { const key = keys . get ( i ) ; const minstance = registry . get ( key ) ; matinstances [ key ] = minstance } return { " renderable " : mesh . renderable ( ) , " vertexBuffer " : mesh . vertexBuffer ( ) , " indexBuffer " : mesh . indexBuffer ( ) } } ; Filament . IcoSphere = function ( nsubdivs ) { const X = . 5257311121191336 ; const Z = . 8506508083520399 ; const N = 0 ; this . vertices = new Float32Array ( [ - X , + N , + Z , + X , + N , + Z , - X , + N , - Z , + X , + N , - Z , + N , + Z , + X , + N , + Z , - X , + N , - Z , + X , + N , - Z , - X , + Z , + X , + N , - Z , + X , + N , + Z , - X , + N , - Z , - X , + N ] ) ; this . triangles = new Uint16Array ( [ 1 , 4 , 0 , 4 , 9 , 0 , 4 , 5 , 9 , 8 , 5 , 4 , 1 , 8 , 4 , 1 , 10 , 8 , 10 , 3 , 8 , 8 , 3 , 5 , 3 , 2 , 5 , 3 , 7 , 2 , 3 , 10 , 7 , 10 , 6 , 7 , 6 , 11 , 7 , 6 , 0 , 11 , 6 , 1 , 0 , 10 , 1 , 6 , 11 , 0 , 9 , 2 , 11 , 9 , 5 , 2 , 9 , 11 , 2 , 7 ] ) ; if ( nsubdivs ) { while ( nsubdivs - - > 0 ) { this . subdivide ( ) } } const nverts = this . vertices . length / 3 ; const normals = Filament . _malloc ( this . vertices . length * this . vertices . BYTES_PER_ELEMENT ) ; Module . HEAPU8 . set ( new Uint8Array ( this . vertices . buffer ) , normals ) ; const sob = new Filament . SurfaceOrientation $ Builder ; sob . vertexCount ( nverts ) ; sob . normals ( normals , 0 ) ; const orientation = sob . build ( ) ; Filament . _free ( normals ) ; const quatsBufferSize = 8 * nverts ; const quatsBuffer = Filament . _malloc ( quatsBufferSize ) ; orientation . getQuats ( quatsBuffer , nverts , Filament . VertexBuffer $ AttributeType . SHORT4 ) ; const tangentsMemory = Module . HEAPU8 . subarray ( quatsBuffer , quatsBuffer + quatsBufferSize ) . slice ( ) . buffer ; Filament . _free ( quatsBuffer ) ; this . tangents = new Int16Array ( tangentsMemory ) ; orientation . delete ( ) } ; Filament . IcoSphere . prototype . subdivide = function ( ) { const srctris = this . triangles ; const srcverts = this . vertices ; const nsrctris = srctris . length / 3 ; const ndsttris = nsrctris * 4 ; const nsrcverts = srcverts . length / 3 ; const ndstverts = nsrcverts + nsrctris * 3 ; const dsttris = new Uint16Array ( ndsttris * 3 ) ; const dstverts = new Float32Array ( ndstverts * 3 ) ; dstverts . set ( srcverts ) ; var srcind = 0 , dstind = 0 , i3 = nsrcverts * 3 , i4 = i3 + 3 , i5 = i4 + 3 ; for ( var tri = 0 ; tri < nsrctris ; tri + + , i3 + = 9 , i4 + = 9 , i5 + = 9 ) { const i0 = srctris [ srcind + + ] * 3 ; const i1 = srctris [ srcind + + ] * 3 ; const i2 = srctris [ srcind + + ] * 3 ; const v0 = srcverts . subarray ( i0 , i0 + 3 ) ; const v1 = srcverts . subarray ( i1 , i1 + 3 ) ; const v2 = srcverts . subarray ( i2 , i2 + 3 ) ; const v3 = dstverts . subarray ( i3 , i3 + 3 ) ; const v4 = dstverts . subarray ( i4 , i4 + 3 ) ; const v5 = dstverts . subarray ( i5 , i5 + 3 ) ; vec3 . normalize ( v3 , vec3 . add ( v3 , v0 , v1 ) ) ; vec3 . normalize ( v4 , vec3 . add ( v4 , v1 , v2 ) ) ; vec3 . normalize ( v5 , vec3 . 
add ( v5 , v2 , v0 ) ) ; dsttris [ dstind + + ] = i0 / 3 ; dsttris [ dstind + + ] = i3 / 3 ; dsttris [ dstind + + ] = i5 / 3 ; dsttris [ dstind + + ] = i3 / 3 ; dsttris [ dstind + + ] = i1 / 3 ; dsttris [ dstind + + ] = i4 / 3 ; dsttris [ dstind + + ] = i5 / 3 ; dsttris [ dstind + + ] = i3 / 3 ; dsttris [ dstind + + ] = i4 / 3 ; dsttris [ dstind + + ] = i2 / 3 ; dsttris [ dstind + + ] = i5 / 3 ; dsttris [ dstind + + ] = i4 / 3 } this . triangles = dsttris ; this . vertices = dstverts } ; function clamp ( v , least , most ) { return Math . max ( Math . min ( most , v ) , least ) } Filament . packSnorm16 = function ( value ) { return Math . round ( clamp ( value , - 1 , 1 ) * 32767 ) } ; Filament . loadMathExtensions = function ( ) { vec4 . packSnorm16 = function ( out , src ) { out [ 0 ] = Filament . packSnorm16 ( src [ 0 ] ) ; out [ 1 ] = Filament . packSnorm16 ( src [ 1 ] ) ; out [ 2 ] = Filament . packSnorm16 ( src [ 2 ] ) ; out [ 3 ] = Filament . packSnorm16 ( src [ 3 ] ) ; return out } ; const fromRotationZ = mat3 . fromRotation ; mat3 . fromRotation = function ( out , radians , axis ) { if ( axis ) { return mat3 . fromMat4 ( out , mat4 . fromRotation ( mat4 . create ( ) , radians , axis ) ) } return fromRotationZ ( out , radians ) } } ; Filament . _createTextureFromKtx = function ( ktxdata , engine , options ) { options = options | | { } ; const ktx = options [ " ktx " ] | | new Filament . KtxBundle ( ktxdata ) ; const rgbm = ! ! options [ " rgbm " ] ; const srgb = ! ! options [ " srgb " ] ; return Filament . KtxUtility $ createTexture ( engine , ktx , srgb , rgbm ) } ; Filament . _createIblFromKtx = function ( ktxdata , engine , options ) { options = options | | { " rgbm " : true } ; const iblktx = options [ " ktx " ] = new Filament . KtxBundle ( ktxdata ) ; const ibltex = Filament . _createTextureFromKtx ( ktxdata , engine , options ) ; const shstring = iblktx . getMetadata ( " sh " ) ; const shfloats = shstring . split ( / \ s / , 9 * 3 ) . map ( parseFloat ) ; return Filament . IndirectLight . Builder ( ) . reflections ( ibltex ) . irradianceSh ( 3 , shfloats ) . build ( engine ) } ; Filament . _createTextureFromPng = function ( pngdata , engine , options ) { const Sampler = Filament . Texture $ Sampler ; const TextureFormat = Filament . Texture $ InternalFormat ; const PixelDataFormat = Filament . PixelDataFormat ; options = options | | { } ; const srgb = ! ! options [ " srgb " ] ; const rgbm = ! ! options [ " rgbm " ] ; const noalpha = ! ! options [ " noalpha " ] ; const nomips = ! ! options [ " nomips " ] ; const decodedpng = Filament . decodePng ( pngdata , noalpha ? 3 : 4 ) ; var texformat , pbformat , pbtype ; if ( noalpha ) { texformat = srgb ? TextureFormat . SRGB8 : TextureFormat . RGB8 ; pbformat = PixelDataFormat . RGB ; pbtype = Filament . PixelDataType . UBYTE } else { texformat = srgb ? TextureFormat . SRGB8_A8 : TextureFormat . RGBA8 ; pbformat = rgbm ? PixelDataFormat . RGBM : PixelDataFormat . RGBA ; pbtype = Filament . PixelDataType . UBYTE } const tex = Filament . Texture . Builder ( ) . width ( decodedpng . width ) . height ( decodedpng . height ) . levels ( nomips ? 1 : 255 ) . sampler ( Sampler . SAMPLER_2D ) . format ( texformat ) . rgbm ( rgbm ) . build ( engine ) ; const pixelbuffer = Filament . PixelBuffer ( decodedpng . data . getBytes ( ) , pbformat , pbtype ) ; tex . setImage ( engine , 0 , pixelbuffer ) ; if ( ! nomips ) { tex . generateMipmaps ( engine ) } return tex } ; Filament . 
_createTextureFromJpeg = function ( image , engine , options ) { options = options | | { } ; const srgb = ! ! options [ " srgb " ] ; const nomips = ! ! options [ " nomips " ] ; var context2d = document . createElement ( " canvas " ) . getContext ( " 2d " ) ; context2d . canvas . width = image . width ; context2d . canvas . height = image . height ; context2d . width = image . width ; context2d . height = image . height ; context2d . globalCompositeOperation = " copy " ; context2d . drawImage ( image , 0 , 0 ) ; var imgdata = context2d . getImageData ( 0 , 0 , image . width , image . height ) . data . buffer ; var decodedjpeg = new Uint8Array ( imgdata ) ; const TF = Filament . Texture $ InternalFormat ; const texformat = srgb ? TF . SRGB8_A8 : TF . RGBA8 ; const pbformat = Filament . PixelDataFormat . RGBA ; const pbtype = Filament . PixelDataType . UBYTE ; const tex = Filament . Texture . Builder ( ) . width ( image . width ) . height ( image . height ) . levels ( nomips ? 1 : 255 ) . sampler ( Filament . Texture $ Sampler . SAMPLER_2D ) . format ( texformat ) . build ( engine ) ; const pixelbuffer = Filament . PixelBuffer ( decodedjpeg , pbformat , pbtype ) ; tex . setImage ( engine , 0 , pixelbuffer ) ; if ( ! nomips ) { tex . generateMipmaps ( engine ) } return tex } ; Filament . getSupportedFormats = function ( ) { if ( Filament . supportedFormats ) { return Filament . supportedFormats } const options = { majorVersion : 2 , minorVersion : 0 } ; var ctx = document . createElement ( " canvas " ) . getContext ( " webgl2 " , options ) ; const result = { s3tc : false , astc : false , etc : false } ; var exts = ctx . getSupportedExtensions ( ) , nexts = exts . length , i ; for ( i = 0 ; i < nexts ; i + + ) { var ext = exts [ i ] ; if ( ext = = " WEBGL_compressed_texture_s3tc " ) { result . s3tc = true } else if ( ext = = " WEBGL_compressed_texture_astc " ) { result . astc = true } else if ( ext = = " WEBGL_compressed_texture_etc " ) { result . etc = true } } return Filament . supportedFormats = result } ; Filament . getSupportedFormatSuffix = function ( desiredFormats ) { desiredFormats = desiredFormats . split ( " " ) ; var exts = Filament . getSupportedFormats ( ) ; for ( var key in exts ) { if ( exts [ key ] & & desiredFormats . includes ( key ) ) { return " _ " + key } } return " " } ; <nl> + var Module = typeof Filament ! = = " undefined " ? Filament : { } ; var moduleOverrides = { } ; var key ; for ( key in Module ) { if ( Module . hasOwnProperty ( key ) ) { moduleOverrides [ key ] = Module [ key ] } } Module [ " arguments " ] = [ ] ; Module [ " thisProgram " ] = " . / this . program " ; Module [ " quit " ] = function ( status , toThrow ) { throw toThrow } ; Module [ " preRun " ] = [ ] ; Module [ " postRun " ] = [ ] ; var ENVIRONMENT_IS_WEB = false ; var ENVIRONMENT_IS_WORKER = false ; var ENVIRONMENT_IS_NODE = false ; var ENVIRONMENT_IS_SHELL = false ; ENVIRONMENT_IS_WEB = typeof window = = = " object " ; ENVIRONMENT_IS_WORKER = typeof importScripts = = = " function " ; ENVIRONMENT_IS_NODE = typeof process = = = " object " & & typeof require = = = " function " & & ! ENVIRONMENT_IS_WEB & & ! ENVIRONMENT_IS_WORKER ; ENVIRONMENT_IS_SHELL = ! ENVIRONMENT_IS_WEB & & ! ENVIRONMENT_IS_NODE & & ! 
ENVIRONMENT_IS_WORKER ; var scriptDirectory = " " ; function locateFile ( path ) { if ( Module [ " locateFile " ] ) { return Module [ " locateFile " ] ( path , scriptDirectory ) } else { return scriptDirectory + path } } if ( ENVIRONMENT_IS_NODE ) { scriptDirectory = __dirname + " / " ; var nodeFS ; var nodePath ; Module [ " read " ] = function shell_read ( filename , binary ) { var ret ; if ( ! nodeFS ) nodeFS = require ( " fs " ) ; if ( ! nodePath ) nodePath = require ( " path " ) ; filename = nodePath [ " normalize " ] ( filename ) ; ret = nodeFS [ " readFileSync " ] ( filename ) ; return binary ? ret : ret . toString ( ) } ; Module [ " readBinary " ] = function readBinary ( filename ) { var ret = Module [ " read " ] ( filename , true ) ; if ( ! ret . buffer ) { ret = new Uint8Array ( ret ) } assert ( ret . buffer ) ; return ret } ; if ( process [ " argv " ] . length > 1 ) { Module [ " thisProgram " ] = process [ " argv " ] [ 1 ] . replace ( / \ \ / g , " / " ) } Module [ " arguments " ] = process [ " argv " ] . slice ( 2 ) ; process [ " on " ] ( " uncaughtException " , function ( ex ) { if ( ! ( ex instanceof ExitStatus ) ) { throw ex } } ) ; process [ " on " ] ( " unhandledRejection " , abort ) ; Module [ " quit " ] = function ( status ) { process [ " exit " ] ( status ) } ; Module [ " inspect " ] = function ( ) { return " [ Emscripten Module object ] " } } else if ( ENVIRONMENT_IS_SHELL ) { if ( typeof read ! = " undefined " ) { Module [ " read " ] = function shell_read ( f ) { return read ( f ) } } Module [ " readBinary " ] = function readBinary ( f ) { var data ; if ( typeof readbuffer = = = " function " ) { return new Uint8Array ( readbuffer ( f ) ) } data = read ( f , " binary " ) ; assert ( typeof data = = = " object " ) ; return data } ; if ( typeof scriptArgs ! = " undefined " ) { Module [ " arguments " ] = scriptArgs } else if ( typeof arguments ! = " undefined " ) { Module [ " arguments " ] = arguments } if ( typeof quit = = = " function " ) { Module [ " quit " ] = function ( status ) { quit ( status ) } } } else if ( ENVIRONMENT_IS_WEB | | ENVIRONMENT_IS_WORKER ) { if ( ENVIRONMENT_IS_WORKER ) { scriptDirectory = self . location . href } else if ( document . currentScript ) { scriptDirectory = document . currentScript . src } if ( scriptDirectory . indexOf ( " blob : " ) ! = = 0 ) { scriptDirectory = scriptDirectory . substr ( 0 , scriptDirectory . lastIndexOf ( " / " ) + 1 ) } else { scriptDirectory = " " } Module [ " read " ] = function shell_read ( url ) { var xhr = new XMLHttpRequest ; xhr . open ( " GET " , url , false ) ; xhr . send ( null ) ; return xhr . responseText } ; if ( ENVIRONMENT_IS_WORKER ) { Module [ " readBinary " ] = function readBinary ( url ) { var xhr = new XMLHttpRequest ; xhr . open ( " GET " , url , false ) ; xhr . responseType = " arraybuffer " ; xhr . send ( null ) ; return new Uint8Array ( xhr . response ) } } Module [ " readAsync " ] = function readAsync ( url , onload , onerror ) { var xhr = new XMLHttpRequest ; xhr . open ( " GET " , url , true ) ; xhr . responseType = " arraybuffer " ; xhr . onload = function xhr_onload ( ) { if ( xhr . status = = 200 | | xhr . status = = 0 & & xhr . response ) { onload ( xhr . response ) ; return } onerror ( ) } ; xhr . onerror = onerror ; xhr . send ( null ) } ; Module [ " setWindowTitle " ] = function ( title ) { document . title = title } } else { } var out = Module [ " print " ] | | ( typeof console ! = = " undefined " ? console . log . bind ( console ) : typeof print ! = = " undefined " ? 
print : null ) ; var err = Module [ " printErr " ] | | ( typeof printErr ! = = " undefined " ? printErr : typeof console ! = = " undefined " & & console . warn . bind ( console ) | | out ) ; for ( key in moduleOverrides ) { if ( moduleOverrides . hasOwnProperty ( key ) ) { Module [ key ] = moduleOverrides [ key ] } } moduleOverrides = undefined ; var STACK_ALIGN = 16 ; function dynamicAlloc ( size ) { var ret = HEAP32 [ DYNAMICTOP_PTR > > 2 ] ; var end = ret + size + 15 & - 16 ; if ( end < = _emscripten_get_heap_size ( ) ) { HEAP32 [ DYNAMICTOP_PTR > > 2 ] = end } else { var success = _emscripten_resize_heap ( end ) ; if ( ! success ) return 0 } return ret } function getNativeTypeSize ( type ) { switch ( type ) { case " i1 " : case " i8 " : return 1 ; case " i16 " : return 2 ; case " i32 " : return 4 ; case " i64 " : return 8 ; case " float " : return 4 ; case " double " : return 8 ; default : { if ( type [ type . length - 1 ] = = = " * " ) { return 4 } else if ( type [ 0 ] = = = " i " ) { var bits = parseInt ( type . substr ( 1 ) ) ; assert ( bits % 8 = = = 0 , " getNativeTypeSize invalid bits " + bits + " , type " + type ) ; return bits / 8 } else { return 0 } } } } function warnOnce ( text ) { if ( ! warnOnce . shown ) warnOnce . shown = { } ; if ( ! warnOnce . shown [ text ] ) { warnOnce . shown [ text ] = 1 ; err ( text ) } } var asm2wasmImports = { " f64 - rem " : function ( x , y ) { return x % y } , " debugger " : function ( ) { debugger } } ; var jsCallStartIndex = 1 ; var functionPointers = new Array ( 0 ) ; var funcWrappers = { } ; function makeBigInt ( low , high , unsigned ) { return unsigned ? + ( low > > > 0 ) + + ( high > > > 0 ) * 4294967296 : + ( low > > > 0 ) + + ( high | 0 ) * 4294967296 } function dynCall ( sig , ptr , args ) { if ( args & & args . length ) { return Module [ " dynCall_ " + sig ] . apply ( null , [ ptr ] . concat ( args ) ) } else { return Module [ " dynCall_ " + sig ] . call ( null , ptr ) } } var tempRet0 = 0 ; var setTempRet0 = function ( value ) { tempRet0 = value } ; var getTempRet0 = function ( ) { return tempRet0 } ; if ( typeof WebAssembly ! = = " object " ) { err ( " no native wasm support detected " ) } var wasmMemory ; var wasmTable ; var ABORT = false ; var EXITSTATUS = 0 ; function assert ( condition , text ) { if ( ! condition ) { abort ( " Assertion failed : " + text ) } } function getCFunc ( ident ) { var func = Module [ " _ " + ident ] ; assert ( func , " Cannot call unknown function " + ident + " , make sure it is exported " ) ; return func } function ccall ( ident , returnType , argTypes , args , opts ) { var toC = { " string " : function ( str ) { var ret = 0 ; if ( str ! = = null & & str ! = = undefined & & str ! = = 0 ) { var len = ( str . length < < 2 ) + 1 ; ret = stackAlloc ( len ) ; stringToUTF8 ( str , ret , len ) } return ret } , " array " : function ( arr ) { var ret = stackAlloc ( arr . length ) ; writeArrayToMemory ( arr , ret ) ; return ret } } ; function convertReturnValue ( ret ) { if ( returnType = = = " string " ) return UTF8ToString ( ret ) ; if ( returnType = = = " boolean " ) return Boolean ( ret ) ; return ret } var func = getCFunc ( ident ) ; var cArgs = [ ] ; var stack = 0 ; if ( args ) { for ( var i = 0 ; i < args . length ; i + + ) { var converter = toC [ argTypes [ i ] ] ; if ( converter ) { if ( stack = = = 0 ) stack = stackSave ( ) ; cArgs [ i ] = converter ( args [ i ] ) } else { cArgs [ i ] = args [ i ] } } } var ret = func . apply ( null , cArgs ) ; ret = convertReturnValue ( ret ) ; if ( stack ! 
= = 0 ) stackRestore ( stack ) ; return ret } function setValue ( ptr , value , type , noSafe ) { type = type | | " i8 " ; if ( type . charAt ( type . length - 1 ) = = = " * " ) type = " i32 " ; switch ( type ) { case " i1 " : HEAP8 [ ptr > > 0 ] = value ; break ; case " i8 " : HEAP8 [ ptr > > 0 ] = value ; break ; case " i16 " : HEAP16 [ ptr > > 1 ] = value ; break ; case " i32 " : HEAP32 [ ptr > > 2 ] = value ; break ; case " i64 " : tempI64 = [ value > > > 0 , ( tempDouble = value , + Math_abs ( tempDouble ) > = 1 ? tempDouble > 0 ? ( Math_min ( + Math_floor ( tempDouble / 4294967296 ) , 4294967295 ) | 0 ) > > > 0 : ~ ~ + Math_ceil ( ( tempDouble - + ( ~ ~ tempDouble > > > 0 ) ) / 4294967296 ) > > > 0 : 0 ) ] , HEAP32 [ ptr > > 2 ] = tempI64 [ 0 ] , HEAP32 [ ptr + 4 > > 2 ] = tempI64 [ 1 ] ; break ; case " float " : HEAPF32 [ ptr > > 2 ] = value ; break ; case " double " : HEAPF64 [ ptr > > 3 ] = value ; break ; default : abort ( " invalid type for setValue : " + type ) } } var ALLOC_NONE = 3 ; function allocate ( slab , types , allocator , ptr ) { var zeroinit , size ; if ( typeof slab = = = " number " ) { zeroinit = true ; size = slab } else { zeroinit = false ; size = slab . length } var singleType = typeof types = = = " string " ? types : null ; var ret ; if ( allocator = = ALLOC_NONE ) { ret = ptr } else { ret = [ _malloc , stackAlloc , dynamicAlloc ] [ allocator ] ( Math . max ( size , singleType ? 1 : types . length ) ) } if ( zeroinit ) { var stop ; ptr = ret ; assert ( ( ret & 3 ) = = 0 ) ; stop = ret + ( size & ~ 3 ) ; for ( ; ptr < stop ; ptr + = 4 ) { HEAP32 [ ptr > > 2 ] = 0 } stop = ret + size ; while ( ptr < stop ) { HEAP8 [ ptr + + > > 0 ] = 0 } return ret } if ( singleType = = = " i8 " ) { if ( slab . subarray | | slab . slice ) { HEAPU8 . set ( slab , ret ) } else { HEAPU8 . set ( new Uint8Array ( slab ) , ret ) } return ret } var i = 0 , type , typeSize , previousType ; while ( i < size ) { var curr = slab [ i ] ; type = singleType | | types [ i ] ; if ( type = = = 0 ) { i + + ; continue } if ( type = = " i64 " ) type = " i32 " ; setValue ( ret + i , curr , type ) ; if ( previousType ! = = type ) { typeSize = getNativeTypeSize ( type ) ; previousType = type } i + = typeSize } return ret } var UTF8Decoder = typeof TextDecoder ! = = " undefined " ? new TextDecoder ( " utf8 " ) : undefined ; function UTF8ArrayToString ( u8Array , idx , maxBytesToRead ) { var endIdx = idx + maxBytesToRead ; var endPtr = idx ; while ( u8Array [ endPtr ] & & ! ( endPtr > = endIdx ) ) + + endPtr ; if ( endPtr - idx > 16 & & u8Array . subarray & & UTF8Decoder ) { return UTF8Decoder . decode ( u8Array . subarray ( idx , endPtr ) ) } else { var str = " " ; while ( idx < endPtr ) { var u0 = u8Array [ idx + + ] ; if ( ! ( u0 & 128 ) ) { str + = String . fromCharCode ( u0 ) ; continue } var u1 = u8Array [ idx + + ] & 63 ; if ( ( u0 & 224 ) = = 192 ) { str + = String . fromCharCode ( ( u0 & 31 ) < < 6 | u1 ) ; continue } var u2 = u8Array [ idx + + ] & 63 ; if ( ( u0 & 240 ) = = 224 ) { u0 = ( u0 & 15 ) < < 12 | u1 < < 6 | u2 } else { u0 = ( u0 & 7 ) < < 18 | u1 < < 12 | u2 < < 6 | u8Array [ idx + + ] & 63 } if ( u0 < 65536 ) { str + = String . fromCharCode ( u0 ) } else { var ch = u0 - 65536 ; str + = String . fromCharCode ( 55296 | ch > > 10 , 56320 | ch & 1023 ) } } } return str } function UTF8ToString ( ptr , maxBytesToRead ) { return ptr ? UTF8ArrayToString ( HEAPU8 , ptr , maxBytesToRead ) : " " } function stringToUTF8Array ( str , outU8Array , outIdx , maxBytesToWrite ) { if ( ! 
( maxBytesToWrite > 0 ) ) return 0 ; var startIdx = outIdx ; var endIdx = outIdx + maxBytesToWrite - 1 ; for ( var i = 0 ; i < str . length ; + + i ) { var u = str . charCodeAt ( i ) ; if ( u > = 55296 & & u < = 57343 ) { var u1 = str . charCodeAt ( + + i ) ; u = 65536 + ( ( u & 1023 ) < < 10 ) | u1 & 1023 } if ( u < = 127 ) { if ( outIdx > = endIdx ) break ; outU8Array [ outIdx + + ] = u } else if ( u < = 2047 ) { if ( outIdx + 1 > = endIdx ) break ; outU8Array [ outIdx + + ] = 192 | u > > 6 ; outU8Array [ outIdx + + ] = 128 | u & 63 } else if ( u < = 65535 ) { if ( outIdx + 2 > = endIdx ) break ; outU8Array [ outIdx + + ] = 224 | u > > 12 ; outU8Array [ outIdx + + ] = 128 | u > > 6 & 63 ; outU8Array [ outIdx + + ] = 128 | u & 63 } else { if ( outIdx + 3 > = endIdx ) break ; outU8Array [ outIdx + + ] = 240 | u > > 18 ; outU8Array [ outIdx + + ] = 128 | u > > 12 & 63 ; outU8Array [ outIdx + + ] = 128 | u > > 6 & 63 ; outU8Array [ outIdx + + ] = 128 | u & 63 } } outU8Array [ outIdx ] = 0 ; return outIdx - startIdx } function stringToUTF8 ( str , outPtr , maxBytesToWrite ) { return stringToUTF8Array ( str , HEAPU8 , outPtr , maxBytesToWrite ) } function lengthBytesUTF8 ( str ) { var len = 0 ; for ( var i = 0 ; i < str . length ; + + i ) { var u = str . charCodeAt ( i ) ; if ( u > = 55296 & & u < = 57343 ) u = 65536 + ( ( u & 1023 ) < < 10 ) | str . charCodeAt ( + + i ) & 1023 ; if ( u < = 127 ) + + len ; else if ( u < = 2047 ) len + = 2 ; else if ( u < = 65535 ) len + = 3 ; else len + = 4 } return len } var UTF16Decoder = typeof TextDecoder ! = = " undefined " ? new TextDecoder ( " utf - 16le " ) : undefined ; function writeArrayToMemory ( array , buffer ) { HEAP8 . set ( array , buffer ) } function writeAsciiToMemory ( str , buffer , dontAddNull ) { for ( var i = 0 ; i < str . length ; + + i ) { HEAP8 [ buffer + + > > 0 ] = str . charCodeAt ( i ) } if ( ! dontAddNull ) HEAP8 [ buffer > > 0 ] = 0 } function demangle ( func ) { return func } function demangleAll ( text ) { var regex = / __Z [ \ w \ d_ ] + / g ; return text . replace ( regex , function ( x ) { var y = demangle ( x ) ; return x = = = y ? x : y + " [ " + x + " ] " } ) } function jsStackTrace ( ) { var err = new Error ; if ( ! err . stack ) { try { throw new Error ( 0 ) } catch ( e ) { err = e } if ( ! err . stack ) { return " ( no stack trace available ) " } } return err . stack . 
toString ( ) } function stackTrace ( ) { var js = jsStackTrace ( ) ; if ( Module [ " extraStackTrace " ] ) js + = " \ n " + Module [ " extraStackTrace " ] ( ) ; return demangleAll ( js ) } var PAGE_SIZE = 16384 ; var WASM_PAGE_SIZE = 65536 ; function alignUp ( x , multiple ) { if ( x % multiple > 0 ) { x + = multiple - x % multiple } return x } var buffer , HEAP8 , HEAPU8 , HEAP16 , HEAPU16 , HEAP32 , HEAPU32 , HEAPF32 , HEAPF64 ; function updateGlobalBuffer ( buf ) { Module [ " buffer " ] = buffer = buf } function updateGlobalBufferViews ( ) { Module [ " HEAP8 " ] = HEAP8 = new Int8Array ( buffer ) ; Module [ " HEAP16 " ] = HEAP16 = new Int16Array ( buffer ) ; Module [ " HEAP32 " ] = HEAP32 = new Int32Array ( buffer ) ; Module [ " HEAPU8 " ] = HEAPU8 = new Uint8Array ( buffer ) ; Module [ " HEAPU16 " ] = HEAPU16 = new Uint16Array ( buffer ) ; Module [ " HEAPU32 " ] = HEAPU32 = new Uint32Array ( buffer ) ; Module [ " HEAPF32 " ] = HEAPF32 = new Float32Array ( buffer ) ; Module [ " HEAPF64 " ] = HEAPF64 = new Float64Array ( buffer ) } var STACK_BASE = 530144 , DYNAMIC_BASE = 5773024 , DYNAMICTOP_PTR = 529888 ; var TOTAL_STACK = 5242880 ; var TOTAL_MEMORY = Module [ " TOTAL_MEMORY " ] | | 16777216 ; if ( TOTAL_MEMORY < TOTAL_STACK ) err ( " TOTAL_MEMORY should be larger than TOTAL_STACK , was " + TOTAL_MEMORY + " ! ( TOTAL_STACK = " + TOTAL_STACK + " ) " ) ; if ( Module [ " buffer " ] ) { buffer = Module [ " buffer " ] } else { if ( typeof WebAssembly = = = " object " & & typeof WebAssembly . Memory = = = " function " ) { wasmMemory = new WebAssembly . Memory ( { " initial " : TOTAL_MEMORY / WASM_PAGE_SIZE } ) ; buffer = wasmMemory . buffer } else { buffer = new ArrayBuffer ( TOTAL_MEMORY ) } Module [ " buffer " ] = buffer } updateGlobalBufferViews ( ) ; HEAP32 [ DYNAMICTOP_PTR > > 2 ] = DYNAMIC_BASE ; function callRuntimeCallbacks ( callbacks ) { while ( callbacks . length > 0 ) { var callback = callbacks . shift ( ) ; if ( typeof callback = = " function " ) { callback ( ) ; continue } var func = callback . func ; if ( typeof func = = = " number " ) { if ( callback . arg = = = undefined ) { Module [ " dynCall_v " ] ( func ) } else { Module [ " dynCall_vi " ] ( func , callback . arg ) } } else { func ( callback . arg = = = undefined ? null : callback . arg ) } } } var __ATPRERUN__ = [ ] ; var __ATINIT__ = [ ] ; var __ATMAIN__ = [ ] ; var __ATPOSTRUN__ = [ ] ; var runtimeInitialized = false ; var runtimeExited = false ; function preRun ( ) { if ( Module [ " preRun " ] ) { if ( typeof Module [ " preRun " ] = = " function " ) Module [ " preRun " ] = [ Module [ " preRun " ] ] ; while ( Module [ " preRun " ] . length ) { addOnPreRun ( Module [ " preRun " ] . shift ( ) ) } } callRuntimeCallbacks ( __ATPRERUN__ ) } function ensureInitRuntime ( ) { if ( runtimeInitialized ) return ; runtimeInitialized = true ; if ( ! Module [ " noFSInit " ] & & ! FS . init . initialized ) FS . init ( ) ; TTY . init ( ) ; callRuntimeCallbacks ( __ATINIT__ ) } function preMain ( ) { FS . ignorePermissions = false ; callRuntimeCallbacks ( __ATMAIN__ ) } function exitRuntime ( ) { runtimeExited = true } function postRun ( ) { if ( Module [ " postRun " ] ) { if ( typeof Module [ " postRun " ] = = " function " ) Module [ " postRun " ] = [ Module [ " postRun " ] ] ; while ( Module [ " postRun " ] . length ) { addOnPostRun ( Module [ " postRun " ] . shift ( ) ) } } callRuntimeCallbacks ( __ATPOSTRUN__ ) } function addOnPreRun ( cb ) { __ATPRERUN__ . unshift ( cb ) } function addOnPostRun ( cb ) { __ATPOSTRUN__ . 
unshift ( cb ) } var Math_abs = Math . abs ; var Math_ceil = Math . ceil ; var Math_floor = Math . floor ; var Math_min = Math . min ; var runDependencies = 0 ; var runDependencyWatcher = null ; var dependenciesFulfilled = null ; function getUniqueRunDependency ( id ) { return id } function addRunDependency ( id ) { runDependencies + + ; if ( Module [ " monitorRunDependencies " ] ) { Module [ " monitorRunDependencies " ] ( runDependencies ) } } function removeRunDependency ( id ) { runDependencies - - ; if ( Module [ " monitorRunDependencies " ] ) { Module [ " monitorRunDependencies " ] ( runDependencies ) } if ( runDependencies = = 0 ) { if ( runDependencyWatcher ! = = null ) { clearInterval ( runDependencyWatcher ) ; runDependencyWatcher = null } if ( dependenciesFulfilled ) { var callback = dependenciesFulfilled ; dependenciesFulfilled = null ; callback ( ) } } } Module [ " preloadedImages " ] = { } ; Module [ " preloadedAudios " ] = { } ; var dataURIPrefix = " data : application / octet - stream ; base64 , " ; function isDataURI ( filename ) { return String . prototype . startsWith ? filename . startsWith ( dataURIPrefix ) : filename . indexOf ( dataURIPrefix ) = = = 0 } var wasmBinaryFile = " filament . wasm " ; if ( ! isDataURI ( wasmBinaryFile ) ) { wasmBinaryFile = locateFile ( wasmBinaryFile ) } function getBinary ( ) { try { if ( Module [ " wasmBinary " ] ) { return new Uint8Array ( Module [ " wasmBinary " ] ) } if ( Module [ " readBinary " ] ) { return Module [ " readBinary " ] ( wasmBinaryFile ) } else { throw " both async and sync fetching of the wasm failed " } } catch ( err ) { abort ( err ) } } function getBinaryPromise ( ) { if ( ! Module [ " wasmBinary " ] & & ( ENVIRONMENT_IS_WEB | | ENVIRONMENT_IS_WORKER ) & & typeof fetch = = = " function " ) { return fetch ( wasmBinaryFile , { credentials : " same - origin " } ) . then ( function ( response ) { if ( ! response [ " ok " ] ) { throw " failed to load wasm binary file at ' " + wasmBinaryFile + " ' " } return response [ " arrayBuffer " ] ( ) } ) . catch ( function ( ) { return getBinary ( ) } ) } return new Promise ( function ( resolve , reject ) { resolve ( getBinary ( ) ) } ) } function createWasm ( env ) { var info = { " env " : env , " global " : { " NaN " : NaN , Infinity : Infinity } , " global . Math " : Math , " asm2wasm " : asm2wasmImports } ; function receiveInstance ( instance , module ) { var exports = instance . exports ; Module [ " asm " ] = exports ; removeRunDependency ( " wasm - instantiate " ) } addRunDependency ( " wasm - instantiate " ) ; if ( Module [ " instantiateWasm " ] ) { try { return Module [ " instantiateWasm " ] ( info , receiveInstance ) } catch ( e ) { err ( " Module . instantiateWasm callback failed with error : " + e ) ; return false } } function receiveInstantiatedSource ( output ) { receiveInstance ( output [ " instance " ] ) } function instantiateArrayBuffer ( receiver ) { getBinaryPromise ( ) . then ( function ( binary ) { return WebAssembly . instantiate ( binary , info ) } ) . then ( receiver , function ( reason ) { err ( " failed to asynchronously prepare wasm : " + reason ) ; abort ( reason ) } ) } if ( ! Module [ " wasmBinary " ] & & typeof WebAssembly . instantiateStreaming = = = " function " & & ! isDataURI ( wasmBinaryFile ) & & typeof fetch = = = " function " ) { WebAssembly . instantiateStreaming ( fetch ( wasmBinaryFile , { credentials : " same - origin " } ) , info ) . 
then ( receiveInstantiatedSource , function ( reason ) { err ( " wasm streaming compile failed : " + reason ) ; err ( " falling back to ArrayBuffer instantiation " ) ; instantiateArrayBuffer ( receiveInstantiatedSource ) } ) } else { instantiateArrayBuffer ( receiveInstantiatedSource ) } return { } } Module [ " asm " ] = function ( global , env , providedBuffer ) { env [ " memory " ] = wasmMemory ; env [ " table " ] = wasmTable = new WebAssembly . Table ( { " initial " : 2128 , " maximum " : 2128 , " element " : " anyfunc " } ) ; env [ " __memory_base " ] = 1024 ; env [ " __table_base " ] = 0 ; var exports = createWasm ( env ) ; return exports } ; __ATINIT__ . push ( { func : function ( ) { globalCtors ( ) } } ) ; var tempDoublePtr = 530128 ; function ___atomic_compare_exchange_8 ( ptr , expected , desiredl , desiredh , weak , success_memmodel , failure_memmodel ) { var pl = HEAP32 [ ptr > > 2 ] ; var ph = HEAP32 [ ptr + 4 > > 2 ] ; var el = HEAP32 [ expected > > 2 ] ; var eh = HEAP32 [ expected + 4 > > 2 ] ; if ( pl = = = el & & ph = = = eh ) { HEAP32 [ ptr > > 2 ] = desiredl ; HEAP32 [ ptr + 4 > > 2 ] = desiredh ; return 1 } else { HEAP32 [ expected > > 2 ] = pl ; HEAP32 [ expected + 4 > > 2 ] = ph ; return 0 } } function ___atomic_fetch_sub_8 ( ptr , vall , valh , memmodel ) { var l = HEAP32 [ ptr > > 2 ] ; var h = HEAP32 [ ptr + 4 > > 2 ] ; HEAP32 [ ptr > > 2 ] = _i64Subtract ( l , h , vall , valh ) ; HEAP32 [ ptr + 4 > > 2 ] = getTempRet0 ( ) ; return ( setTempRet0 ( h ) , l ) | 0 } function __ZSt18uncaught_exceptionv ( ) { return ! ! __ZSt18uncaught_exceptionv . uncaught_exception } function ___cxa_free_exception ( ptr ) { try { return _free ( ptr ) } catch ( e ) { } } var EXCEPTIONS = { last : 0 , caught : [ ] , infos : { } , deAdjust : function ( adjusted ) { if ( ! adjusted | | EXCEPTIONS . infos [ adjusted ] ) return adjusted ; for ( var key in EXCEPTIONS . infos ) { var ptr = + key ; var adj = EXCEPTIONS . infos [ ptr ] . adjusted ; var len = adj . length ; for ( var i = 0 ; i < len ; i + + ) { if ( adj [ i ] = = = adjusted ) { return ptr } } } return adjusted } , addRef : function ( ptr ) { if ( ! ptr ) return ; var info = EXCEPTIONS . infos [ ptr ] ; info . refcount + + } , decRef : function ( ptr ) { if ( ! ptr ) return ; var info = EXCEPTIONS . infos [ ptr ] ; assert ( info . refcount > 0 ) ; info . refcount - - ; if ( info . refcount = = = 0 & & ! info . rethrown ) { if ( info . destructor ) { Module [ " dynCall_vi " ] ( info . destructor , ptr ) } delete EXCEPTIONS . infos [ ptr ] ; ___cxa_free_exception ( ptr ) } } , clearRef : function ( ptr ) { if ( ! ptr ) return ; var info = EXCEPTIONS . infos [ ptr ] ; info . refcount = 0 } } ; function ___cxa_begin_catch ( ptr ) { var info = EXCEPTIONS . infos [ ptr ] ; if ( info & & ! info . caught ) { info . caught = true ; __ZSt18uncaught_exceptionv . uncaught_exception - - } if ( info ) info . rethrown = false ; EXCEPTIONS . caught . push ( ptr ) ; EXCEPTIONS . addRef ( EXCEPTIONS . deAdjust ( ptr ) ) ; return ptr } function ___cxa_pure_virtual ( ) { ABORT = true ; throw " Pure virtual function called ! " } function ___resumeException ( ptr ) { if ( ! EXCEPTIONS . last ) { EXCEPTIONS . last = ptr } throw ptr } function ___cxa_find_matching_catch ( ) { var thrown = EXCEPTIONS . last ; if ( ! thrown ) { return ( setTempRet0 ( 0 ) , 0 ) | 0 } var info = EXCEPTIONS . infos [ thrown ] ; var throwntype = info . type ; if ( ! throwntype ) { return ( setTempRet0 ( 0 ) , thrown ) | 0 } var typeArray = Array . prototype . slice . 
call ( arguments ) ; var pointer = Module [ " ___cxa_is_pointer_type " ] ( throwntype ) ; if ( ! ___cxa_find_matching_catch . buffer ) ___cxa_find_matching_catch . buffer = _malloc ( 4 ) ; HEAP32 [ ___cxa_find_matching_catch . buffer > > 2 ] = thrown ; thrown = ___cxa_find_matching_catch . buffer ; for ( var i = 0 ; i < typeArray . length ; i + + ) { if ( typeArray [ i ] & & Module [ " ___cxa_can_catch " ] ( typeArray [ i ] , throwntype , thrown ) ) { thrown = HEAP32 [ thrown > > 2 ] ; info . adjusted . push ( thrown ) ; return ( setTempRet0 ( typeArray [ i ] ) , thrown ) | 0 } } thrown = HEAP32 [ thrown > > 2 ] ; return ( setTempRet0 ( throwntype ) , thrown ) | 0 } function ___gxx_personality_v0 ( ) { } function ___setErrNo ( value ) { if ( Module [ " ___errno_location " ] ) HEAP32 [ Module [ " ___errno_location " ] ( ) > > 2 ] = value ; return value } var PATH = { splitPath : function ( filename ) { var splitPathRe = / ^ ( \ / ? | ) ( [ \ s \ S ] * ? ) ( ( ? : \ . { 1 , 2 } | [ ^ \ / ] + ? | ) ( \ . [ ^ . \ / ] * | ) ) ( ? : [ \ / ] * ) $ / ; return splitPathRe . exec ( filename ) . slice ( 1 ) } , normalizeArray : function ( parts , allowAboveRoot ) { var up = 0 ; for ( var i = parts . length - 1 ; i > = 0 ; i - - ) { var last = parts [ i ] ; if ( last = = = " . " ) { parts . splice ( i , 1 ) } else if ( last = = = " . . " ) { parts . splice ( i , 1 ) ; up + + } else if ( up ) { parts . splice ( i , 1 ) ; up - - } } if ( allowAboveRoot ) { for ( ; up ; up - - ) { parts . unshift ( " . . " ) } } return parts } , normalize : function ( path ) { var isAbsolute = path . charAt ( 0 ) = = = " / " , trailingSlash = path . substr ( - 1 ) = = = " / " ; path = PATH . normalizeArray ( path . split ( " / " ) . filter ( function ( p ) { return ! ! p } ) , ! isAbsolute ) . join ( " / " ) ; if ( ! path & & ! isAbsolute ) { path = " . " } if ( path & & trailingSlash ) { path + = " / " } return ( isAbsolute ? " / " : " " ) + path } , dirname : function ( path ) { var result = PATH . splitPath ( path ) , root = result [ 0 ] , dir = result [ 1 ] ; if ( ! root & & ! dir ) { return " . " } if ( dir ) { dir = dir . substr ( 0 , dir . length - 1 ) } return root + dir } , basename : function ( path ) { if ( path = = = " / " ) return " / " ; var lastSlash = path . lastIndexOf ( " / " ) ; if ( lastSlash = = = - 1 ) return path ; return path . substr ( lastSlash + 1 ) } , extname : function ( path ) { return PATH . splitPath ( path ) [ 3 ] } , join : function ( ) { var paths = Array . prototype . slice . call ( arguments , 0 ) ; return PATH . normalize ( paths . join ( " / " ) ) } , join2 : function ( l , r ) { return PATH . normalize ( l + " / " + r ) } , resolve : function ( ) { var resolvedPath = " " , resolvedAbsolute = false ; for ( var i = arguments . length - 1 ; i > = - 1 & & ! resolvedAbsolute ; i - - ) { var path = i > = 0 ? arguments [ i ] : FS . cwd ( ) ; if ( typeof path ! = = " string " ) { throw new TypeError ( " Arguments to path . resolve must be strings " ) } else if ( ! path ) { return " " } resolvedPath = path + " / " + resolvedPath ; resolvedAbsolute = path . charAt ( 0 ) = = = " / " } resolvedPath = PATH . normalizeArray ( resolvedPath . split ( " / " ) . filter ( function ( p ) { return ! ! p } ) , ! resolvedAbsolute ) . join ( " / " ) ; return ( resolvedAbsolute ? " / " : " " ) + resolvedPath | | " . " } , relative : function ( from , to ) { from = PATH . resolve ( from ) . substr ( 1 ) ; to = PATH . resolve ( to ) . 
substr ( 1 ) ; function trim ( arr ) { var start = 0 ; for ( ; start < arr . length ; start + + ) { if ( arr [ start ] ! = = " " ) break } var end = arr . length - 1 ; for ( ; end > = 0 ; end - - ) { if ( arr [ end ] ! = = " " ) break } if ( start > end ) return [ ] ; return arr . slice ( start , end - start + 1 ) } var fromParts = trim ( from . split ( " / " ) ) ; var toParts = trim ( to . split ( " / " ) ) ; var length = Math . min ( fromParts . length , toParts . length ) ; var samePartsLength = length ; for ( var i = 0 ; i < length ; i + + ) { if ( fromParts [ i ] ! = = toParts [ i ] ) { samePartsLength = i ; break } } var outputParts = [ ] ; for ( var i = samePartsLength ; i < fromParts . length ; i + + ) { outputParts . push ( " . . " ) } outputParts = outputParts . concat ( toParts . slice ( samePartsLength ) ) ; return outputParts . join ( " / " ) } } ; var TTY = { ttys : [ ] , init : function ( ) { } , shutdown : function ( ) { } , register : function ( dev , ops ) { TTY . ttys [ dev ] = { input : [ ] , output : [ ] , ops : ops } ; FS . registerDevice ( dev , TTY . stream_ops ) } , stream_ops : { open : function ( stream ) { var tty = TTY . ttys [ stream . node . rdev ] ; if ( ! tty ) { throw new FS . ErrnoError ( ERRNO_CODES . ENODEV ) } stream . tty = tty ; stream . seekable = false } , close : function ( stream ) { stream . tty . ops . flush ( stream . tty ) } , flush : function ( stream ) { stream . tty . ops . flush ( stream . tty ) } , read : function ( stream , buffer , offset , length , pos ) { if ( ! stream . tty | | ! stream . tty . ops . get_char ) { throw new FS . ErrnoError ( ERRNO_CODES . ENXIO ) } var bytesRead = 0 ; for ( var i = 0 ; i < length ; i + + ) { var result ; try { result = stream . tty . ops . get_char ( stream . tty ) } catch ( e ) { throw new FS . ErrnoError ( ERRNO_CODES . EIO ) } if ( result = = = undefined & & bytesRead = = = 0 ) { throw new FS . ErrnoError ( ERRNO_CODES . EAGAIN ) } if ( result = = = null | | result = = = undefined ) break ; bytesRead + + ; buffer [ offset + i ] = result } if ( bytesRead ) { stream . node . timestamp = Date . now ( ) } return bytesRead } , write : function ( stream , buffer , offset , length , pos ) { if ( ! stream . tty | | ! stream . tty . ops . put_char ) { throw new FS . ErrnoError ( ERRNO_CODES . ENXIO ) } try { for ( var i = 0 ; i < length ; i + + ) { stream . tty . ops . put_char ( stream . tty , buffer [ offset + i ] ) } } catch ( e ) { throw new FS . ErrnoError ( ERRNO_CODES . EIO ) } if ( length ) { stream . node . timestamp = Date . now ( ) } return i } } , default_tty_ops : { get_char : function ( tty ) { if ( ! tty . input . length ) { var result = null ; if ( ENVIRONMENT_IS_NODE ) { var BUFSIZE = 256 ; var buf = new Buffer ( BUFSIZE ) ; var bytesRead = 0 ; var isPosixPlatform = process . platform ! = " win32 " ; var fd = process . stdin . fd ; if ( isPosixPlatform ) { var usingDevice = false ; try { fd = fs . openSync ( " / dev / stdin " , " r " ) ; usingDevice = true } catch ( e ) { } } try { bytesRead = fs . readSync ( fd , buf , 0 , BUFSIZE , null ) } catch ( e ) { if ( e . toString ( ) . indexOf ( " EOF " ) ! = - 1 ) bytesRead = 0 ; else throw e } if ( usingDevice ) { fs . closeSync ( fd ) } if ( bytesRead > 0 ) { result = buf . slice ( 0 , bytesRead ) . toString ( " utf - 8 " ) } else { result = null } } else if ( typeof window ! = " undefined " & & typeof window . prompt = = " function " ) { result = window . prompt ( " Input : " ) ; if ( result ! 
= = null ) { result + = " \ n " } } else if ( typeof readline = = " function " ) { result = readline ( ) ; if ( result ! = = null ) { result + = " \ n " } } if ( ! result ) { return null } tty . input = intArrayFromString ( result , true ) } return tty . input . shift ( ) } , put_char : function ( tty , val ) { if ( val = = = null | | val = = = 10 ) { out ( UTF8ArrayToString ( tty . output , 0 ) ) ; tty . output = [ ] } else { if ( val ! = 0 ) tty . output . push ( val ) } } , flush : function ( tty ) { if ( tty . output & & tty . output . length > 0 ) { out ( UTF8ArrayToString ( tty . output , 0 ) ) ; tty . output = [ ] } } } , default_tty1_ops : { put_char : function ( tty , val ) { if ( val = = = null | | val = = = 10 ) { err ( UTF8ArrayToString ( tty . output , 0 ) ) ; tty . output = [ ] } else { if ( val ! = 0 ) tty . output . push ( val ) } } , flush : function ( tty ) { if ( tty . output & & tty . output . length > 0 ) { err ( UTF8ArrayToString ( tty . output , 0 ) ) ; tty . output = [ ] } } } } ; var MEMFS = { ops_table : null , mount : function ( mount ) { return MEMFS . createNode ( null , " / " , 16384 | 511 , 0 ) } , createNode : function ( parent , name , mode , dev ) { if ( FS . isBlkdev ( mode ) | | FS . isFIFO ( mode ) ) { throw new FS . ErrnoError ( ERRNO_CODES . EPERM ) } if ( ! MEMFS . ops_table ) { MEMFS . ops_table = { dir : { node : { getattr : MEMFS . node_ops . getattr , setattr : MEMFS . node_ops . setattr , lookup : MEMFS . node_ops . lookup , mknod : MEMFS . node_ops . mknod , rename : MEMFS . node_ops . rename , unlink : MEMFS . node_ops . unlink , rmdir : MEMFS . node_ops . rmdir , readdir : MEMFS . node_ops . readdir , symlink : MEMFS . node_ops . symlink } , stream : { llseek : MEMFS . stream_ops . llseek } } , file : { node : { getattr : MEMFS . node_ops . getattr , setattr : MEMFS . node_ops . setattr } , stream : { llseek : MEMFS . stream_ops . llseek , read : MEMFS . stream_ops . read , write : MEMFS . stream_ops . write , allocate : MEMFS . stream_ops . allocate , mmap : MEMFS . stream_ops . mmap , msync : MEMFS . stream_ops . msync } } , link : { node : { getattr : MEMFS . node_ops . getattr , setattr : MEMFS . node_ops . setattr , readlink : MEMFS . node_ops . readlink } , stream : { } } , chrdev : { node : { getattr : MEMFS . node_ops . getattr , setattr : MEMFS . node_ops . setattr } , stream : FS . chrdev_stream_ops } } } var node = FS . createNode ( parent , name , mode , dev ) ; if ( FS . isDir ( node . mode ) ) { node . node_ops = MEMFS . ops_table . dir . node ; node . stream_ops = MEMFS . ops_table . dir . stream ; node . contents = { } } else if ( FS . isFile ( node . mode ) ) { node . node_ops = MEMFS . ops_table . file . node ; node . stream_ops = MEMFS . ops_table . file . stream ; node . usedBytes = 0 ; node . contents = null } else if ( FS . isLink ( node . mode ) ) { node . node_ops = MEMFS . ops_table . link . node ; node . stream_ops = MEMFS . ops_table . link . stream } else if ( FS . isChrdev ( node . mode ) ) { node . node_ops = MEMFS . ops_table . chrdev . node ; node . stream_ops = MEMFS . ops_table . chrdev . stream } node . timestamp = Date . now ( ) ; if ( parent ) { parent . contents [ name ] = node } return node } , getFileDataAsRegularArray : function ( node ) { if ( node . contents & & node . contents . subarray ) { var arr = [ ] ; for ( var i = 0 ; i < node . usedBytes ; + + i ) arr . push ( node . contents [ i ] ) ; return arr } return node . contents } , getFileDataAsTypedArray : function ( node ) { if ( ! node . 
contents ) return new Uint8Array ; if ( node . contents . subarray ) return node . contents . subarray ( 0 , node . usedBytes ) ; return new Uint8Array ( node . contents ) } , expandFileStorage : function ( node , newCapacity ) { var prevCapacity = node . contents ? node . contents . length : 0 ; if ( prevCapacity > = newCapacity ) return ; var CAPACITY_DOUBLING_MAX = 1024 * 1024 ; newCapacity = Math . max ( newCapacity , prevCapacity * ( prevCapacity < CAPACITY_DOUBLING_MAX ? 2 : 1 . 125 ) | 0 ) ; if ( prevCapacity ! = 0 ) newCapacity = Math . max ( newCapacity , 256 ) ; var oldContents = node . contents ; node . contents = new Uint8Array ( newCapacity ) ; if ( node . usedBytes > 0 ) node . contents . set ( oldContents . subarray ( 0 , node . usedBytes ) , 0 ) ; return } , resizeFileStorage : function ( node , newSize ) { if ( node . usedBytes = = newSize ) return ; if ( newSize = = 0 ) { node . contents = null ; node . usedBytes = 0 ; return } if ( ! node . contents | | node . contents . subarray ) { var oldContents = node . contents ; node . contents = new Uint8Array ( new ArrayBuffer ( newSize ) ) ; if ( oldContents ) { node . contents . set ( oldContents . subarray ( 0 , Math . min ( newSize , node . usedBytes ) ) ) } node . usedBytes = newSize ; return } if ( ! node . contents ) node . contents = [ ] ; if ( node . contents . length > newSize ) node . contents . length = newSize ; else while ( node . contents . length < newSize ) node . contents . push ( 0 ) ; node . usedBytes = newSize } , node_ops : { getattr : function ( node ) { var attr = { } ; attr . dev = FS . isChrdev ( node . mode ) ? node . id : 1 ; attr . ino = node . id ; attr . mode = node . mode ; attr . nlink = 1 ; attr . uid = 0 ; attr . gid = 0 ; attr . rdev = node . rdev ; if ( FS . isDir ( node . mode ) ) { attr . size = 4096 } else if ( FS . isFile ( node . mode ) ) { attr . size = node . usedBytes } else if ( FS . isLink ( node . mode ) ) { attr . size = node . link . length } else { attr . size = 0 } attr . atime = new Date ( node . timestamp ) ; attr . mtime = new Date ( node . timestamp ) ; attr . ctime = new Date ( node . timestamp ) ; attr . blksize = 4096 ; attr . blocks = Math . ceil ( attr . size / attr . blksize ) ; return attr } , setattr : function ( node , attr ) { if ( attr . mode ! = = undefined ) { node . mode = attr . mode } if ( attr . timestamp ! = = undefined ) { node . timestamp = attr . timestamp } if ( attr . size ! = = undefined ) { MEMFS . resizeFileStorage ( node , attr . size ) } } , lookup : function ( parent , name ) { throw FS . genericErrors [ ERRNO_CODES . ENOENT ] } , mknod : function ( parent , name , mode , dev ) { return MEMFS . createNode ( parent , name , mode , dev ) } , rename : function ( old_node , new_dir , new_name ) { if ( FS . isDir ( old_node . mode ) ) { var new_node ; try { new_node = FS . lookupNode ( new_dir , new_name ) } catch ( e ) { } if ( new_node ) { for ( var i in new_node . contents ) { throw new FS . ErrnoError ( ERRNO_CODES . ENOTEMPTY ) } } } delete old_node . parent . contents [ old_node . name ] ; old_node . name = new_name ; new_dir . contents [ new_name ] = old_node ; old_node . parent = new_dir } , unlink : function ( parent , name ) { delete parent . contents [ name ] } , rmdir : function ( parent , name ) { var node = FS . lookupNode ( parent , name ) ; for ( var i in node . contents ) { throw new FS . ErrnoError ( ERRNO_CODES . ENOTEMPTY ) } delete parent . contents [ name ] } , readdir : function ( node ) { var entries = [ " . " , " . . 
" ] ; for ( var key in node . contents ) { if ( ! node . contents . hasOwnProperty ( key ) ) { continue } entries . push ( key ) } return entries } , symlink : function ( parent , newname , oldpath ) { var node = MEMFS . createNode ( parent , newname , 511 | 40960 , 0 ) ; node . link = oldpath ; return node } , readlink : function ( node ) { if ( ! FS . isLink ( node . mode ) ) { throw new FS . ErrnoError ( ERRNO_CODES . EINVAL ) } return node . link } } , stream_ops : { read : function ( stream , buffer , offset , length , position ) { var contents = stream . node . contents ; if ( position > = stream . node . usedBytes ) return 0 ; var size = Math . min ( stream . node . usedBytes - position , length ) ; if ( size > 8 & & contents . subarray ) { buffer . set ( contents . subarray ( position , position + size ) , offset ) } else { for ( var i = 0 ; i < size ; i + + ) buffer [ offset + i ] = contents [ position + i ] } return size } , write : function ( stream , buffer , offset , length , position , canOwn ) { canOwn = false ; if ( ! length ) return 0 ; var node = stream . node ; node . timestamp = Date . now ( ) ; if ( buffer . subarray & & ( ! node . contents | | node . contents . subarray ) ) { if ( canOwn ) { node . contents = buffer . subarray ( offset , offset + length ) ; node . usedBytes = length ; return length } else if ( node . usedBytes = = = 0 & & position = = = 0 ) { node . contents = new Uint8Array ( buffer . subarray ( offset , offset + length ) ) ; node . usedBytes = length ; return length } else if ( position + length < = node . usedBytes ) { node . contents . set ( buffer . subarray ( offset , offset + length ) , position ) ; return length } } MEMFS . expandFileStorage ( node , position + length ) ; if ( node . contents . subarray & & buffer . subarray ) node . contents . set ( buffer . subarray ( offset , offset + length ) , position ) ; else { for ( var i = 0 ; i < length ; i + + ) { node . contents [ position + i ] = buffer [ offset + i ] } } node . usedBytes = Math . max ( node . usedBytes , position + length ) ; return length } , llseek : function ( stream , offset , whence ) { var position = offset ; if ( whence = = = 1 ) { position + = stream . position } else if ( whence = = = 2 ) { if ( FS . isFile ( stream . node . mode ) ) { position + = stream . node . usedBytes } } if ( position < 0 ) { throw new FS . ErrnoError ( ERRNO_CODES . EINVAL ) } return position } , allocate : function ( stream , offset , length ) { MEMFS . expandFileStorage ( stream . node , offset + length ) ; stream . node . usedBytes = Math . max ( stream . node . usedBytes , offset + length ) } , mmap : function ( stream , buffer , offset , length , position , prot , flags ) { if ( ! FS . isFile ( stream . node . mode ) ) { throw new FS . ErrnoError ( ERRNO_CODES . ENODEV ) } var ptr ; var allocated ; var contents = stream . node . contents ; if ( ! ( flags & 2 ) & & ( contents . buffer = = = buffer | | contents . buffer = = = buffer . buffer ) ) { allocated = false ; ptr = contents . byteOffset } else { if ( position > 0 | | position + length < stream . node . usedBytes ) { if ( contents . subarray ) { contents = contents . subarray ( position , position + length ) } else { contents = Array . prototype . slice . call ( contents , position , position + length ) } } allocated = true ; ptr = _malloc ( length ) ; if ( ! ptr ) { throw new FS . ErrnoError ( ERRNO_CODES . ENOMEM ) } buffer . 
set ( contents , ptr ) } return { ptr : ptr , allocated : allocated } } , msync : function ( stream , buffer , offset , length , mmapFlags ) { if ( ! FS . isFile ( stream . node . mode ) ) { throw new FS . ErrnoError ( ERRNO_CODES . ENODEV ) } if ( mmapFlags & 2 ) { return 0 } var bytesWritten = MEMFS . stream_ops . write ( stream , buffer , 0 , length , offset , false ) ; return 0 } } } ; var IDBFS = { dbs : { } , indexedDB : function ( ) { if ( typeof indexedDB ! = = " undefined " ) return indexedDB ; var ret = null ; if ( typeof window = = = " object " ) ret = window . indexedDB | | window . mozIndexedDB | | window . webkitIndexedDB | | window . msIndexedDB ; assert ( ret , " IDBFS used , but indexedDB not supported " ) ; return ret } , DB_VERSION : 21 , DB_STORE_NAME : " FILE_DATA " , mount : function ( mount ) { return MEMFS . mount . apply ( null , arguments ) } , syncfs : function ( mount , populate , callback ) { IDBFS . getLocalSet ( mount , function ( err , local ) { if ( err ) return callback ( err ) ; IDBFS . getRemoteSet ( mount , function ( err , remote ) { if ( err ) return callback ( err ) ; var src = populate ? remote : local ; var dst = populate ? local : remote ; IDBFS . reconcile ( src , dst , callback ) } ) } ) } , getDB : function ( name , callback ) { var db = IDBFS . dbs [ name ] ; if ( db ) { return callback ( null , db ) } var req ; try { req = IDBFS . indexedDB ( ) . open ( name , IDBFS . DB_VERSION ) } catch ( e ) { return callback ( e ) } if ( ! req ) { return callback ( " Unable to connect to IndexedDB " ) } req . onupgradeneeded = function ( e ) { var db = e . target . result ; var transaction = e . target . transaction ; var fileStore ; if ( db . objectStoreNames . contains ( IDBFS . DB_STORE_NAME ) ) { fileStore = transaction . objectStore ( IDBFS . DB_STORE_NAME ) } else { fileStore = db . createObjectStore ( IDBFS . DB_STORE_NAME ) } if ( ! fileStore . indexNames . contains ( " timestamp " ) ) { fileStore . createIndex ( " timestamp " , " timestamp " , { unique : false } ) } } ; req . onsuccess = function ( ) { db = req . result ; IDBFS . dbs [ name ] = db ; callback ( null , db ) } ; req . onerror = function ( e ) { callback ( this . error ) ; e . preventDefault ( ) } } , getLocalSet : function ( mount , callback ) { var entries = { } ; function isRealDir ( p ) { return p ! = = " . " & & p ! = = " . . " } function toAbsolute ( root ) { return function ( p ) { return PATH . join2 ( root , p ) } } var check = FS . readdir ( mount . mountpoint ) . filter ( isRealDir ) . map ( toAbsolute ( mount . mountpoint ) ) ; while ( check . length ) { var path = check . pop ( ) ; var stat ; try { stat = FS . stat ( path ) } catch ( e ) { return callback ( e ) } if ( FS . isDir ( stat . mode ) ) { check . push . apply ( check , FS . readdir ( path ) . filter ( isRealDir ) . map ( toAbsolute ( path ) ) ) } entries [ path ] = { timestamp : stat . mtime } } return callback ( null , { type : " local " , entries : entries } ) } , getRemoteSet : function ( mount , callback ) { var entries = { } ; IDBFS . getDB ( mount . mountpoint , function ( err , db ) { if ( err ) return callback ( err ) ; try { var transaction = db . transaction ( [ IDBFS . DB_STORE_NAME ] , " readonly " ) ; transaction . onerror = function ( e ) { callback ( this . error ) ; e . preventDefault ( ) } ; var store = transaction . objectStore ( IDBFS . DB_STORE_NAME ) ; var index = store . index ( " timestamp " ) ; index . openKeyCursor ( ) . onsuccess = function ( event ) { var cursor = event . target . 
result ; if ( ! cursor ) { return callback ( null , { type : " remote " , db : db , entries : entries } ) } entries [ cursor . primaryKey ] = { timestamp : cursor . key } ; cursor . continue ( ) } } catch ( e ) { return callback ( e ) } } ) } , loadLocalEntry : function ( path , callback ) { var stat , node ; try { var lookup = FS . lookupPath ( path ) ; node = lookup . node ; stat = FS . stat ( path ) } catch ( e ) { return callback ( e ) } if ( FS . isDir ( stat . mode ) ) { return callback ( null , { timestamp : stat . mtime , mode : stat . mode } ) } else if ( FS . isFile ( stat . mode ) ) { node . contents = MEMFS . getFileDataAsTypedArray ( node ) ; return callback ( null , { timestamp : stat . mtime , mode : stat . mode , contents : node . contents } ) } else { return callback ( new Error ( " node type not supported " ) ) } } , storeLocalEntry : function ( path , entry , callback ) { try { if ( FS . isDir ( entry . mode ) ) { FS . mkdir ( path , entry . mode ) } else if ( FS . isFile ( entry . mode ) ) { FS . writeFile ( path , entry . contents , { canOwn : true } ) } else { return callback ( new Error ( " node type not supported " ) ) } FS . chmod ( path , entry . mode ) ; FS . utime ( path , entry . timestamp , entry . timestamp ) } catch ( e ) { return callback ( e ) } callback ( null ) } , removeLocalEntry : function ( path , callback ) { try { var lookup = FS . lookupPath ( path ) ; var stat = FS . stat ( path ) ; if ( FS . isDir ( stat . mode ) ) { FS . rmdir ( path ) } else if ( FS . isFile ( stat . mode ) ) { FS . unlink ( path ) } } catch ( e ) { return callback ( e ) } callback ( null ) } , loadRemoteEntry : function ( store , path , callback ) { var req = store . get ( path ) ; req . onsuccess = function ( event ) { callback ( null , event . target . result ) } ; req . onerror = function ( e ) { callback ( this . error ) ; e . preventDefault ( ) } } , storeRemoteEntry : function ( store , path , entry , callback ) { var req = store . put ( entry , path ) ; req . onsuccess = function ( ) { callback ( null ) } ; req . onerror = function ( e ) { callback ( this . error ) ; e . preventDefault ( ) } } , removeRemoteEntry : function ( store , path , callback ) { var req = store . delete ( path ) ; req . onsuccess = function ( ) { callback ( null ) } ; req . onerror = function ( e ) { callback ( this . error ) ; e . preventDefault ( ) } } , reconcile : function ( src , dst , callback ) { var total = 0 ; var create = [ ] ; Object . keys ( src . entries ) . forEach ( function ( key ) { var e = src . entries [ key ] ; var e2 = dst . entries [ key ] ; if ( ! e2 | | e . timestamp > e2 . timestamp ) { create . push ( key ) ; total + + } } ) ; var remove = [ ] ; Object . keys ( dst . entries ) . forEach ( function ( key ) { var e = dst . entries [ key ] ; var e2 = src . entries [ key ] ; if ( ! e2 ) { remove . push ( key ) ; total + + } } ) ; if ( ! total ) { return callback ( null ) } var errored = false ; var completed = 0 ; var db = src . type = = = " remote " ? src . db : dst . db ; var transaction = db . transaction ( [ IDBFS . DB_STORE_NAME ] , " readwrite " ) ; var store = transaction . objectStore ( IDBFS . DB_STORE_NAME ) ; function done ( err ) { if ( err ) { if ( ! done . errored ) { done . errored = true ; return callback ( err ) } return } if ( + + completed > = total ) { return callback ( null ) } } transaction . onerror = function ( e ) { done ( this . error ) ; e . preventDefault ( ) } ; create . sort ( ) . forEach ( function ( path ) { if ( dst . 
type = = = " local " ) { IDBFS . loadRemoteEntry ( store , path , function ( err , entry ) { if ( err ) return done ( err ) ; IDBFS . storeLocalEntry ( path , entry , done ) } ) } else { IDBFS . loadLocalEntry ( path , function ( err , entry ) { if ( err ) return done ( err ) ; IDBFS . storeRemoteEntry ( store , path , entry , done ) } ) } } ) ; remove . sort ( ) . reverse ( ) . forEach ( function ( path ) { if ( dst . type = = = " local " ) { IDBFS . removeLocalEntry ( path , done ) } else { IDBFS . removeRemoteEntry ( store , path , done ) } } ) } } ; var NODEFS = { isWindows : false , staticInit : function ( ) { NODEFS . isWindows = ! ! process . platform . match ( / ^ win / ) ; var flags = process [ " binding " ] ( " constants " ) ; if ( flags [ " fs " ] ) { flags = flags [ " fs " ] } NODEFS . flagsForNodeMap = { 1024 : flags [ " O_APPEND " ] , 64 : flags [ " O_CREAT " ] , 128 : flags [ " O_EXCL " ] , 0 : flags [ " O_RDONLY " ] , 2 : flags [ " O_RDWR " ] , 4096 : flags [ " O_SYNC " ] , 512 : flags [ " O_TRUNC " ] , 1 : flags [ " O_WRONLY " ] } } , bufferFrom : function ( arrayBuffer ) { return Buffer . alloc ? Buffer . from ( arrayBuffer ) : new Buffer ( arrayBuffer ) } , mount : function ( mount ) { assert ( ENVIRONMENT_IS_NODE ) ; return NODEFS . createNode ( null , " / " , NODEFS . getMode ( mount . opts . root ) , 0 ) } , createNode : function ( parent , name , mode , dev ) { if ( ! FS . isDir ( mode ) & & ! FS . isFile ( mode ) & & ! FS . isLink ( mode ) ) { throw new FS . ErrnoError ( ERRNO_CODES . EINVAL ) } var node = FS . createNode ( parent , name , mode ) ; node . node_ops = NODEFS . node_ops ; node . stream_ops = NODEFS . stream_ops ; return node } , getMode : function ( path ) { var stat ; try { stat = fs . lstatSync ( path ) ; if ( NODEFS . isWindows ) { stat . mode = stat . mode | ( stat . mode & 292 ) > > 2 } } catch ( e ) { if ( ! e . code ) throw e ; throw new FS . ErrnoError ( ERRNO_CODES [ e . code ] ) } return stat . mode } , realPath : function ( node ) { var parts = [ ] ; while ( node . parent ! = = node ) { parts . push ( node . name ) ; node = node . parent } parts . push ( node . mount . opts . root ) ; parts . reverse ( ) ; return PATH . join . apply ( null , parts ) } , flagsForNode : function ( flags ) { flags & = ~ 2097152 ; flags & = ~ 2048 ; flags & = ~ 32768 ; flags & = ~ 524288 ; var newFlags = 0 ; for ( var k in NODEFS . flagsForNodeMap ) { if ( flags & k ) { newFlags | = NODEFS . flagsForNodeMap [ k ] ; flags ^ = k } } if ( ! flags ) { return newFlags } else { throw new FS . ErrnoError ( ERRNO_CODES . EINVAL ) } } , node_ops : { getattr : function ( node ) { var path = NODEFS . realPath ( node ) ; var stat ; try { stat = fs . lstatSync ( path ) } catch ( e ) { if ( ! e . code ) throw e ; throw new FS . ErrnoError ( ERRNO_CODES [ e . code ] ) } if ( NODEFS . isWindows & & ! stat . blksize ) { stat . blksize = 4096 } if ( NODEFS . isWindows & & ! stat . blocks ) { stat . blocks = ( stat . size + stat . blksize - 1 ) / stat . blksize | 0 } return { dev : stat . dev , ino : stat . ino , mode : stat . mode , nlink : stat . nlink , uid : stat . uid , gid : stat . gid , rdev : stat . rdev , size : stat . size , atime : stat . atime , mtime : stat . mtime , ctime : stat . ctime , blksize : stat . blksize , blocks : stat . blocks } } , setattr : function ( node , attr ) { var path = NODEFS . realPath ( node ) ; try { if ( attr . mode ! = = undefined ) { fs . chmodSync ( path , attr . mode ) ; node . mode = attr . mode } if ( attr . timestamp ! 
= = undefined ) { var date = new Date ( attr . timestamp ) ; fs . utimesSync ( path , date , date ) } if ( attr . size ! = = undefined ) { fs . truncateSync ( path , attr . size ) } } catch ( e ) { if ( ! e . code ) throw e ; throw new FS . ErrnoError ( ERRNO_CODES [ e . code ] ) } } , lookup : function ( parent , name ) { var path = PATH . join2 ( NODEFS . realPath ( parent ) , name ) ; var mode = NODEFS . getMode ( path ) ; return NODEFS . createNode ( parent , name , mode ) } , mknod : function ( parent , name , mode , dev ) { var node = NODEFS . createNode ( parent , name , mode , dev ) ; var path = NODEFS . realPath ( node ) ; try { if ( FS . isDir ( node . mode ) ) { fs . mkdirSync ( path , node . mode ) } else { fs . writeFileSync ( path , " " , { mode : node . mode } ) } } catch ( e ) { if ( ! e . code ) throw e ; throw new FS . ErrnoError ( ERRNO_CODES [ e . code ] ) } return node } , rename : function ( oldNode , newDir , newName ) { var oldPath = NODEFS . realPath ( oldNode ) ; var newPath = PATH . join2 ( NODEFS . realPath ( newDir ) , newName ) ; try { fs . renameSync ( oldPath , newPath ) } catch ( e ) { if ( ! e . code ) throw e ; throw new FS . ErrnoError ( ERRNO_CODES [ e . code ] ) } } , unlink : function ( parent , name ) { var path = PATH . join2 ( NODEFS . realPath ( parent ) , name ) ; try { fs . unlinkSync ( path ) } catch ( e ) { if ( ! e . code ) throw e ; throw new FS . ErrnoError ( ERRNO_CODES [ e . code ] ) } } , rmdir : function ( parent , name ) { var path = PATH . join2 ( NODEFS . realPath ( parent ) , name ) ; try { fs . rmdirSync ( path ) } catch ( e ) { if ( ! e . code ) throw e ; throw new FS . ErrnoError ( ERRNO_CODES [ e . code ] ) } } , readdir : function ( node ) { var path = NODEFS . realPath ( node ) ; try { return fs . readdirSync ( path ) } catch ( e ) { if ( ! e . code ) throw e ; throw new FS . ErrnoError ( ERRNO_CODES [ e . code ] ) } } , symlink : function ( parent , newName , oldPath ) { var newPath = PATH . join2 ( NODEFS . realPath ( parent ) , newName ) ; try { fs . symlinkSync ( oldPath , newPath ) } catch ( e ) { if ( ! e . code ) throw e ; throw new FS . ErrnoError ( ERRNO_CODES [ e . code ] ) } } , readlink : function ( node ) { var path = NODEFS . realPath ( node ) ; try { path = fs . readlinkSync ( path ) ; path = NODEJS_PATH . relative ( NODEJS_PATH . resolve ( node . mount . opts . root ) , path ) ; return path } catch ( e ) { if ( ! e . code ) throw e ; throw new FS . ErrnoError ( ERRNO_CODES [ e . code ] ) } } } , stream_ops : { open : function ( stream ) { var path = NODEFS . realPath ( stream . node ) ; try { if ( FS . isFile ( stream . node . mode ) ) { stream . nfd = fs . openSync ( path , NODEFS . flagsForNode ( stream . flags ) ) } } catch ( e ) { if ( ! e . code ) throw e ; throw new FS . ErrnoError ( ERRNO_CODES [ e . code ] ) } } , close : function ( stream ) { try { if ( FS . isFile ( stream . node . mode ) & & stream . nfd ) { fs . closeSync ( stream . nfd ) } } catch ( e ) { if ( ! e . code ) throw e ; throw new FS . ErrnoError ( ERRNO_CODES [ e . code ] ) } } , read : function ( stream , buffer , offset , length , position ) { if ( length = = = 0 ) return 0 ; try { return fs . readSync ( stream . nfd , NODEFS . bufferFrom ( buffer . buffer ) , offset , length , position ) } catch ( e ) { throw new FS . ErrnoError ( ERRNO_CODES [ e . code ] ) } } , write : function ( stream , buffer , offset , length , position ) { try { return fs . writeSync ( stream . nfd , NODEFS . bufferFrom ( buffer . 
buffer ) , offset , length , position ) } catch ( e ) { throw new FS . ErrnoError ( ERRNO_CODES [ e . code ] ) } } , llseek : function ( stream , offset , whence ) { var position = offset ; if ( whence = = = 1 ) { position + = stream . position } else if ( whence = = = 2 ) { if ( FS . isFile ( stream . node . mode ) ) { try { var stat = fs . fstatSync ( stream . nfd ) ; position + = stat . size } catch ( e ) { throw new FS . ErrnoError ( ERRNO_CODES [ e . code ] ) } } } if ( position < 0 ) { throw new FS . ErrnoError ( ERRNO_CODES . EINVAL ) } return position } } } ; var WORKERFS = { DIR_MODE : 16895 , FILE_MODE : 33279 , reader : null , mount : function ( mount ) { assert ( ENVIRONMENT_IS_WORKER ) ; if ( ! WORKERFS . reader ) WORKERFS . reader = new FileReaderSync ; var root = WORKERFS . createNode ( null , " / " , WORKERFS . DIR_MODE , 0 ) ; var createdParents = { } ; function ensureParent ( path ) { var parts = path . split ( " / " ) ; var parent = root ; for ( var i = 0 ; i < parts . length - 1 ; i + + ) { var curr = parts . slice ( 0 , i + 1 ) . join ( " / " ) ; if ( ! createdParents [ curr ] ) { createdParents [ curr ] = WORKERFS . createNode ( parent , parts [ i ] , WORKERFS . DIR_MODE , 0 ) } parent = createdParents [ curr ] } return parent } function base ( path ) { var parts = path . split ( " / " ) ; return parts [ parts . length - 1 ] } Array . prototype . forEach . call ( mount . opts [ " files " ] | | [ ] , function ( file ) { WORKERFS . createNode ( ensureParent ( file . name ) , base ( file . name ) , WORKERFS . FILE_MODE , 0 , file , file . lastModifiedDate ) } ) ; ( mount . opts [ " blobs " ] | | [ ] ) . forEach ( function ( obj ) { WORKERFS . createNode ( ensureParent ( obj [ " name " ] ) , base ( obj [ " name " ] ) , WORKERFS . FILE_MODE , 0 , obj [ " data " ] ) } ) ; ( mount . opts [ " packages " ] | | [ ] ) . forEach ( function ( pack ) { pack [ " metadata " ] . files . forEach ( function ( file ) { var name = file . filename . substr ( 1 ) ; WORKERFS . createNode ( ensureParent ( name ) , base ( name ) , WORKERFS . FILE_MODE , 0 , pack [ " blob " ] . slice ( file . start , file . end ) ) } ) } ) ; return root } , createNode : function ( parent , name , mode , dev , contents , mtime ) { var node = FS . createNode ( parent , name , mode ) ; node . mode = mode ; node . node_ops = WORKERFS . node_ops ; node . stream_ops = WORKERFS . stream_ops ; node . timestamp = ( mtime | | new Date ) . getTime ( ) ; assert ( WORKERFS . FILE_MODE ! = = WORKERFS . DIR_MODE ) ; if ( mode = = = WORKERFS . FILE_MODE ) { node . size = contents . size ; node . contents = contents } else { node . size = 4096 ; node . contents = { } } if ( parent ) { parent . contents [ name ] = node } return node } , node_ops : { getattr : function ( node ) { return { dev : 1 , ino : undefined , mode : node . mode , nlink : 1 , uid : 0 , gid : 0 , rdev : undefined , size : node . size , atime : new Date ( node . timestamp ) , mtime : new Date ( node . timestamp ) , ctime : new Date ( node . timestamp ) , blksize : 4096 , blocks : Math . ceil ( node . size / 4096 ) } } , setattr : function ( node , attr ) { if ( attr . mode ! = = undefined ) { node . mode = attr . mode } if ( attr . timestamp ! = = undefined ) { node . timestamp = attr . timestamp } } , lookup : function ( parent , name ) { throw new FS . ErrnoError ( ERRNO_CODES . ENOENT ) } , mknod : function ( parent , name , mode , dev ) { throw new FS . ErrnoError ( ERRNO_CODES . 
EPERM ) } , rename : function ( oldNode , newDir , newName ) { throw new FS . ErrnoError ( ERRNO_CODES . EPERM ) } , unlink : function ( parent , name ) { throw new FS . ErrnoError ( ERRNO_CODES . EPERM ) } , rmdir : function ( parent , name ) { throw new FS . ErrnoError ( ERRNO_CODES . EPERM ) } , readdir : function ( node ) { var entries = [ " . " , " . . " ] ; for ( var key in node . contents ) { if ( ! node . contents . hasOwnProperty ( key ) ) { continue } entries . push ( key ) } return entries } , symlink : function ( parent , newName , oldPath ) { throw new FS . ErrnoError ( ERRNO_CODES . EPERM ) } , readlink : function ( node ) { throw new FS . ErrnoError ( ERRNO_CODES . EPERM ) } } , stream_ops : { read : function ( stream , buffer , offset , length , position ) { if ( position > = stream . node . size ) return 0 ; var chunk = stream . node . contents . slice ( position , position + length ) ; var ab = WORKERFS . reader . readAsArrayBuffer ( chunk ) ; buffer . set ( new Uint8Array ( ab ) , offset ) ; return chunk . size } , write : function ( stream , buffer , offset , length , position ) { throw new FS . ErrnoError ( ERRNO_CODES . EIO ) } , llseek : function ( stream , offset , whence ) { var position = offset ; if ( whence = = = 1 ) { position + = stream . position } else if ( whence = = = 2 ) { if ( FS . isFile ( stream . node . mode ) ) { position + = stream . node . size } } if ( position < 0 ) { throw new FS . ErrnoError ( ERRNO_CODES . EINVAL ) } return position } } } ; var FS = { root : null , mounts : [ ] , devices : { } , streams : [ ] , nextInode : 1 , nameTable : null , currentPath : " / " , initialized : false , ignorePermissions : true , trackingDelegate : { } , tracking : { openFlags : { READ : 1 , WRITE : 2 } } , ErrnoError : null , genericErrors : { } , filesystems : null , syncFSRequests : 0 , handleFSError : function ( e ) { if ( ! ( e instanceof FS . ErrnoError ) ) throw e + " : " + stackTrace ( ) ; return ___setErrNo ( e . errno ) } , lookupPath : function ( path , opts ) { path = PATH . resolve ( FS . cwd ( ) , path ) ; opts = opts | | { } ; if ( ! path ) return { path : " " , node : null } ; var defaults = { follow_mount : true , recurse_count : 0 } ; for ( var key in defaults ) { if ( opts [ key ] = = = undefined ) { opts [ key ] = defaults [ key ] } } if ( opts . recurse_count > 8 ) { throw new FS . ErrnoError ( 40 ) } var parts = PATH . normalizeArray ( path . split ( " / " ) . filter ( function ( p ) { return ! ! p } ) , false ) ; var current = FS . root ; var current_path = " / " ; for ( var i = 0 ; i < parts . length ; i + + ) { var islast = i = = = parts . length - 1 ; if ( islast & & opts . parent ) { break } current = FS . lookupNode ( current , parts [ i ] ) ; current_path = PATH . join2 ( current_path , parts [ i ] ) ; if ( FS . isMountpoint ( current ) ) { if ( ! islast | | islast & & opts . follow_mount ) { current = current . mounted . root } } if ( ! islast | | opts . follow ) { var count = 0 ; while ( FS . isLink ( current . mode ) ) { var link = FS . readlink ( current_path ) ; current_path = PATH . resolve ( PATH . dirname ( current_path ) , link ) ; var lookup = FS . lookupPath ( current_path , { recurse_count : opts . recurse_count } ) ; current = lookup . node ; if ( count + + > 40 ) { throw new FS . ErrnoError ( 40 ) } } } } return { path : current_path , node : current } } , getPath : function ( node ) { var path ; while ( true ) { if ( FS . isRoot ( node ) ) { var mount = node . mount . mountpoint ; if ( ! 
path ) return mount ; return mount [ mount . length - 1 ] ! = = " / " ? mount + " / " + path : mount + path } path = path ? node . name + " / " + path : node . name ; node = node . parent } } , hashName : function ( parentid , name ) { var hash = 0 ; for ( var i = 0 ; i < name . length ; i + + ) { hash = ( hash < < 5 ) - hash + name . charCodeAt ( i ) | 0 } return ( parentid + hash > > > 0 ) % FS . nameTable . length } , hashAddNode : function ( node ) { var hash = FS . hashName ( node . parent . id , node . name ) ; node . name_next = FS . nameTable [ hash ] ; FS . nameTable [ hash ] = node } , hashRemoveNode : function ( node ) { var hash = FS . hashName ( node . parent . id , node . name ) ; if ( FS . nameTable [ hash ] = = = node ) { FS . nameTable [ hash ] = node . name_next } else { var current = FS . nameTable [ hash ] ; while ( current ) { if ( current . name_next = = = node ) { current . name_next = node . name_next ; break } current = current . name_next } } } , lookupNode : function ( parent , name ) { var err = FS . mayLookup ( parent ) ; if ( err ) { throw new FS . ErrnoError ( err , parent ) } var hash = FS . hashName ( parent . id , name ) ; for ( var node = FS . nameTable [ hash ] ; node ; node = node . name_next ) { var nodeName = node . name ; if ( node . parent . id = = = parent . id & & nodeName = = = name ) { return node } } return FS . lookup ( parent , name ) } , createNode : function ( parent , name , mode , rdev ) { if ( ! FS . FSNode ) { FS . FSNode = function ( parent , name , mode , rdev ) { if ( ! parent ) { parent = this } this . parent = parent ; this . mount = parent . mount ; this . mounted = null ; this . id = FS . nextInode + + ; this . name = name ; this . mode = mode ; this . node_ops = { } ; this . stream_ops = { } ; this . rdev = rdev } ; FS . FSNode . prototype = { } ; var readMode = 292 | 73 ; var writeMode = 146 ; Object . defineProperties ( FS . FSNode . prototype , { read : { get : function ( ) { return ( this . mode & readMode ) = = = readMode } , set : function ( val ) { val ? this . mode | = readMode : this . mode & = ~ readMode } } , write : { get : function ( ) { return ( this . mode & writeMode ) = = = writeMode } , set : function ( val ) { val ? this . mode | = writeMode : this . mode & = ~ writeMode } } , isFolder : { get : function ( ) { return FS . isDir ( this . mode ) } } , isDevice : { get : function ( ) { return FS . isChrdev ( this . mode ) } } } ) } var node = new FS . FSNode ( parent , name , mode , rdev ) ; FS . hashAddNode ( node ) ; return node } , destroyNode : function ( node ) { FS . hashRemoveNode ( node ) } , isRoot : function ( node ) { return node = = = node . parent } , isMountpoint : function ( node ) { return ! ! node . mounted } , isFile : function ( mode ) { return ( mode & 61440 ) = = = 32768 } , isDir : function ( mode ) { return ( mode & 61440 ) = = = 16384 } , isLink : function ( mode ) { return ( mode & 61440 ) = = = 40960 } , isChrdev : function ( mode ) { return ( mode & 61440 ) = = = 8192 } , isBlkdev : function ( mode ) { return ( mode & 61440 ) = = = 24576 } , isFIFO : function ( mode ) { return ( mode & 61440 ) = = = 4096 } , isSocket : function ( mode ) { return ( mode & 49152 ) = = = 49152 } , flagModes : { " r " : 0 , " rs " : 1052672 , " r + " : 2 , " w " : 577 , " wx " : 705 , " xw " : 705 , " w + " : 578 , " wx + " : 706 , " xw + " : 706 , " a " : 1089 , " ax " : 1217 , " xa " : 1217 , " a + " : 1090 , " ax + " : 1218 , " xa + " : 1218 } , modeStringToFlags : function ( str ) { var flags = FS . 
flagModes [ str ] ; if ( typeof flags = = = " undefined " ) { throw new Error ( " Unknown file open mode : " + str ) } return flags } , flagsToPermissionString : function ( flag ) { var perms = [ " r " , " w " , " rw " ] [ flag & 3 ] ; if ( flag & 512 ) { perms + = " w " } return perms } , nodePermissions : function ( node , perms ) { if ( FS . ignorePermissions ) { return 0 } if ( perms . indexOf ( " r " ) ! = = - 1 & & ! ( node . mode & 292 ) ) { return 13 } else if ( perms . indexOf ( " w " ) ! = = - 1 & & ! ( node . mode & 146 ) ) { return 13 } else if ( perms . indexOf ( " x " ) ! = = - 1 & & ! ( node . mode & 73 ) ) { return 13 } return 0 } , mayLookup : function ( dir ) { var err = FS . nodePermissions ( dir , " x " ) ; if ( err ) return err ; if ( ! dir . node_ops . lookup ) return 13 ; return 0 } , mayCreate : function ( dir , name ) { try { var node = FS . lookupNode ( dir , name ) ; return 17 } catch ( e ) { } return FS . nodePermissions ( dir , " wx " ) } , mayDelete : function ( dir , name , isdir ) { var node ; try { node = FS . lookupNode ( dir , name ) } catch ( e ) { return e . errno } var err = FS . nodePermissions ( dir , " wx " ) ; if ( err ) { return err } if ( isdir ) { if ( ! FS . isDir ( node . mode ) ) { return 20 } if ( FS . isRoot ( node ) | | FS . getPath ( node ) = = = FS . cwd ( ) ) { return 16 } } else { if ( FS . isDir ( node . mode ) ) { return 21 } } return 0 } , mayOpen : function ( node , flags ) { if ( ! node ) { return 2 } if ( FS . isLink ( node . mode ) ) { return 40 } else if ( FS . isDir ( node . mode ) ) { if ( FS . flagsToPermissionString ( flags ) ! = = " r " | | flags & 512 ) { return 21 } } return FS . nodePermissions ( node , FS . flagsToPermissionString ( flags ) ) } , MAX_OPEN_FDS : 4096 , nextfd : function ( fd_start , fd_end ) { fd_start = fd_start | | 0 ; fd_end = fd_end | | FS . MAX_OPEN_FDS ; for ( var fd = fd_start ; fd < = fd_end ; fd + + ) { if ( ! FS . streams [ fd ] ) { return fd } } throw new FS . ErrnoError ( 24 ) } , getStream : function ( fd ) { return FS . streams [ fd ] } , createStream : function ( stream , fd_start , fd_end ) { if ( ! FS . FSStream ) { FS . FSStream = function ( ) { } ; FS . FSStream . prototype = { } ; Object . defineProperties ( FS . FSStream . prototype , { object : { get : function ( ) { return this . node } , set : function ( val ) { this . node = val } } , isRead : { get : function ( ) { return ( this . flags & 2097155 ) ! = = 1 } } , isWrite : { get : function ( ) { return ( this . flags & 2097155 ) ! = = 0 } } , isAppend : { get : function ( ) { return this . flags & 1024 } } } ) } var newStream = new FS . FSStream ; for ( var p in stream ) { newStream [ p ] = stream [ p ] } stream = newStream ; var fd = FS . nextfd ( fd_start , fd_end ) ; stream . fd = fd ; FS . streams [ fd ] = stream ; return stream } , closeStream : function ( fd ) { FS . streams [ fd ] = null } , chrdev_stream_ops : { open : function ( stream ) { var device = FS . getDevice ( stream . node . rdev ) ; stream . stream_ops = device . stream_ops ; if ( stream . stream_ops . open ) { stream . stream_ops . open ( stream ) } } , llseek : function ( ) { throw new FS . ErrnoError ( 29 ) } } , major : function ( dev ) { return dev > > 8 } , minor : function ( dev ) { return dev & 255 } , makedev : function ( ma , mi ) { return ma < < 8 | mi } , registerDevice : function ( dev , ops ) { FS . devices [ dev ] = { stream_ops : ops } } , getDevice : function ( dev ) { return FS . 
devices [ dev ] } , getMounts : function ( mount ) { var mounts = [ ] ; var check = [ mount ] ; while ( check . length ) { var m = check . pop ( ) ; mounts . push ( m ) ; check . push . apply ( check , m . mounts ) } return mounts } , syncfs : function ( populate , callback ) { if ( typeof populate = = = " function " ) { callback = populate ; populate = false } FS . syncFSRequests + + ; if ( FS . syncFSRequests > 1 ) { console . log ( " warning : " + FS . syncFSRequests + " FS . syncfs operations in flight at once , probably just doing extra work " ) } var mounts = FS . getMounts ( FS . root . mount ) ; var completed = 0 ; function doCallback ( err ) { FS . syncFSRequests - - ; return callback ( err ) } function done ( err ) { if ( err ) { if ( ! done . errored ) { done . errored = true ; return doCallback ( err ) } return } if ( + + completed > = mounts . length ) { doCallback ( null ) } } mounts . forEach ( function ( mount ) { if ( ! mount . type . syncfs ) { return done ( null ) } mount . type . syncfs ( mount , populate , done ) } ) } , mount : function ( type , opts , mountpoint ) { var root = mountpoint = = = " / " ; var pseudo = ! mountpoint ; var node ; if ( root & & FS . root ) { throw new FS . ErrnoError ( 16 ) } else if ( ! root & & ! pseudo ) { var lookup = FS . lookupPath ( mountpoint , { follow_mount : false } ) ; mountpoint = lookup . path ; node = lookup . node ; if ( FS . isMountpoint ( node ) ) { throw new FS . ErrnoError ( 16 ) } if ( ! FS . isDir ( node . mode ) ) { throw new FS . ErrnoError ( 20 ) } } var mount = { type : type , opts : opts , mountpoint : mountpoint , mounts : [ ] } ; var mountRoot = type . mount ( mount ) ; mountRoot . mount = mount ; mount . root = mountRoot ; if ( root ) { FS . root = mountRoot } else if ( node ) { node . mounted = mount ; if ( node . mount ) { node . mount . mounts . push ( mount ) } } return mountRoot } , unmount : function ( mountpoint ) { var lookup = FS . lookupPath ( mountpoint , { follow_mount : false } ) ; if ( ! FS . isMountpoint ( lookup . node ) ) { throw new FS . ErrnoError ( 22 ) } var node = lookup . node ; var mount = node . mounted ; var mounts = FS . getMounts ( mount ) ; Object . keys ( FS . nameTable ) . forEach ( function ( hash ) { var current = FS . nameTable [ hash ] ; while ( current ) { var next = current . name_next ; if ( mounts . indexOf ( current . mount ) ! = = - 1 ) { FS . destroyNode ( current ) } current = next } } ) ; node . mounted = null ; var idx = node . mount . mounts . indexOf ( mount ) ; node . mount . mounts . splice ( idx , 1 ) } , lookup : function ( parent , name ) { return parent . node_ops . lookup ( parent , name ) } , mknod : function ( path , mode , dev ) { var lookup = FS . lookupPath ( path , { parent : true } ) ; var parent = lookup . node ; var name = PATH . basename ( path ) ; if ( ! name | | name = = = " . " | | name = = = " . . " ) { throw new FS . ErrnoError ( 22 ) } var err = FS . mayCreate ( parent , name ) ; if ( err ) { throw new FS . ErrnoError ( err ) } if ( ! parent . node_ops . mknod ) { throw new FS . ErrnoError ( 1 ) } return parent . node_ops . mknod ( parent , name , mode , dev ) } , create : function ( path , mode ) { mode = mode ! = = undefined ? mode : 438 ; mode & = 4095 ; mode | = 32768 ; return FS . mknod ( path , mode , 0 ) } , mkdir : function ( path , mode ) { mode = mode ! = = undefined ? mode : 511 ; mode & = 511 | 512 ; mode | = 16384 ; return FS . mknod ( path , mode , 0 ) } , mkdirTree : function ( path , mode ) { var dirs = path . 
split ( " / " ) ; var d = " " ; for ( var i = 0 ; i < dirs . length ; + + i ) { if ( ! dirs [ i ] ) continue ; d + = " / " + dirs [ i ] ; try { FS . mkdir ( d , mode ) } catch ( e ) { if ( e . errno ! = 17 ) throw e } } } , mkdev : function ( path , mode , dev ) { if ( typeof dev = = = " undefined " ) { dev = mode ; mode = 438 } mode | = 8192 ; return FS . mknod ( path , mode , dev ) } , symlink : function ( oldpath , newpath ) { if ( ! PATH . resolve ( oldpath ) ) { throw new FS . ErrnoError ( 2 ) } var lookup = FS . lookupPath ( newpath , { parent : true } ) ; var parent = lookup . node ; if ( ! parent ) { throw new FS . ErrnoError ( 2 ) } var newname = PATH . basename ( newpath ) ; var err = FS . mayCreate ( parent , newname ) ; if ( err ) { throw new FS . ErrnoError ( err ) } if ( ! parent . node_ops . symlink ) { throw new FS . ErrnoError ( 1 ) } return parent . node_ops . symlink ( parent , newname , oldpath ) } , rename : function ( old_path , new_path ) { var old_dirname = PATH . dirname ( old_path ) ; var new_dirname = PATH . dirname ( new_path ) ; var old_name = PATH . basename ( old_path ) ; var new_name = PATH . basename ( new_path ) ; var lookup , old_dir , new_dir ; try { lookup = FS . lookupPath ( old_path , { parent : true } ) ; old_dir = lookup . node ; lookup = FS . lookupPath ( new_path , { parent : true } ) ; new_dir = lookup . node } catch ( e ) { throw new FS . ErrnoError ( 16 ) } if ( ! old_dir | | ! new_dir ) throw new FS . ErrnoError ( 2 ) ; if ( old_dir . mount ! = = new_dir . mount ) { throw new FS . ErrnoError ( 18 ) } var old_node = FS . lookupNode ( old_dir , old_name ) ; var relative = PATH . relative ( old_path , new_dirname ) ; if ( relative . charAt ( 0 ) ! = = " . " ) { throw new FS . ErrnoError ( 22 ) } relative = PATH . relative ( new_path , old_dirname ) ; if ( relative . charAt ( 0 ) ! = = " . " ) { throw new FS . ErrnoError ( 39 ) } var new_node ; try { new_node = FS . lookupNode ( new_dir , new_name ) } catch ( e ) { } if ( old_node = = = new_node ) { return } var isdir = FS . isDir ( old_node . mode ) ; var err = FS . mayDelete ( old_dir , old_name , isdir ) ; if ( err ) { throw new FS . ErrnoError ( err ) } err = new_node ? FS . mayDelete ( new_dir , new_name , isdir ) : FS . mayCreate ( new_dir , new_name ) ; if ( err ) { throw new FS . ErrnoError ( err ) } if ( ! old_dir . node_ops . rename ) { throw new FS . ErrnoError ( 1 ) } if ( FS . isMountpoint ( old_node ) | | new_node & & FS . isMountpoint ( new_node ) ) { throw new FS . ErrnoError ( 16 ) } if ( new_dir ! = = old_dir ) { err = FS . nodePermissions ( old_dir , " w " ) ; if ( err ) { throw new FS . ErrnoError ( err ) } } try { if ( FS . trackingDelegate [ " willMovePath " ] ) { FS . trackingDelegate [ " willMovePath " ] ( old_path , new_path ) } } catch ( e ) { console . log ( " FS . trackingDelegate [ ' willMovePath ' ] ( ' " + old_path + " ' , ' " + new_path + " ' ) threw an exception : " + e . message ) } FS . hashRemoveNode ( old_node ) ; try { old_dir . node_ops . rename ( old_node , new_dir , new_name ) } catch ( e ) { throw e } finally { FS . hashAddNode ( old_node ) } try { if ( FS . trackingDelegate [ " onMovePath " ] ) FS . trackingDelegate [ " onMovePath " ] ( old_path , new_path ) } catch ( e ) { console . log ( " FS . trackingDelegate [ ' onMovePath ' ] ( ' " + old_path + " ' , ' " + new_path + " ' ) threw an exception : " + e . message ) } } , rmdir : function ( path ) { var lookup = FS . lookupPath ( path , { parent : true } ) ; var parent = lookup . node ; var name = PATH . 
basename ( path ) ; var node = FS . lookupNode ( parent , name ) ; var err = FS . mayDelete ( parent , name , true ) ; if ( err ) { throw new FS . ErrnoError ( err ) } if ( ! parent . node_ops . rmdir ) { throw new FS . ErrnoError ( 1 ) } if ( FS . isMountpoint ( node ) ) { throw new FS . ErrnoError ( 16 ) } try { if ( FS . trackingDelegate [ " willDeletePath " ] ) { FS . trackingDelegate [ " willDeletePath " ] ( path ) } } catch ( e ) { console . log ( " FS . trackingDelegate [ ' willDeletePath ' ] ( ' " + path + " ' ) threw an exception : " + e . message ) } parent . node_ops . rmdir ( parent , name ) ; FS . destroyNode ( node ) ; try { if ( FS . trackingDelegate [ " onDeletePath " ] ) FS . trackingDelegate [ " onDeletePath " ] ( path ) } catch ( e ) { console . log ( " FS . trackingDelegate [ ' onDeletePath ' ] ( ' " + path + " ' ) threw an exception : " + e . message ) } } , readdir : function ( path ) { var lookup = FS . lookupPath ( path , { follow : true } ) ; var node = lookup . node ; if ( ! node . node_ops . readdir ) { throw new FS . ErrnoError ( 20 ) } return node . node_ops . readdir ( node ) } , unlink : function ( path ) { var lookup = FS . lookupPath ( path , { parent : true } ) ; var parent = lookup . node ; var name = PATH . basename ( path ) ; var node = FS . lookupNode ( parent , name ) ; var err = FS . mayDelete ( parent , name , false ) ; if ( err ) { throw new FS . ErrnoError ( err ) } if ( ! parent . node_ops . unlink ) { throw new FS . ErrnoError ( 1 ) } if ( FS . isMountpoint ( node ) ) { throw new FS . ErrnoError ( 16 ) } try { if ( FS . trackingDelegate [ " willDeletePath " ] ) { FS . trackingDelegate [ " willDeletePath " ] ( path ) } } catch ( e ) { console . log ( " FS . trackingDelegate [ ' willDeletePath ' ] ( ' " + path + " ' ) threw an exception : " + e . message ) } parent . node_ops . unlink ( parent , name ) ; FS . destroyNode ( node ) ; try { if ( FS . trackingDelegate [ " onDeletePath " ] ) FS . trackingDelegate [ " onDeletePath " ] ( path ) } catch ( e ) { console . log ( " FS . trackingDelegate [ ' onDeletePath ' ] ( ' " + path + " ' ) threw an exception : " + e . message ) } } , readlink : function ( path ) { var lookup = FS . lookupPath ( path ) ; var link = lookup . node ; if ( ! link ) { throw new FS . ErrnoError ( 2 ) } if ( ! link . node_ops . readlink ) { throw new FS . ErrnoError ( 22 ) } return PATH . resolve ( FS . getPath ( link . parent ) , link . node_ops . readlink ( link ) ) } , stat : function ( path , dontFollow ) { var lookup = FS . lookupPath ( path , { follow : ! dontFollow } ) ; var node = lookup . node ; if ( ! node ) { throw new FS . ErrnoError ( 2 ) } if ( ! node . node_ops . getattr ) { throw new FS . ErrnoError ( 1 ) } return node . node_ops . getattr ( node ) } , lstat : function ( path ) { return FS . stat ( path , true ) } , chmod : function ( path , mode , dontFollow ) { var node ; if ( typeof path = = = " string " ) { var lookup = FS . lookupPath ( path , { follow : ! dontFollow } ) ; node = lookup . node } else { node = path } if ( ! node . node_ops . setattr ) { throw new FS . ErrnoError ( 1 ) } node . node_ops . setattr ( node , { mode : mode & 4095 | node . mode & ~ 4095 , timestamp : Date . now ( ) } ) } , lchmod : function ( path , mode ) { FS . chmod ( path , mode , true ) } , fchmod : function ( fd , mode ) { var stream = FS . getStream ( fd ) ; if ( ! stream ) { throw new FS . ErrnoError ( 9 ) } FS . chmod ( stream . 
node , mode ) } , chown : function ( path , uid , gid , dontFollow ) { var node ; if ( typeof path = = = " string " ) { var lookup = FS . lookupPath ( path , { follow : ! dontFollow } ) ; node = lookup . node } else { node = path } if ( ! node . node_ops . setattr ) { throw new FS . ErrnoError ( 1 ) } node . node_ops . setattr ( node , { timestamp : Date . now ( ) } ) } , lchown : function ( path , uid , gid ) { FS . chown ( path , uid , gid , true ) } , fchown : function ( fd , uid , gid ) { var stream = FS . getStream ( fd ) ; if ( ! stream ) { throw new FS . ErrnoError ( 9 ) } FS . chown ( stream . node , uid , gid ) } , truncate : function ( path , len ) { if ( len < 0 ) { throw new FS . ErrnoError ( 22 ) } var node ; if ( typeof path = = = " string " ) { var lookup = FS . lookupPath ( path , { follow : true } ) ; node = lookup . node } else { node = path } if ( ! node . node_ops . setattr ) { throw new FS . ErrnoError ( 1 ) } if ( FS . isDir ( node . mode ) ) { throw new FS . ErrnoError ( 21 ) } if ( ! FS . isFile ( node . mode ) ) { throw new FS . ErrnoError ( 22 ) } var err = FS . nodePermissions ( node , " w " ) ; if ( err ) { throw new FS . ErrnoError ( err ) } node . node_ops . setattr ( node , { size : len , timestamp : Date . now ( ) } ) } , ftruncate : function ( fd , len ) { var stream = FS . getStream ( fd ) ; if ( ! stream ) { throw new FS . ErrnoError ( 9 ) } if ( ( stream . flags & 2097155 ) = = = 0 ) { throw new FS . ErrnoError ( 22 ) } FS . truncate ( stream . node , len ) } , utime : function ( path , atime , mtime ) { var lookup = FS . lookupPath ( path , { follow : true } ) ; var node = lookup . node ; node . node_ops . setattr ( node , { timestamp : Math . max ( atime , mtime ) } ) } , open : function ( path , flags , mode , fd_start , fd_end ) { if ( path = = = " " ) { throw new FS . ErrnoError ( 2 ) } flags = typeof flags = = = " string " ? FS . modeStringToFlags ( flags ) : flags ; mode = typeof mode = = = " undefined " ? 438 : mode ; if ( flags & 64 ) { mode = mode & 4095 | 32768 } else { mode = 0 } var node ; if ( typeof path = = = " object " ) { node = path } else { path = PATH . normalize ( path ) ; try { var lookup = FS . lookupPath ( path , { follow : ! ( flags & 131072 ) } ) ; node = lookup . node } catch ( e ) { } } var created = false ; if ( flags & 64 ) { if ( node ) { if ( flags & 128 ) { throw new FS . ErrnoError ( 17 ) } } else { node = FS . mknod ( path , mode , 0 ) ; created = true } } if ( ! node ) { throw new FS . ErrnoError ( 2 ) } if ( FS . isChrdev ( node . mode ) ) { flags & = ~ 512 } if ( flags & 65536 & & ! FS . isDir ( node . mode ) ) { throw new FS . ErrnoError ( 20 ) } if ( ! created ) { var err = FS . mayOpen ( node , flags ) ; if ( err ) { throw new FS . ErrnoError ( err ) } } if ( flags & 512 ) { FS . truncate ( node , 0 ) } flags & = ~ ( 128 | 512 ) ; var stream = FS . createStream ( { node : node , path : FS . getPath ( node ) , flags : flags , seekable : true , position : 0 , stream_ops : node . stream_ops , ungotten : [ ] , error : false } , fd_start , fd_end ) ; if ( stream . stream_ops . open ) { stream . stream_ops . open ( stream ) } if ( Module [ " logReadFiles " ] & & ! ( flags & 1 ) ) { if ( ! FS . readFiles ) FS . readFiles = { } ; if ( ! ( path in FS . readFiles ) ) { FS . readFiles [ path ] = 1 ; console . log ( " FS . trackingDelegate error on read file : " + path ) } } try { if ( FS . trackingDelegate [ " onOpenFile " ] ) { var trackingFlags = 0 ; if ( ( flags & 2097155 ) ! = = 1 ) { trackingFlags | = FS . tracking . 
openFlags . READ } if ( ( flags & 2097155 ) ! = = 0 ) { trackingFlags | = FS . tracking . openFlags . WRITE } FS . trackingDelegate [ " onOpenFile " ] ( path , trackingFlags ) } } catch ( e ) { console . log ( " FS . trackingDelegate [ ' onOpenFile ' ] ( ' " + path + " ' , flags ) threw an exception : " + e . message ) } return stream } , close : function ( stream ) { if ( FS . isClosed ( stream ) ) { throw new FS . ErrnoError ( 9 ) } if ( stream . getdents ) stream . getdents = null ; try { if ( stream . stream_ops . close ) { stream . stream_ops . close ( stream ) } } catch ( e ) { throw e } finally { FS . closeStream ( stream . fd ) } stream . fd = null } , isClosed : function ( stream ) { return stream . fd = = = null } , llseek : function ( stream , offset , whence ) { if ( FS . isClosed ( stream ) ) { throw new FS . ErrnoError ( 9 ) } if ( ! stream . seekable | | ! stream . stream_ops . llseek ) { throw new FS . ErrnoError ( 29 ) } if ( whence ! = 0 & & whence ! = 1 & & whence ! = 2 ) { throw new FS . ErrnoError ( 22 ) } stream . position = stream . stream_ops . llseek ( stream , offset , whence ) ; stream . ungotten = [ ] ; return stream . position } , read : function ( stream , buffer , offset , length , position ) { if ( length < 0 | | position < 0 ) { throw new FS . ErrnoError ( 22 ) } if ( FS . isClosed ( stream ) ) { throw new FS . ErrnoError ( 9 ) } if ( ( stream . flags & 2097155 ) = = = 1 ) { throw new FS . ErrnoError ( 9 ) } if ( FS . isDir ( stream . node . mode ) ) { throw new FS . ErrnoError ( 21 ) } if ( ! stream . stream_ops . read ) { throw new FS . ErrnoError ( 22 ) } var seeking = typeof position ! = = " undefined " ; if ( ! seeking ) { position = stream . position } else if ( ! stream . seekable ) { throw new FS . ErrnoError ( 29 ) } var bytesRead = stream . stream_ops . read ( stream , buffer , offset , length , position ) ; if ( ! seeking ) stream . position + = bytesRead ; return bytesRead } , write : function ( stream , buffer , offset , length , position , canOwn ) { if ( length < 0 | | position < 0 ) { throw new FS . ErrnoError ( 22 ) } if ( FS . isClosed ( stream ) ) { throw new FS . ErrnoError ( 9 ) } if ( ( stream . flags & 2097155 ) = = = 0 ) { throw new FS . ErrnoError ( 9 ) } if ( FS . isDir ( stream . node . mode ) ) { throw new FS . ErrnoError ( 21 ) } if ( ! stream . stream_ops . write ) { throw new FS . ErrnoError ( 22 ) } if ( stream . flags & 1024 ) { FS . llseek ( stream , 0 , 2 ) } var seeking = typeof position ! = = " undefined " ; if ( ! seeking ) { position = stream . position } else if ( ! stream . seekable ) { throw new FS . ErrnoError ( 29 ) } var bytesWritten = stream . stream_ops . write ( stream , buffer , offset , length , position , canOwn ) ; if ( ! seeking ) stream . position + = bytesWritten ; try { if ( stream . path & & FS . trackingDelegate [ " onWriteToFile " ] ) FS . trackingDelegate [ " onWriteToFile " ] ( stream . path ) } catch ( e ) { console . log ( " FS . trackingDelegate [ ' onWriteToFile ' ] ( ' " + stream . path + " ' ) threw an exception : " + e . message ) } return bytesWritten } , allocate : function ( stream , offset , length ) { if ( FS . isClosed ( stream ) ) { throw new FS . ErrnoError ( 9 ) } if ( offset < 0 | | length < = 0 ) { throw new FS . ErrnoError ( 22 ) } if ( ( stream . flags & 2097155 ) = = = 0 ) { throw new FS . ErrnoError ( 9 ) } if ( ! FS . isFile ( stream . node . mode ) & & ! FS . isDir ( stream . node . mode ) ) { throw new FS . ErrnoError ( 19 ) } if ( ! stream . stream_ops . 
allocate ) { throw new FS . ErrnoError ( 95 ) } stream . stream_ops . allocate ( stream , offset , length ) } , mmap : function ( stream , buffer , offset , length , position , prot , flags ) { if ( ( stream . flags & 2097155 ) = = = 1 ) { throw new FS . ErrnoError ( 13 ) } if ( ! stream . stream_ops . mmap ) { throw new FS . ErrnoError ( 19 ) } return stream . stream_ops . mmap ( stream , buffer , offset , length , position , prot , flags ) } , msync : function ( stream , buffer , offset , length , mmapFlags ) { if ( ! stream | | ! stream . stream_ops . msync ) { return 0 } return stream . stream_ops . msync ( stream , buffer , offset , length , mmapFlags ) } , munmap : function ( stream ) { return 0 } , ioctl : function ( stream , cmd , arg ) { if ( ! stream . stream_ops . ioctl ) { throw new FS . ErrnoError ( 25 ) } return stream . stream_ops . ioctl ( stream , cmd , arg ) } , readFile : function ( path , opts ) { opts = opts | | { } ; opts . flags = opts . flags | | " r " ; opts . encoding = opts . encoding | | " binary " ; if ( opts . encoding ! = = " utf8 " & & opts . encoding ! = = " binary " ) { throw new Error ( ' Invalid encoding type " ' + opts . encoding + ' " ' ) } var ret ; var stream = FS . open ( path , opts . flags ) ; var stat = FS . stat ( path ) ; var length = stat . size ; var buf = new Uint8Array ( length ) ; FS . read ( stream , buf , 0 , length , 0 ) ; if ( opts . encoding = = = " utf8 " ) { ret = UTF8ArrayToString ( buf , 0 ) } else if ( opts . encoding = = = " binary " ) { ret = buf } FS . close ( stream ) ; return ret } , writeFile : function ( path , data , opts ) { opts = opts | | { } ; opts . flags = opts . flags | | " w " ; var stream = FS . open ( path , opts . flags , opts . mode ) ; if ( typeof data = = = " string " ) { var buf = new Uint8Array ( lengthBytesUTF8 ( data ) + 1 ) ; var actualNumBytes = stringToUTF8Array ( data , buf , 0 , buf . length ) ; FS . write ( stream , buf , 0 , actualNumBytes , undefined , opts . canOwn ) } else if ( ArrayBuffer . isView ( data ) ) { FS . write ( stream , data , 0 , data . byteLength , undefined , opts . canOwn ) } else { throw new Error ( " Unsupported data type " ) } FS . close ( stream ) } , cwd : function ( ) { return FS . currentPath } , chdir : function ( path ) { var lookup = FS . lookupPath ( path , { follow : true } ) ; if ( lookup . node = = = null ) { throw new FS . ErrnoError ( 2 ) } if ( ! FS . isDir ( lookup . node . mode ) ) { throw new FS . ErrnoError ( 20 ) } var err = FS . nodePermissions ( lookup . node , " x " ) ; if ( err ) { throw new FS . ErrnoError ( err ) } FS . currentPath = lookup . path } , createDefaultDirectories : function ( ) { FS . mkdir ( " / tmp " ) ; FS . mkdir ( " / home " ) ; FS . mkdir ( " / home / web_user " ) } , createDefaultDevices : function ( ) { FS . mkdir ( " / dev " ) ; FS . registerDevice ( FS . makedev ( 1 , 3 ) , { read : function ( ) { return 0 } , write : function ( stream , buffer , offset , length , pos ) { return length } } ) ; FS . mkdev ( " / dev / null " , FS . makedev ( 1 , 3 ) ) ; TTY . register ( FS . makedev ( 5 , 0 ) , TTY . default_tty_ops ) ; TTY . register ( FS . makedev ( 6 , 0 ) , TTY . default_tty1_ops ) ; FS . mkdev ( " / dev / tty " , FS . makedev ( 5 , 0 ) ) ; FS . mkdev ( " / dev / tty1 " , FS . makedev ( 6 , 0 ) ) ; var random_device ; if ( typeof crypto = = = " object " & & typeof crypto [ " getRandomValues " ] = = = " function " ) { var randomBuffer = new Uint8Array ( 1 ) ; random_device = function ( ) { crypto . 
getRandomValues ( randomBuffer ) ; return randomBuffer [ 0 ] } } else if ( ENVIRONMENT_IS_NODE ) { try { var crypto_module = require ( " crypto " ) ; random_device = function ( ) { return crypto_module [ " randomBytes " ] ( 1 ) [ 0 ] } } catch ( e ) { random_device = function ( ) { return Math . random ( ) * 256 | 0 } } } else { random_device = function ( ) { abort ( " random_device " ) } } FS . createDevice ( " / dev " , " random " , random_device ) ; FS . createDevice ( " / dev " , " urandom " , random_device ) ; FS . mkdir ( " / dev / shm " ) ; FS . mkdir ( " / dev / shm / tmp " ) } , createSpecialDirectories : function ( ) { FS . mkdir ( " / proc " ) ; FS . mkdir ( " / proc / self " ) ; FS . mkdir ( " / proc / self / fd " ) ; FS . mount ( { mount : function ( ) { var node = FS . createNode ( " / proc / self " , " fd " , 16384 | 511 , 73 ) ; node . node_ops = { lookup : function ( parent , name ) { var fd = + name ; var stream = FS . getStream ( fd ) ; if ( ! stream ) throw new FS . ErrnoError ( 9 ) ; var ret = { parent : null , mount : { mountpoint : " fake " } , node_ops : { readlink : function ( ) { return stream . path } } } ; ret . parent = ret ; return ret } } ; return node } } , { } , " / proc / self / fd " ) } , createStandardStreams : function ( ) { if ( Module [ " stdin " ] ) { FS . createDevice ( " / dev " , " stdin " , Module [ " stdin " ] ) } else { FS . symlink ( " / dev / tty " , " / dev / stdin " ) } if ( Module [ " stdout " ] ) { FS . createDevice ( " / dev " , " stdout " , null , Module [ " stdout " ] ) } else { FS . symlink ( " / dev / tty " , " / dev / stdout " ) } if ( Module [ " stderr " ] ) { FS . createDevice ( " / dev " , " stderr " , null , Module [ " stderr " ] ) } else { FS . symlink ( " / dev / tty1 " , " / dev / stderr " ) } var stdin = FS . open ( " / dev / stdin " , " r " ) ; var stdout = FS . open ( " / dev / stdout " , " w " ) ; var stderr = FS . open ( " / dev / stderr " , " w " ) } , ensureErrnoError : function ( ) { if ( FS . ErrnoError ) return ; FS . ErrnoError = function ErrnoError ( errno , node ) { this . node = node ; this . setErrno = function ( errno ) { this . errno = errno } ; this . setErrno ( errno ) ; this . message = " FS error " ; if ( this . stack ) Object . defineProperty ( this , " stack " , { value : ( new Error ) . stack , writable : true } ) } ; FS . ErrnoError . prototype = new Error ; FS . ErrnoError . prototype . constructor = FS . ErrnoError ; [ 2 ] . forEach ( function ( code ) { FS . genericErrors [ code ] = new FS . ErrnoError ( code ) ; FS . genericErrors [ code ] . stack = " < generic error , no stack > " } ) } , staticInit : function ( ) { FS . ensureErrnoError ( ) ; FS . nameTable = new Array ( 4096 ) ; FS . mount ( MEMFS , { } , " / " ) ; FS . createDefaultDirectories ( ) ; FS . createDefaultDevices ( ) ; FS . createSpecialDirectories ( ) ; FS . filesystems = { " MEMFS " : MEMFS , " IDBFS " : IDBFS , " NODEFS " : NODEFS , " WORKERFS " : WORKERFS } } , init : function ( input , output , error ) { FS . init . initialized = true ; FS . ensureErrnoError ( ) ; Module [ " stdin " ] = input | | Module [ " stdin " ] ; Module [ " stdout " ] = output | | Module [ " stdout " ] ; Module [ " stderr " ] = error | | Module [ " stderr " ] ; FS . createStandardStreams ( ) } , quit : function ( ) { FS . init . initialized = false ; var fflush = Module [ " _fflush " ] ; if ( fflush ) fflush ( 0 ) ; for ( var i = 0 ; i < FS . streams . length ; i + + ) { var stream = FS . streams [ i ] ; if ( ! stream ) { continue } FS . 
close ( stream ) } } , getMode : function ( canRead , canWrite ) { var mode = 0 ; if ( canRead ) mode | = 292 | 73 ; if ( canWrite ) mode | = 146 ; return mode } , joinPath : function ( parts , forceRelative ) { var path = PATH . join . apply ( null , parts ) ; if ( forceRelative & & path [ 0 ] = = " / " ) path = path . substr ( 1 ) ; return path } , absolutePath : function ( relative , base ) { return PATH . resolve ( base , relative ) } , standardizePath : function ( path ) { return PATH . normalize ( path ) } , findObject : function ( path , dontResolveLastLink ) { var ret = FS . analyzePath ( path , dontResolveLastLink ) ; if ( ret . exists ) { return ret . object } else { ___setErrNo ( ret . error ) ; return null } } , analyzePath : function ( path , dontResolveLastLink ) { try { var lookup = FS . lookupPath ( path , { follow : ! dontResolveLastLink } ) ; path = lookup . path } catch ( e ) { } var ret = { isRoot : false , exists : false , error : 0 , name : null , path : null , object : null , parentExists : false , parentPath : null , parentObject : null } ; try { var lookup = FS . lookupPath ( path , { parent : true } ) ; ret . parentExists = true ; ret . parentPath = lookup . path ; ret . parentObject = lookup . node ; ret . name = PATH . basename ( path ) ; lookup = FS . lookupPath ( path , { follow : ! dontResolveLastLink } ) ; ret . exists = true ; ret . path = lookup . path ; ret . object = lookup . node ; ret . name = lookup . node . name ; ret . isRoot = lookup . path = = = " / " } catch ( e ) { ret . error = e . errno } return ret } , createFolder : function ( parent , name , canRead , canWrite ) { var path = PATH . join2 ( typeof parent = = = " string " ? parent : FS . getPath ( parent ) , name ) ; var mode = FS . getMode ( canRead , canWrite ) ; return FS . mkdir ( path , mode ) } , createPath : function ( parent , path , canRead , canWrite ) { parent = typeof parent = = = " string " ? parent : FS . getPath ( parent ) ; var parts = path . split ( " / " ) . reverse ( ) ; while ( parts . length ) { var part = parts . pop ( ) ; if ( ! part ) continue ; var current = PATH . join2 ( parent , part ) ; try { FS . mkdir ( current ) } catch ( e ) { } parent = current } return current } , createFile : function ( parent , name , properties , canRead , canWrite ) { var path = PATH . join2 ( typeof parent = = = " string " ? parent : FS . getPath ( parent ) , name ) ; var mode = FS . getMode ( canRead , canWrite ) ; return FS . create ( path , mode ) } , createDataFile : function ( parent , name , data , canRead , canWrite , canOwn ) { var path = name ? PATH . join2 ( typeof parent = = = " string " ? parent : FS . getPath ( parent ) , name ) : parent ; var mode = FS . getMode ( canRead , canWrite ) ; var node = FS . create ( path , mode ) ; if ( data ) { if ( typeof data = = = " string " ) { var arr = new Array ( data . length ) ; for ( var i = 0 , len = data . length ; i < len ; + + i ) arr [ i ] = data . charCodeAt ( i ) ; data = arr } FS . chmod ( node , mode | 146 ) ; var stream = FS . open ( node , " w " ) ; FS . write ( stream , data , 0 , data . length , 0 , canOwn ) ; FS . close ( stream ) ; FS . chmod ( node , mode ) } return node } , createDevice : function ( parent , name , input , output ) { var path = PATH . join2 ( typeof parent = = = " string " ? parent : FS . getPath ( parent ) , name ) ; var mode = FS . getMode ( ! ! input , ! ! output ) ; if ( ! FS . createDevice . major ) FS . createDevice . major = 64 ; var dev = FS . makedev ( FS . createDevice . 
major + + , 0 ) ; FS . registerDevice ( dev , { open : function ( stream ) { stream . seekable = false } , close : function ( stream ) { if ( output & & output . buffer & & output . buffer . length ) { output ( 10 ) } } , read : function ( stream , buffer , offset , length , pos ) { var bytesRead = 0 ; for ( var i = 0 ; i < length ; i + + ) { var result ; try { result = input ( ) } catch ( e ) { throw new FS . ErrnoError ( 5 ) } if ( result = = = undefined & & bytesRead = = = 0 ) { throw new FS . ErrnoError ( 11 ) } if ( result = = = null | | result = = = undefined ) break ; bytesRead + + ; buffer [ offset + i ] = result } if ( bytesRead ) { stream . node . timestamp = Date . now ( ) } return bytesRead } , write : function ( stream , buffer , offset , length , pos ) { for ( var i = 0 ; i < length ; i + + ) { try { output ( buffer [ offset + i ] ) } catch ( e ) { throw new FS . ErrnoError ( 5 ) } } if ( length ) { stream . node . timestamp = Date . now ( ) } return i } } ) ; return FS . mkdev ( path , mode , dev ) } , createLink : function ( parent , name , target , canRead , canWrite ) { var path = PATH . join2 ( typeof parent = = = " string " ? parent : FS . getPath ( parent ) , name ) ; return FS . symlink ( target , path ) } , forceLoadFile : function ( obj ) { if ( obj . isDevice | | obj . isFolder | | obj . link | | obj . contents ) return true ; var success = true ; if ( typeof XMLHttpRequest ! = = " undefined " ) { throw new Error ( " Lazy loading should have been performed ( contents set ) in createLazyFile , but it was not . Lazy loading only works in web workers . Use - - embed - file or - - preload - file in emcc on the main thread . " ) } else if ( Module [ " read " ] ) { try { obj . contents = intArrayFromString ( Module [ " read " ] ( obj . url ) , true ) ; obj . usedBytes = obj . contents . length } catch ( e ) { success = false } } else { throw new Error ( " Cannot load without read ( ) or XMLHttpRequest . " ) } if ( ! success ) ___setErrNo ( 5 ) ; return success } , createLazyFile : function ( parent , name , url , canRead , canWrite ) { function LazyUint8Array ( ) { this . lengthKnown = false ; this . chunks = [ ] } LazyUint8Array . prototype . get = function LazyUint8Array_get ( idx ) { if ( idx > this . length - 1 | | idx < 0 ) { return undefined } var chunkOffset = idx % this . chunkSize ; var chunkNum = idx / this . chunkSize | 0 ; return this . getter ( chunkNum ) [ chunkOffset ] } ; LazyUint8Array . prototype . setDataGetter = function LazyUint8Array_setDataGetter ( getter ) { this . getter = getter } ; LazyUint8Array . prototype . cacheLength = function LazyUint8Array_cacheLength ( ) { var xhr = new XMLHttpRequest ; xhr . open ( " HEAD " , url , false ) ; xhr . send ( null ) ; if ( ! ( xhr . status > = 200 & & xhr . status < 300 | | xhr . status = = = 304 ) ) throw new Error ( " Couldn ' t load " + url + " . Status : " + xhr . status ) ; var datalength = Number ( xhr . getResponseHeader ( " Content - length " ) ) ; var header ; var hasByteServing = ( header = xhr . getResponseHeader ( " Accept - Ranges " ) ) & & header = = = " bytes " ; var usesGzip = ( header = xhr . getResponseHeader ( " Content - Encoding " ) ) & & header = = = " gzip " ; var chunkSize = 1024 * 1024 ; if ( ! hasByteServing ) chunkSize = datalength ; var doXHR = function ( from , to ) { if ( from > to ) throw new Error ( " invalid range ( " + from + " , " + to + " ) or no bytes requested ! " ) ; if ( to > datalength - 1 ) throw new Error ( " only " + datalength + " bytes available ! 
programmer error ! " ) ; var xhr = new XMLHttpRequest ; xhr . open ( " GET " , url , false ) ; if ( datalength ! = = chunkSize ) xhr . setRequestHeader ( " Range " , " bytes = " + from + " - " + to ) ; if ( typeof Uint8Array ! = " undefined " ) xhr . responseType = " arraybuffer " ; if ( xhr . overrideMimeType ) { xhr . overrideMimeType ( " text / plain ; charset = x - user - defined " ) } xhr . send ( null ) ; if ( ! ( xhr . status > = 200 & & xhr . status < 300 | | xhr . status = = = 304 ) ) throw new Error ( " Couldn ' t load " + url + " . Status : " + xhr . status ) ; if ( xhr . response ! = = undefined ) { return new Uint8Array ( xhr . response | | [ ] ) } else { return intArrayFromString ( xhr . responseText | | " " , true ) } } ; var lazyArray = this ; lazyArray . setDataGetter ( function ( chunkNum ) { var start = chunkNum * chunkSize ; var end = ( chunkNum + 1 ) * chunkSize - 1 ; end = Math . min ( end , datalength - 1 ) ; if ( typeof lazyArray . chunks [ chunkNum ] = = = " undefined " ) { lazyArray . chunks [ chunkNum ] = doXHR ( start , end ) } if ( typeof lazyArray . chunks [ chunkNum ] = = = " undefined " ) throw new Error ( " doXHR failed ! " ) ; return lazyArray . chunks [ chunkNum ] } ) ; if ( usesGzip | | ! datalength ) { chunkSize = datalength = 1 ; datalength = this . getter ( 0 ) . length ; chunkSize = datalength ; console . log ( " LazyFiles on gzip forces download of the whole file when length is accessed " ) } this . _length = datalength ; this . _chunkSize = chunkSize ; this . lengthKnown = true } ; if ( typeof XMLHttpRequest ! = = " undefined " ) { if ( ! ENVIRONMENT_IS_WORKER ) throw " Cannot do synchronous binary XHRs outside webworkers in modern browsers . Use - - embed - file or - - preload - file in emcc " ; var lazyArray = new LazyUint8Array ; Object . defineProperties ( lazyArray , { length : { get : function ( ) { if ( ! this . lengthKnown ) { this . cacheLength ( ) } return this . _length } } , chunkSize : { get : function ( ) { if ( ! this . lengthKnown ) { this . cacheLength ( ) } return this . _chunkSize } } } ) ; var properties = { isDevice : false , contents : lazyArray } } else { var properties = { isDevice : false , url : url } } var node = FS . createFile ( parent , name , properties , canRead , canWrite ) ; if ( properties . contents ) { node . contents = properties . contents } else if ( properties . url ) { node . contents = null ; node . url = properties . url } Object . defineProperties ( node , { usedBytes : { get : function ( ) { return this . contents . length } } } ) ; var stream_ops = { } ; var keys = Object . keys ( node . stream_ops ) ; keys . forEach ( function ( key ) { var fn = node . stream_ops [ key ] ; stream_ops [ key ] = function forceLoadLazyFile ( ) { if ( ! FS . forceLoadFile ( node ) ) { throw new FS . ErrnoError ( 5 ) } return fn . apply ( null , arguments ) } } ) ; stream_ops . read = function stream_ops_read ( stream , buffer , offset , length , position ) { if ( ! FS . forceLoadFile ( node ) ) { throw new FS . ErrnoError ( 5 ) } var contents = stream . node . contents ; if ( position > = contents . length ) return 0 ; var size = Math . min ( contents . length - position , length ) ; if ( contents . slice ) { for ( var i = 0 ; i < size ; i + + ) { buffer [ offset + i ] = contents [ position + i ] } } else { for ( var i = 0 ; i < size ; i + + ) { buffer [ offset + i ] = contents . get ( position + i ) } } return size } ; node . 
stream_ops = stream_ops ; return node } , createPreloadedFile : function ( parent , name , url , canRead , canWrite , onload , onerror , dontCreateFile , canOwn , preFinish ) { Browser . init ( ) ; var fullname = name ? PATH . resolve ( PATH . join2 ( parent , name ) ) : parent ; var dep = getUniqueRunDependency ( " cp " + fullname ) ; function processData ( byteArray ) { function finish ( byteArray ) { if ( preFinish ) preFinish ( ) ; if ( ! dontCreateFile ) { FS . createDataFile ( parent , name , byteArray , canRead , canWrite , canOwn ) } if ( onload ) onload ( ) ; removeRunDependency ( dep ) } var handled = false ; Module [ " preloadPlugins " ] . forEach ( function ( plugin ) { if ( handled ) return ; if ( plugin [ " canHandle " ] ( fullname ) ) { plugin [ " handle " ] ( byteArray , fullname , finish , function ( ) { if ( onerror ) onerror ( ) ; removeRunDependency ( dep ) } ) ; handled = true } } ) ; if ( ! handled ) finish ( byteArray ) } addRunDependency ( dep ) ; if ( typeof url = = " string " ) { Browser . asyncLoad ( url , function ( byteArray ) { processData ( byteArray ) } , onerror ) } else { processData ( url ) } } , indexedDB : function ( ) { return window . indexedDB | | window . mozIndexedDB | | window . webkitIndexedDB | | window . msIndexedDB } , DB_NAME : function ( ) { return " EM_FS_ " + window . location . pathname } , DB_VERSION : 20 , DB_STORE_NAME : " FILE_DATA " , saveFilesToDB : function ( paths , onload , onerror ) { onload = onload | | function ( ) { } ; onerror = onerror | | function ( ) { } ; var indexedDB = FS . indexedDB ( ) ; try { var openRequest = indexedDB . open ( FS . DB_NAME ( ) , FS . DB_VERSION ) } catch ( e ) { return onerror ( e ) } openRequest . onupgradeneeded = function openRequest_onupgradeneeded ( ) { console . log ( " creating db " ) ; var db = openRequest . result ; db . createObjectStore ( FS . DB_STORE_NAME ) } ; openRequest . onsuccess = function openRequest_onsuccess ( ) { var db = openRequest . result ; var transaction = db . transaction ( [ FS . DB_STORE_NAME ] , " readwrite " ) ; var files = transaction . objectStore ( FS . DB_STORE_NAME ) ; var ok = 0 , fail = 0 , total = paths . length ; function finish ( ) { if ( fail = = 0 ) onload ( ) ; else onerror ( ) } paths . forEach ( function ( path ) { var putRequest = files . put ( FS . analyzePath ( path ) . object . contents , path ) ; putRequest . onsuccess = function putRequest_onsuccess ( ) { ok + + ; if ( ok + fail = = total ) finish ( ) } ; putRequest . onerror = function putRequest_onerror ( ) { fail + + ; if ( ok + fail = = total ) finish ( ) } } ) ; transaction . onerror = onerror } ; openRequest . onerror = onerror } , loadFilesFromDB : function ( paths , onload , onerror ) { onload = onload | | function ( ) { } ; onerror = onerror | | function ( ) { } ; var indexedDB = FS . indexedDB ( ) ; try { var openRequest = indexedDB . open ( FS . DB_NAME ( ) , FS . DB_VERSION ) } catch ( e ) { return onerror ( e ) } openRequest . onupgradeneeded = onerror ; openRequest . onsuccess = function openRequest_onsuccess ( ) { var db = openRequest . result ; try { var transaction = db . transaction ( [ FS . DB_STORE_NAME ] , " readonly " ) } catch ( e ) { onerror ( e ) ; return } var files = transaction . objectStore ( FS . DB_STORE_NAME ) ; var ok = 0 , fail = 0 , total = paths . length ; function finish ( ) { if ( fail = = 0 ) onload ( ) ; else onerror ( ) } paths . forEach ( function ( path ) { var getRequest = files . get ( path ) ; getRequest . 
onsuccess = function getRequest_onsuccess ( ) { if ( FS . analyzePath ( path ) . exists ) { FS . unlink ( path ) } FS . createDataFile ( PATH . dirname ( path ) , PATH . basename ( path ) , getRequest . result , true , true , true ) ; ok + + ; if ( ok + fail = = total ) finish ( ) } ; getRequest . onerror = function getRequest_onerror ( ) { fail + + ; if ( ok + fail = = total ) finish ( ) } } ) ; transaction . onerror = onerror } ; openRequest . onerror = onerror } } ; var ERRNO_CODES = { EPERM : 1 , ENOENT : 2 , ESRCH : 3 , EINTR : 4 , EIO : 5 , ENXIO : 6 , E2BIG : 7 , ENOEXEC : 8 , EBADF : 9 , ECHILD : 10 , EAGAIN : 11 , EWOULDBLOCK : 11 , ENOMEM : 12 , EACCES : 13 , EFAULT : 14 , ENOTBLK : 15 , EBUSY : 16 , EEXIST : 17 , EXDEV : 18 , ENODEV : 19 , ENOTDIR : 20 , EISDIR : 21 , EINVAL : 22 , ENFILE : 23 , EMFILE : 24 , ENOTTY : 25 , ETXTBSY : 26 , EFBIG : 27 , ENOSPC : 28 , ESPIPE : 29 , EROFS : 30 , EMLINK : 31 , EPIPE : 32 , EDOM : 33 , ERANGE : 34 , ENOMSG : 42 , EIDRM : 43 , ECHRNG : 44 , EL2NSYNC : 45 , EL3HLT : 46 , EL3RST : 47 , ELNRNG : 48 , EUNATCH : 49 , ENOCSI : 50 , EL2HLT : 51 , EDEADLK : 35 , ENOLCK : 37 , EBADE : 52 , EBADR : 53 , EXFULL : 54 , ENOANO : 55 , EBADRQC : 56 , EBADSLT : 57 , EDEADLOCK : 35 , EBFONT : 59 , ENOSTR : 60 , ENODATA : 61 , ETIME : 62 , ENOSR : 63 , ENONET : 64 , ENOPKG : 65 , EREMOTE : 66 , ENOLINK : 67 , EADV : 68 , ESRMNT : 69 , ECOMM : 70 , EPROTO : 71 , EMULTIHOP : 72 , EDOTDOT : 73 , EBADMSG : 74 , ENOTUNIQ : 76 , EBADFD : 77 , EREMCHG : 78 , ELIBACC : 79 , ELIBBAD : 80 , ELIBSCN : 81 , ELIBMAX : 82 , ELIBEXEC : 83 , ENOSYS : 38 , ENOTEMPTY : 39 , ENAMETOOLONG : 36 , ELOOP : 40 , EOPNOTSUPP : 95 , EPFNOSUPPORT : 96 , ECONNRESET : 104 , ENOBUFS : 105 , EAFNOSUPPORT : 97 , EPROTOTYPE : 91 , ENOTSOCK : 88 , ENOPROTOOPT : 92 , ESHUTDOWN : 108 , ECONNREFUSED : 111 , EADDRINUSE : 98 , ECONNABORTED : 103 , ENETUNREACH : 101 , ENETDOWN : 100 , ETIMEDOUT : 110 , EHOSTDOWN : 112 , EHOSTUNREACH : 113 , EINPROGRESS : 115 , EALREADY : 114 , EDESTADDRREQ : 89 , EMSGSIZE : 90 , EPROTONOSUPPORT : 93 , ESOCKTNOSUPPORT : 94 , EADDRNOTAVAIL : 99 , ENETRESET : 102 , EISCONN : 106 , ENOTCONN : 107 , ETOOMANYREFS : 109 , EUSERS : 87 , EDQUOT : 122 , ESTALE : 116 , ENOTSUP : 95 , ENOMEDIUM : 123 , EILSEQ : 84 , EOVERFLOW : 75 , ECANCELED : 125 , ENOTRECOVERABLE : 131 , EOWNERDEAD : 130 , ESTRPIPE : 86 } ; var SYSCALLS = { DEFAULT_POLLMASK : 5 , mappings : { } , umask : 511 , calculateAt : function ( dirfd , path ) { if ( path [ 0 ] ! = = " / " ) { var dir ; if ( dirfd = = = - 100 ) { dir = FS . cwd ( ) } else { var dirstream = FS . getStream ( dirfd ) ; if ( ! dirstream ) throw new FS . ErrnoError ( ERRNO_CODES . EBADF ) ; dir = dirstream . path } path = PATH . join2 ( dir , path ) } return path } , doStat : function ( func , path , buf ) { try { var stat = func ( path ) } catch ( e ) { if ( e & & e . node & & PATH . normalize ( path ) ! = = PATH . normalize ( FS . getPath ( e . node ) ) ) { return - ERRNO_CODES . ENOTDIR } throw e } HEAP32 [ buf > > 2 ] = stat . dev ; HEAP32 [ buf + 4 > > 2 ] = 0 ; HEAP32 [ buf + 8 > > 2 ] = stat . ino ; HEAP32 [ buf + 12 > > 2 ] = stat . mode ; HEAP32 [ buf + 16 > > 2 ] = stat . nlink ; HEAP32 [ buf + 20 > > 2 ] = stat . uid ; HEAP32 [ buf + 24 > > 2 ] = stat . gid ; HEAP32 [ buf + 28 > > 2 ] = stat . rdev ; HEAP32 [ buf + 32 > > 2 ] = 0 ; HEAP32 [ buf + 36 > > 2 ] = stat . size ; HEAP32 [ buf + 40 > > 2 ] = 4096 ; HEAP32 [ buf + 44 > > 2 ] = stat . blocks ; HEAP32 [ buf + 48 > > 2 ] = stat . atime . 
getTime ( ) / 1e3 | 0 ; HEAP32 [ buf + 52 > > 2 ] = 0 ; HEAP32 [ buf + 56 > > 2 ] = stat . mtime . getTime ( ) / 1e3 | 0 ; HEAP32 [ buf + 60 > > 2 ] = 0 ; HEAP32 [ buf + 64 > > 2 ] = stat . ctime . getTime ( ) / 1e3 | 0 ; HEAP32 [ buf + 68 > > 2 ] = 0 ; HEAP32 [ buf + 72 > > 2 ] = stat . ino ; return 0 } , doMsync : function ( addr , stream , len , flags ) { var buffer = new Uint8Array ( HEAPU8 . subarray ( addr , addr + len ) ) ; FS . msync ( stream , buffer , 0 , len , flags ) } , doMkdir : function ( path , mode ) { path = PATH . normalize ( path ) ; if ( path [ path . length - 1 ] = = = " / " ) path = path . substr ( 0 , path . length - 1 ) ; FS . mkdir ( path , mode , 0 ) ; return 0 } , doMknod : function ( path , mode , dev ) { switch ( mode & 61440 ) { case 32768 : case 8192 : case 24576 : case 4096 : case 49152 : break ; default : return - ERRNO_CODES . EINVAL } FS . mknod ( path , mode , dev ) ; return 0 } , doReadlink : function ( path , buf , bufsize ) { if ( bufsize < = 0 ) return - ERRNO_CODES . EINVAL ; var ret = FS . readlink ( path ) ; var len = Math . min ( bufsize , lengthBytesUTF8 ( ret ) ) ; var endChar = HEAP8 [ buf + len ] ; stringToUTF8 ( ret , buf , bufsize + 1 ) ; HEAP8 [ buf + len ] = endChar ; return len } , doAccess : function ( path , amode ) { if ( amode & ~ 7 ) { return - ERRNO_CODES . EINVAL } var node ; var lookup = FS . lookupPath ( path , { follow : true } ) ; node = lookup . node ; var perms = " " ; if ( amode & 4 ) perms + = " r " ; if ( amode & 2 ) perms + = " w " ; if ( amode & 1 ) perms + = " x " ; if ( perms & & FS . nodePermissions ( node , perms ) ) { return - ERRNO_CODES . EACCES } return 0 } , doDup : function ( path , flags , suggestFD ) { var suggest = FS . getStream ( suggestFD ) ; if ( suggest ) FS . close ( suggest ) ; return FS . open ( path , flags , 0 , suggestFD , suggestFD ) . fd } , doReadv : function ( stream , iov , iovcnt , offset ) { var ret = 0 ; for ( var i = 0 ; i < iovcnt ; i + + ) { var ptr = HEAP32 [ iov + i * 8 > > 2 ] ; var len = HEAP32 [ iov + ( i * 8 + 4 ) > > 2 ] ; var curr = FS . read ( stream , HEAP8 , ptr , len , offset ) ; if ( curr < 0 ) return - 1 ; ret + = curr ; if ( curr < len ) break } return ret } , doWritev : function ( stream , iov , iovcnt , offset ) { var ret = 0 ; for ( var i = 0 ; i < iovcnt ; i + + ) { var ptr = HEAP32 [ iov + i * 8 > > 2 ] ; var len = HEAP32 [ iov + ( i * 8 + 4 ) > > 2 ] ; var curr = FS . write ( stream , HEAP8 , ptr , len , offset ) ; if ( curr < 0 ) return - 1 ; ret + = curr } return ret } , varargs : 0 , get : function ( varargs ) { SYSCALLS . varargs + = 4 ; var ret = HEAP32 [ SYSCALLS . varargs - 4 > > 2 ] ; return ret } , getStr : function ( ) { var ret = UTF8ToString ( SYSCALLS . get ( ) ) ; return ret } , getStreamFromFD : function ( ) { var stream = FS . getStream ( SYSCALLS . get ( ) ) ; if ( ! stream ) throw new FS . ErrnoError ( ERRNO_CODES . EBADF ) ; return stream } , getSocketFromFD : function ( ) { var socket = SOCKFS . getSocket ( SYSCALLS . get ( ) ) ; if ( ! socket ) throw new FS . ErrnoError ( ERRNO_CODES . EBADF ) ; return socket } , getSocketAddress : function ( allowNull ) { var addrp = SYSCALLS . get ( ) , addrlen = SYSCALLS . get ( ) ; if ( allowNull & & addrp = = = 0 ) return null ; var info = __read_sockaddr ( addrp , addrlen ) ; if ( info . errno ) throw new FS . ErrnoError ( info . errno ) ; info . addr = DNS . lookup_addr ( info . addr ) | | info . addr ; return info } , get64 : function ( ) { var low = SYSCALLS . get ( ) , high = SYSCALLS . 
get ( ) ; return low } , getZero : function ( ) { SYSCALLS . get ( ) } } ; function ___syscall140 ( which , varargs ) { SYSCALLS . varargs = varargs ; try { var stream = SYSCALLS . getStreamFromFD ( ) , offset_high = SYSCALLS . get ( ) , offset_low = SYSCALLS . get ( ) , result = SYSCALLS . get ( ) , whence = SYSCALLS . get ( ) ; var offset = offset_low ; FS . llseek ( stream , offset , whence ) ; HEAP32 [ result > > 2 ] = stream . position ; if ( stream . getdents & & offset = = = 0 & & whence = = = 0 ) stream . getdents = null ; return 0 } catch ( e ) { if ( typeof FS = = = " undefined " | | ! ( e instanceof FS . ErrnoError ) ) abort ( e ) ; return - e . errno } } function ___syscall146 ( which , varargs ) { SYSCALLS . varargs = varargs ; try { var stream = SYSCALLS . getStreamFromFD ( ) , iov = SYSCALLS . get ( ) , iovcnt = SYSCALLS . get ( ) ; return SYSCALLS . doWritev ( stream , iov , iovcnt ) } catch ( e ) { if ( typeof FS = = = " undefined " | | ! ( e instanceof FS . ErrnoError ) ) abort ( e ) ; return - e . errno } } function ___syscall221 ( which , varargs ) { SYSCALLS . varargs = varargs ; try { var stream = SYSCALLS . getStreamFromFD ( ) , cmd = SYSCALLS . get ( ) ; switch ( cmd ) { case 0 : { var arg = SYSCALLS . get ( ) ; if ( arg < 0 ) { return - ERRNO_CODES . EINVAL } var newStream ; newStream = FS . open ( stream . path , stream . flags , 0 , arg ) ; return newStream . fd } case 1 : case 2 : return 0 ; case 3 : return stream . flags ; case 4 : { var arg = SYSCALLS . get ( ) ; stream . flags | = arg ; return 0 } case 12 : { var arg = SYSCALLS . get ( ) ; var offset = 0 ; HEAP16 [ arg + offset > > 1 ] = 2 ; return 0 } case 13 : case 14 : return 0 ; case 16 : case 8 : return - ERRNO_CODES . EINVAL ; case 9 : ___setErrNo ( ERRNO_CODES . EINVAL ) ; return - 1 ; default : { return - ERRNO_CODES . EINVAL } } } catch ( e ) { if ( typeof FS = = = " undefined " | | ! ( e instanceof FS . ErrnoError ) ) abort ( e ) ; return - e . errno } } function ___syscall3 ( which , varargs ) { SYSCALLS . varargs = varargs ; try { var stream = SYSCALLS . getStreamFromFD ( ) , buf = SYSCALLS . get ( ) , count = SYSCALLS . get ( ) ; return FS . read ( stream , HEAP8 , buf , count ) } catch ( e ) { if ( typeof FS = = = " undefined " | | ! ( e instanceof FS . ErrnoError ) ) abort ( e ) ; return - e . errno } } function ___syscall5 ( which , varargs ) { SYSCALLS . varargs = varargs ; try { var pathname = SYSCALLS . getStr ( ) , flags = SYSCALLS . get ( ) , mode = SYSCALLS . get ( ) ; var stream = FS . open ( pathname , flags , mode ) ; return stream . fd } catch ( e ) { if ( typeof FS = = = " undefined " | | ! ( e instanceof FS . ErrnoError ) ) abort ( e ) ; return - e . errno } } function ___syscall54 ( which , varargs ) { SYSCALLS . varargs = varargs ; try { var stream = SYSCALLS . getStreamFromFD ( ) , op = SYSCALLS . get ( ) ; switch ( op ) { case 21509 : case 21505 : { if ( ! stream . tty ) return - ERRNO_CODES . ENOTTY ; return 0 } case 21510 : case 21511 : case 21512 : case 21506 : case 21507 : case 21508 : { if ( ! stream . tty ) return - ERRNO_CODES . ENOTTY ; return 0 } case 21519 : { if ( ! stream . tty ) return - ERRNO_CODES . ENOTTY ; var argp = SYSCALLS . get ( ) ; HEAP32 [ argp > > 2 ] = 0 ; return 0 } case 21520 : { if ( ! stream . tty ) return - ERRNO_CODES . ENOTTY ; return - ERRNO_CODES . EINVAL } case 21531 : { var argp = SYSCALLS . get ( ) ; return FS . ioctl ( stream , op , argp ) } case 21523 : { if ( ! stream . tty ) return - ERRNO_CODES . 
ENOTTY ; return 0 } case 21524 : { if ( ! stream . tty ) return - ERRNO_CODES . ENOTTY ; return 0 } default : abort ( " bad ioctl syscall " + op ) } } catch ( e ) { if ( typeof FS = = = " undefined " | | ! ( e instanceof FS . ErrnoError ) ) abort ( e ) ; return - e . errno } } function ___syscall6 ( which , varargs ) { SYSCALLS . varargs = varargs ; try { var stream = SYSCALLS . getStreamFromFD ( ) ; FS . close ( stream ) ; return 0 } catch ( e ) { if ( typeof FS = = = " undefined " | | ! ( e instanceof FS . ErrnoError ) ) abort ( e ) ; return - e . errno } } var tupleRegistrations = { } ; function runDestructors ( destructors ) { while ( destructors . length ) { var ptr = destructors . pop ( ) ; var del = destructors . pop ( ) ; del ( ptr ) } } function simpleReadValueFromPointer ( pointer ) { return this [ " fromWireType " ] ( HEAPU32 [ pointer > > 2 ] ) } var awaitingDependencies = { } ; var registeredTypes = { } ; var typeDependencies = { } ; var char_0 = 48 ; var char_9 = 57 ; function makeLegalFunctionName ( name ) { if ( undefined = = = name ) { return " _unknown " } name = name . replace ( / [ ^ a - zA - Z0 - 9_ ] / g , " $ " ) ; var f = name . charCodeAt ( 0 ) ; if ( f > = char_0 & & f < = char_9 ) { return " _ " + name } else { return name } } function createNamedFunction ( name , body ) { name = makeLegalFunctionName ( name ) ; return new Function ( " body " , " return function " + name + " ( ) { \ n " + ' " use strict " ; ' + " return body . apply ( this , arguments ) ; \ n " + " } ; \ n " ) ( body ) } function extendError ( baseErrorType , errorName ) { var errorClass = createNamedFunction ( errorName , function ( message ) { this . name = errorName ; this . message = message ; var stack = new Error ( message ) . stack ; if ( stack ! = = undefined ) { this . stack = this . toString ( ) + " \ n " + stack . replace ( / ^ Error ( : [ ^ \ n ] * ) ? \ n / , " " ) } } ) ; errorClass . prototype = Object . create ( baseErrorType . prototype ) ; errorClass . prototype . constructor = errorClass ; errorClass . prototype . toString = function ( ) { if ( this . message = = = undefined ) { return this . name } else { return this . name + " : " + this . message } } ; return errorClass } var InternalError = undefined ; function throwInternalError ( message ) { throw new InternalError ( message ) } function whenDependentTypesAreResolved ( myTypes , dependentTypes , getTypeConverters ) { myTypes . forEach ( function ( type ) { typeDependencies [ type ] = dependentTypes } ) ; function onComplete ( typeConverters ) { var myTypeConverters = getTypeConverters ( typeConverters ) ; if ( myTypeConverters . length ! = = myTypes . length ) { throwInternalError ( " Mismatched type converter count " ) } for ( var i = 0 ; i < myTypes . length ; + + i ) { registerType ( myTypes [ i ] , myTypeConverters [ i ] ) } } var typeConverters = new Array ( dependentTypes . length ) ; var unregisteredTypes = [ ] ; var registered = 0 ; dependentTypes . forEach ( function ( dt , i ) { if ( registeredTypes . hasOwnProperty ( dt ) ) { typeConverters [ i ] = registeredTypes [ dt ] } else { unregisteredTypes . push ( dt ) ; if ( ! awaitingDependencies . hasOwnProperty ( dt ) ) { awaitingDependencies [ dt ] = [ ] } awaitingDependencies [ dt ] . push ( function ( ) { typeConverters [ i ] = registeredTypes [ dt ] ; + + registered ; if ( registered = = = unregisteredTypes . length ) { onComplete ( typeConverters ) } } ) } } ) ; if ( 0 = = = unregisteredTypes . 
length ) { onComplete ( typeConverters ) } } function __embind_finalize_value_array ( rawTupleType ) { var reg = tupleRegistrations [ rawTupleType ] ; delete tupleRegistrations [ rawTupleType ] ; var elements = reg . elements ; var elementsLength = elements . length ; var elementTypes = elements . map ( function ( elt ) { return elt . getterReturnType } ) . concat ( elements . map ( function ( elt ) { return elt . setterArgumentType } ) ) ; var rawConstructor = reg . rawConstructor ; var rawDestructor = reg . rawDestructor ; whenDependentTypesAreResolved ( [ rawTupleType ] , elementTypes , function ( elementTypes ) { elements . forEach ( function ( elt , i ) { var getterReturnType = elementTypes [ i ] ; var getter = elt . getter ; var getterContext = elt . getterContext ; var setterArgumentType = elementTypes [ i + elementsLength ] ; var setter = elt . setter ; var setterContext = elt . setterContext ; elt . read = function ( ptr ) { return getterReturnType [ " fromWireType " ] ( getter ( getterContext , ptr ) ) } ; elt . write = function ( ptr , o ) { var destructors = [ ] ; setter ( setterContext , ptr , setterArgumentType [ " toWireType " ] ( destructors , o ) ) ; runDestructors ( destructors ) } } ) ; return [ { name : reg . name , " fromWireType " : function ( ptr ) { var rv = new Array ( elementsLength ) ; for ( var i = 0 ; i < elementsLength ; + + i ) { rv [ i ] = elements [ i ] . read ( ptr ) } rawDestructor ( ptr ) ; return rv } , " toWireType " : function ( destructors , o ) { if ( elementsLength ! = = o . length ) { throw new TypeError ( " Incorrect number of tuple elements for " + reg . name + " : expected = " + elementsLength + " , actual = " + o . length ) } var ptr = rawConstructor ( ) ; for ( var i = 0 ; i < elementsLength ; + + i ) { elements [ i ] . write ( ptr , o [ i ] ) } if ( destructors ! = = null ) { destructors . push ( rawDestructor , ptr ) } return ptr } , " argPackAdvance " : 8 , " readValueFromPointer " : simpleReadValueFromPointer , destructorFunction : rawDestructor } ] } ) } var structRegistrations = { } ; function __embind_finalize_value_object ( structType ) { var reg = structRegistrations [ structType ] ; delete structRegistrations [ structType ] ; var rawConstructor = reg . rawConstructor ; var rawDestructor = reg . rawDestructor ; var fieldRecords = reg . fields ; var fieldTypes = fieldRecords . map ( function ( field ) { return field . getterReturnType } ) . concat ( fieldRecords . map ( function ( field ) { return field . setterArgumentType } ) ) ; whenDependentTypesAreResolved ( [ structType ] , fieldTypes , function ( fieldTypes ) { var fields = { } ; fieldRecords . forEach ( function ( field , i ) { var fieldName = field . fieldName ; var getterReturnType = fieldTypes [ i ] ; var getter = field . getter ; var getterContext = field . getterContext ; var setterArgumentType = fieldTypes [ i + fieldRecords . length ] ; var setter = field . setter ; var setterContext = field . setterContext ; fields [ fieldName ] = { read : function ( ptr ) { return getterReturnType [ " fromWireType " ] ( getter ( getterContext , ptr ) ) } , write : function ( ptr , o ) { var destructors = [ ] ; setter ( setterContext , ptr , setterArgumentType [ " toWireType " ] ( destructors , o ) ) ; runDestructors ( destructors ) } } } ) ; return [ { name : reg . name , " fromWireType " : function ( ptr ) { var rv = { } ; for ( var i in fields ) { rv [ i ] = fields [ i ] . 
read ( ptr ) } rawDestructor ( ptr ) ; return rv } , " toWireType " : function ( destructors , o ) { for ( var fieldName in fields ) { if ( ! ( fieldName in o ) ) { throw new TypeError ( " Missing field " ) } } var ptr = rawConstructor ( ) ; for ( fieldName in fields ) { fields [ fieldName ] . write ( ptr , o [ fieldName ] ) } if ( destructors ! = = null ) { destructors . push ( rawDestructor , ptr ) } return ptr } , " argPackAdvance " : 8 , " readValueFromPointer " : simpleReadValueFromPointer , destructorFunction : rawDestructor } ] } ) } function getShiftFromSize ( size ) { switch ( size ) { case 1 : return 0 ; case 2 : return 1 ; case 4 : return 2 ; case 8 : return 3 ; default : throw new TypeError ( " Unknown type size : " + size ) } } function embind_init_charCodes ( ) { var codes = new Array ( 256 ) ; for ( var i = 0 ; i < 256 ; + + i ) { codes [ i ] = String . fromCharCode ( i ) } embind_charCodes = codes } var embind_charCodes = undefined ; function readLatin1String ( ptr ) { var ret = " " ; var c = ptr ; while ( HEAPU8 [ c ] ) { ret + = embind_charCodes [ HEAPU8 [ c + + ] ] } return ret } var BindingError = undefined ; function throwBindingError ( message ) { throw new BindingError ( message ) } function registerType ( rawType , registeredInstance , options ) { options = options | | { } ; if ( ! ( " argPackAdvance " in registeredInstance ) ) { throw new TypeError ( " registerType registeredInstance requires argPackAdvance " ) } var name = registeredInstance . name ; if ( ! rawType ) { throwBindingError ( ' type " ' + name + ' " must have a positive integer typeid pointer ' ) } if ( registeredTypes . hasOwnProperty ( rawType ) ) { if ( options . ignoreDuplicateRegistrations ) { return } else { throwBindingError ( " Cannot register type ' " + name + " ' twice " ) } } registeredTypes [ rawType ] = registeredInstance ; delete typeDependencies [ rawType ] ; if ( awaitingDependencies . hasOwnProperty ( rawType ) ) { var callbacks = awaitingDependencies [ rawType ] ; delete awaitingDependencies [ rawType ] ; callbacks . forEach ( function ( cb ) { cb ( ) } ) } } function __embind_register_bool ( rawType , name , size , trueValue , falseValue ) { var shift = getShiftFromSize ( size ) ; name = readLatin1String ( name ) ; registerType ( rawType , { name : name , " fromWireType " : function ( wt ) { return ! ! wt } , " toWireType " : function ( destructors , o ) { return o ? trueValue : falseValue } , " argPackAdvance " : 8 , " readValueFromPointer " : function ( pointer ) { var heap ; if ( size = = = 1 ) { heap = HEAP8 } else if ( size = = = 2 ) { heap = HEAP16 } else if ( size = = = 4 ) { heap = HEAP32 } else { throw new TypeError ( " Unknown boolean type size : " + name ) } return this [ " fromWireType " ] ( heap [ pointer > > shift ] ) } , destructorFunction : null } ) } function ClassHandle_isAliasOf ( other ) { if ( ! ( this instanceof ClassHandle ) ) { return false } if ( ! ( other instanceof ClassHandle ) ) { return false } var leftClass = this . $ $ . ptrType . registeredClass ; var left = this . $ $ . ptr ; var rightClass = other . $ $ . ptrType . registeredClass ; var right = other . $ $ . ptr ; while ( leftClass . baseClass ) { left = leftClass . upcast ( left ) ; leftClass = leftClass . baseClass } while ( rightClass . baseClass ) { right = rightClass . upcast ( right ) ; rightClass = rightClass . baseClass } return leftClass = = = rightClass & & left = = = right } function shallowCopyInternalPointer ( o ) { return { count : o . count , deleteScheduled : o . 
deleteScheduled , preservePointerOnDelete : o . preservePointerOnDelete , ptr : o . ptr , ptrType : o . ptrType , smartPtr : o . smartPtr , smartPtrType : o . smartPtrType } } function throwInstanceAlreadyDeleted ( obj ) { function getInstanceTypeName ( handle ) { return handle . $ $ . ptrType . registeredClass . name } throwBindingError ( getInstanceTypeName ( obj ) + " instance already deleted " ) } function ClassHandle_clone ( ) { if ( ! this . $ $ . ptr ) { throwInstanceAlreadyDeleted ( this ) } if ( this . $ $ . preservePointerOnDelete ) { this . $ $ . count . value + = 1 ; return this } else { var clone = Object . create ( Object . getPrototypeOf ( this ) , { $ $ : { value : shallowCopyInternalPointer ( this . $ $ ) } } ) ; clone . $ $ . count . value + = 1 ; clone . $ $ . deleteScheduled = false ; return clone } } function runDestructor ( handle ) { var $ $ = handle . $ $ ; if ( $ $ . smartPtr ) { $ $ . smartPtrType . rawDestructor ( $ $ . smartPtr ) } else { $ $ . ptrType . registeredClass . rawDestructor ( $ $ . ptr ) } } function ClassHandle_delete ( ) { if ( ! this . $ $ . ptr ) { throwInstanceAlreadyDeleted ( this ) } if ( this . $ $ . deleteScheduled & & ! this . $ $ . preservePointerOnDelete ) { throwBindingError ( " Object already scheduled for deletion " ) } this . $ $ . count . value - = 1 ; var toDelete = 0 = = = this . $ $ . count . value ; if ( toDelete ) { runDestructor ( this ) } if ( ! this . $ $ . preservePointerOnDelete ) { this . $ $ . smartPtr = undefined ; this . $ $ . ptr = undefined } } function ClassHandle_isDeleted ( ) { return ! this . $ $ . ptr } var delayFunction = undefined ; var deletionQueue = [ ] ; function flushPendingDeletes ( ) { while ( deletionQueue . length ) { var obj = deletionQueue . pop ( ) ; obj . $ $ . deleteScheduled = false ; obj [ " delete " ] ( ) } } function ClassHandle_deleteLater ( ) { if ( ! this . $ $ . ptr ) { throwInstanceAlreadyDeleted ( this ) } if ( this . $ $ . deleteScheduled & & ! this . $ $ . preservePointerOnDelete ) { throwBindingError ( " Object already scheduled for deletion " ) } deletionQueue . push ( this ) ; if ( deletionQueue . length = = = 1 & & delayFunction ) { delayFunction ( flushPendingDeletes ) } this . $ $ . deleteScheduled = true ; return this } function init_ClassHandle ( ) { ClassHandle . prototype [ " isAliasOf " ] = ClassHandle_isAliasOf ; ClassHandle . prototype [ " clone " ] = ClassHandle_clone ; ClassHandle . prototype [ " delete " ] = ClassHandle_delete ; ClassHandle . prototype [ " isDeleted " ] = ClassHandle_isDeleted ; ClassHandle . prototype [ " deleteLater " ] = ClassHandle_deleteLater } function ClassHandle ( ) { } var registeredPointers = { } ; function ensureOverloadTable ( proto , methodName , humanName ) { if ( undefined = = = proto [ methodName ] . overloadTable ) { var prevFunc = proto [ methodName ] ; proto [ methodName ] = function ( ) { if ( ! proto [ methodName ] . overloadTable . hasOwnProperty ( arguments . length ) ) { throwBindingError ( " Function ' " + humanName + " ' called with an invalid number of arguments ( " + arguments . length + " ) - expects one of ( " + proto [ methodName ] . overloadTable + " ) ! " ) } return proto [ methodName ] . overloadTable [ arguments . length ] . apply ( this , arguments ) } ; proto [ methodName ] . overloadTable = [ ] ; proto [ methodName ] . overloadTable [ prevFunc . argCount ] = prevFunc } } function exposePublicSymbol ( name , value , numArguments ) { if ( Module . 
hasOwnProperty ( name ) ) { if ( undefined = = = numArguments | | undefined ! = = Module [ name ] . overloadTable & & undefined ! = = Module [ name ] . overloadTable [ numArguments ] ) { throwBindingError ( " Cannot register public name ' " + name + " ' twice " ) } ensureOverloadTable ( Module , name , name ) ; if ( Module . hasOwnProperty ( numArguments ) ) { throwBindingError ( " Cannot register multiple overloads of a function with the same number of arguments ( " + numArguments + " ) ! " ) } Module [ name ] . overloadTable [ numArguments ] = value } else { Module [ name ] = value ; if ( undefined ! = = numArguments ) { Module [ name ] . numArguments = numArguments } } } function RegisteredClass ( name , constructor , instancePrototype , rawDestructor , baseClass , getActualType , upcast , downcast ) { this . name = name ; this . constructor = constructor ; this . instancePrototype = instancePrototype ; this . rawDestructor = rawDestructor ; this . baseClass = baseClass ; this . getActualType = getActualType ; this . upcast = upcast ; this . downcast = downcast ; this . pureVirtualFunctions = [ ] } function upcastPointer ( ptr , ptrClass , desiredClass ) { while ( ptrClass ! = = desiredClass ) { if ( ! ptrClass . upcast ) { throwBindingError ( " Expected null or instance of " + desiredClass . name + " , got an instance of " + ptrClass . name ) } ptr = ptrClass . upcast ( ptr ) ; ptrClass = ptrClass . baseClass } return ptr } function constNoSmartPtrRawPointerToWireType ( destructors , handle ) { if ( handle = = = null ) { if ( this . isReference ) { throwBindingError ( " null is not a valid " + this . name ) } return 0 } if ( ! handle . $ $ ) { throwBindingError ( ' Cannot pass " ' + _embind_repr ( handle ) + ' " as a ' + this . name ) } if ( ! handle . $ $ . ptr ) { throwBindingError ( " Cannot pass deleted object as a pointer of type " + this . name ) } var handleClass = handle . $ $ . ptrType . registeredClass ; var ptr = upcastPointer ( handle . $ $ . ptr , handleClass , this . registeredClass ) ; return ptr } function genericPointerToWireType ( destructors , handle ) { var ptr ; if ( handle = = = null ) { if ( this . isReference ) { throwBindingError ( " null is not a valid " + this . name ) } if ( this . isSmartPointer ) { ptr = this . rawConstructor ( ) ; if ( destructors ! = = null ) { destructors . push ( this . rawDestructor , ptr ) } return ptr } else { return 0 } } if ( ! handle . $ $ ) { throwBindingError ( ' Cannot pass " ' + _embind_repr ( handle ) + ' " as a ' + this . name ) } if ( ! handle . $ $ . ptr ) { throwBindingError ( " Cannot pass deleted object as a pointer of type " + this . name ) } if ( ! this . isConst & & handle . $ $ . ptrType . isConst ) { throwBindingError ( " Cannot convert argument of type " + ( handle . $ $ . smartPtrType ? handle . $ $ . smartPtrType . name : handle . $ $ . ptrType . name ) + " to parameter type " + this . name ) } var handleClass = handle . $ $ . ptrType . registeredClass ; ptr = upcastPointer ( handle . $ $ . ptr , handleClass , this . registeredClass ) ; if ( this . isSmartPointer ) { if ( undefined = = = handle . $ $ . smartPtr ) { throwBindingError ( " Passing raw pointer to smart pointer is illegal " ) } switch ( this . sharingPolicy ) { case 0 : if ( handle . $ $ . smartPtrType = = = this ) { ptr = handle . $ $ . smartPtr } else { throwBindingError ( " Cannot convert argument of type " + ( handle . $ $ . smartPtrType ? handle . $ $ . smartPtrType . name : handle . $ $ . ptrType . name ) + " to parameter type " + this . 
name ) } break ; case 1 : ptr = handle . $ $ . smartPtr ; break ; case 2 : if ( handle . $ $ . smartPtrType = = = this ) { ptr = handle . $ $ . smartPtr } else { var clonedHandle = handle [ " clone " ] ( ) ; ptr = this . rawShare ( ptr , __emval_register ( function ( ) { clonedHandle [ " delete " ] ( ) } ) ) ; if ( destructors ! = = null ) { destructors . push ( this . rawDestructor , ptr ) } } break ; default : throwBindingError ( " Unsupporting sharing policy " ) } } return ptr } function nonConstNoSmartPtrRawPointerToWireType ( destructors , handle ) { if ( handle = = = null ) { if ( this . isReference ) { throwBindingError ( " null is not a valid " + this . name ) } return 0 } if ( ! handle . $ $ ) { throwBindingError ( ' Cannot pass " ' + _embind_repr ( handle ) + ' " as a ' + this . name ) } if ( ! handle . $ $ . ptr ) { throwBindingError ( " Cannot pass deleted object as a pointer of type " + this . name ) } if ( handle . $ $ . ptrType . isConst ) { throwBindingError ( " Cannot convert argument of type " + handle . $ $ . ptrType . name + " to parameter type " + this . name ) } var handleClass = handle . $ $ . ptrType . registeredClass ; var ptr = upcastPointer ( handle . $ $ . ptr , handleClass , this . registeredClass ) ; return ptr } function RegisteredPointer_getPointee ( ptr ) { if ( this . rawGetPointee ) { ptr = this . rawGetPointee ( ptr ) } return ptr } function RegisteredPointer_destructor ( ptr ) { if ( this . rawDestructor ) { this . rawDestructor ( ptr ) } } function RegisteredPointer_deleteObject ( handle ) { if ( handle ! = = null ) { handle [ " delete " ] ( ) } } function downcastPointer ( ptr , ptrClass , desiredClass ) { if ( ptrClass = = = desiredClass ) { return ptr } if ( undefined = = = desiredClass . baseClass ) { return null } var rv = downcastPointer ( ptr , ptrClass , desiredClass . baseClass ) ; if ( rv = = = null ) { return null } return desiredClass . downcast ( rv ) } function getInheritedInstanceCount ( ) { return Object . keys ( registeredInstances ) . length } function getLiveInheritedInstances ( ) { var rv = [ ] ; for ( var k in registeredInstances ) { if ( registeredInstances . hasOwnProperty ( k ) ) { rv . push ( registeredInstances [ k ] ) } } return rv } function setDelayFunction ( fn ) { delayFunction = fn ; if ( deletionQueue . length & & delayFunction ) { delayFunction ( flushPendingDeletes ) } } function init_embind ( ) { Module [ " getInheritedInstanceCount " ] = getInheritedInstanceCount ; Module [ " getLiveInheritedInstances " ] = getLiveInheritedInstances ; Module [ " flushPendingDeletes " ] = flushPendingDeletes ; Module [ " setDelayFunction " ] = setDelayFunction } var registeredInstances = { } ; function getBasestPointer ( class_ , ptr ) { if ( ptr = = = undefined ) { throwBindingError ( " ptr should not be undefined " ) } while ( class_ . baseClass ) { ptr = class_ . upcast ( ptr ) ; class_ = class_ . baseClass } return ptr } function getInheritedInstance ( class_ , ptr ) { ptr = getBasestPointer ( class_ , ptr ) ; return registeredInstances [ ptr ] } function makeClassHandle ( prototype , record ) { if ( ! record . ptrType | | ! record . ptr ) { throwInternalError ( " makeClassHandle requires ptr and ptrType " ) } var hasSmartPtrType = ! ! record . smartPtrType ; var hasSmartPtr = ! ! record . smartPtr ; if ( hasSmartPtrType ! = = hasSmartPtr ) { throwInternalError ( " Both smartPtrType and smartPtr must be specified " ) } record . count = { value : 1 } ; return Object . 
create ( prototype , { $ $ : { value : record } } ) } function RegisteredPointer_fromWireType ( ptr ) { var rawPointer = this . getPointee ( ptr ) ; if ( ! rawPointer ) { this . destructor ( ptr ) ; return null } var registeredInstance = getInheritedInstance ( this . registeredClass , rawPointer ) ; if ( undefined ! = = registeredInstance ) { if ( 0 = = = registeredInstance . $ $ . count . value ) { registeredInstance . $ $ . ptr = rawPointer ; registeredInstance . $ $ . smartPtr = ptr ; return registeredInstance [ " clone " ] ( ) } else { var rv = registeredInstance [ " clone " ] ( ) ; this . destructor ( ptr ) ; return rv } } function makeDefaultHandle ( ) { if ( this . isSmartPointer ) { return makeClassHandle ( this . registeredClass . instancePrototype , { ptrType : this . pointeeType , ptr : rawPointer , smartPtrType : this , smartPtr : ptr } ) } else { return makeClassHandle ( this . registeredClass . instancePrototype , { ptrType : this , ptr : ptr } ) } } var actualType = this . registeredClass . getActualType ( rawPointer ) ; var registeredPointerRecord = registeredPointers [ actualType ] ; if ( ! registeredPointerRecord ) { return makeDefaultHandle . call ( this ) } var toType ; if ( this . isConst ) { toType = registeredPointerRecord . constPointerType } else { toType = registeredPointerRecord . pointerType } var dp = downcastPointer ( rawPointer , this . registeredClass , toType . registeredClass ) ; if ( dp = = = null ) { return makeDefaultHandle . call ( this ) } if ( this . isSmartPointer ) { return makeClassHandle ( toType . registeredClass . instancePrototype , { ptrType : toType , ptr : dp , smartPtrType : this , smartPtr : ptr } ) } else { return makeClassHandle ( toType . registeredClass . instancePrototype , { ptrType : toType , ptr : dp } ) } } function init_RegisteredPointer ( ) { RegisteredPointer . prototype . getPointee = RegisteredPointer_getPointee ; RegisteredPointer . prototype . destructor = RegisteredPointer_destructor ; RegisteredPointer . prototype [ " argPackAdvance " ] = 8 ; RegisteredPointer . prototype [ " readValueFromPointer " ] = simpleReadValueFromPointer ; RegisteredPointer . prototype [ " deleteObject " ] = RegisteredPointer_deleteObject ; RegisteredPointer . prototype [ " fromWireType " ] = RegisteredPointer_fromWireType } function RegisteredPointer ( name , registeredClass , isReference , isConst , isSmartPointer , pointeeType , sharingPolicy , rawGetPointee , rawConstructor , rawShare , rawDestructor ) { this . name = name ; this . registeredClass = registeredClass ; this . isReference = isReference ; this . isConst = isConst ; this . isSmartPointer = isSmartPointer ; this . pointeeType = pointeeType ; this . sharingPolicy = sharingPolicy ; this . rawGetPointee = rawGetPointee ; this . rawConstructor = rawConstructor ; this . rawShare = rawShare ; this . rawDestructor = rawDestructor ; if ( ! isSmartPointer & & registeredClass . baseClass = = = undefined ) { if ( isConst ) { this [ " toWireType " ] = constNoSmartPtrRawPointerToWireType ; this . destructorFunction = null } else { this [ " toWireType " ] = nonConstNoSmartPtrRawPointerToWireType ; this . destructorFunction = null } } else { this [ " toWireType " ] = genericPointerToWireType } } function replacePublicSymbol ( name , value , numArguments ) { if ( ! Module . hasOwnProperty ( name ) ) { throwInternalError ( " Replacing nonexistant public symbol " ) } if ( undefined ! = = Module [ name ] . overloadTable & & undefined ! = = numArguments ) { Module [ name ] . 
overloadTable [ numArguments ] = value } else { Module [ name ] = value ; Module [ name ] . argCount = numArguments } } function embind__requireFunction ( signature , rawFunction ) { signature = readLatin1String ( signature ) ; function makeDynCaller ( dynCall ) { var args = [ ] ; for ( var i = 1 ; i < signature . length ; + + i ) { args . push ( " a " + i ) } var name = " dynCall_ " + signature + " _ " + rawFunction ; var body = " return function " + name + " ( " + args . join ( " , " ) + " ) { \ n " ; body + = " return dynCall ( rawFunction " + ( args . length ? " , " : " " ) + args . join ( " , " ) + " ) ; \ n " ; body + = " } ; \ n " ; return new Function ( " dynCall " , " rawFunction " , body ) ( dynCall , rawFunction ) } var fp ; if ( Module [ " FUNCTION_TABLE_ " + signature ] ! = = undefined ) { fp = Module [ " FUNCTION_TABLE_ " + signature ] [ rawFunction ] } else if ( typeof FUNCTION_TABLE ! = = " undefined " ) { fp = FUNCTION_TABLE [ rawFunction ] } else { var dc = Module [ " dynCall_ " + signature ] ; if ( dc = = = undefined ) { dc = Module [ " dynCall_ " + signature . replace ( / f / g , " d " ) ] ; if ( dc = = = undefined ) { throwBindingError ( " No dynCall invoker for signature : " + signature ) } } fp = makeDynCaller ( dc ) } if ( typeof fp ! = = " function " ) { throwBindingError ( " unknown function pointer with signature " + signature + " : " + rawFunction ) } return fp } var UnboundTypeError = undefined ; function getTypeName ( type ) { var ptr = ___getTypeName ( type ) ; var rv = readLatin1String ( ptr ) ; _free ( ptr ) ; return rv } function throwUnboundTypeError ( message , types ) { var unboundTypes = [ ] ; var seen = { } ; function visit ( type ) { if ( seen [ type ] ) { return } if ( registeredTypes [ type ] ) { return } if ( typeDependencies [ type ] ) { typeDependencies [ type ] . forEach ( visit ) ; return } unboundTypes . push ( type ) ; seen [ type ] = true } types . forEach ( visit ) ; throw new UnboundTypeError ( message + " : " + unboundTypes . map ( getTypeName ) . join ( [ " , " ] ) ) } function __embind_register_class ( rawType , rawPointerType , rawConstPointerType , baseClassRawType , getActualTypeSignature , getActualType , upcastSignature , upcast , downcastSignature , downcast , name , destructorSignature , rawDestructor ) { name = readLatin1String ( name ) ; getActualType = embind__requireFunction ( getActualTypeSignature , getActualType ) ; if ( upcast ) { upcast = embind__requireFunction ( upcastSignature , upcast ) } if ( downcast ) { downcast = embind__requireFunction ( downcastSignature , downcast ) } rawDestructor = embind__requireFunction ( destructorSignature , rawDestructor ) ; var legalFunctionName = makeLegalFunctionName ( name ) ; exposePublicSymbol ( legalFunctionName , function ( ) { throwUnboundTypeError ( " Cannot construct " + name + " due to unbound types " , [ baseClassRawType ] ) } ) ; whenDependentTypesAreResolved ( [ rawType , rawPointerType , rawConstPointerType ] , baseClassRawType ? [ baseClassRawType ] : [ ] , function ( base ) { base = base [ 0 ] ; var baseClass ; var basePrototype ; if ( baseClassRawType ) { baseClass = base . registeredClass ; basePrototype = baseClass . instancePrototype } else { basePrototype = ClassHandle . prototype } var constructor = createNamedFunction ( legalFunctionName , function ( ) { if ( Object . getPrototypeOf ( this ) ! = = instancePrototype ) { throw new BindingError ( " Use ' new ' to construct " + name ) } if ( undefined = = = registeredClass . 
constructor_body ) { throw new BindingError ( name + " has no accessible constructor " ) } var body = registeredClass . constructor_body [ arguments . length ] ; if ( undefined = = = body ) { throw new BindingError ( " Tried to invoke ctor of " + name + " with invalid number of parameters ( " + arguments . length + " ) - expected ( " + Object . keys ( registeredClass . constructor_body ) . toString ( ) + " ) parameters instead ! " ) } return body . apply ( this , arguments ) } ) ; var instancePrototype = Object . create ( basePrototype , { constructor : { value : constructor } } ) ; constructor . prototype = instancePrototype ; var registeredClass = new RegisteredClass ( name , constructor , instancePrototype , rawDestructor , baseClass , getActualType , upcast , downcast ) ; var referenceConverter = new RegisteredPointer ( name , registeredClass , true , false , false ) ; var pointerConverter = new RegisteredPointer ( name + " * " , registeredClass , false , false , false ) ; var constPointerConverter = new RegisteredPointer ( name + " const * " , registeredClass , false , true , false ) ; registeredPointers [ rawType ] = { pointerType : pointerConverter , constPointerType : constPointerConverter } ; replacePublicSymbol ( legalFunctionName , constructor ) ; return [ referenceConverter , pointerConverter , constPointerConverter ] } ) } function new_ ( constructor , argumentList ) { if ( ! ( constructor instanceof Function ) ) { throw new TypeError ( " new_ called with constructor type " + typeof constructor + " which is not a function " ) } var dummy = createNamedFunction ( constructor . name | | " unknownFunctionName " , function ( ) { } ) ; dummy . prototype = constructor . prototype ; var obj = new dummy ; var r = constructor . apply ( obj , argumentList ) ; return r instanceof Object ? r : obj } function craftInvokerFunction ( humanName , argTypes , classType , cppInvokerFunc , cppTargetFunc ) { var argCount = argTypes . length ; if ( argCount < 2 ) { throwBindingError ( " argTypes array size mismatch ! Must at least get return value and ' this ' types ! " ) } var isClassMethodFunc = argTypes [ 1 ] ! = = null & & classType ! = = null ; var needsDestructorStack = false ; for ( var i = 1 ; i < argTypes . length ; + + i ) { if ( argTypes [ i ] ! = = null & & argTypes [ i ] . destructorFunction = = = undefined ) { needsDestructorStack = true ; break } } var returns = argTypes [ 0 ] . name ! = = " void " ; var argsList = " " ; var argsListWired = " " ; for ( var i = 0 ; i < argCount - 2 ; + + i ) { argsList + = ( i ! = = 0 ? " , " : " " ) + " arg " + i ; argsListWired + = ( i ! = = 0 ? " , " : " " ) + " arg " + i + " Wired " } var invokerFnBody = " return function " + makeLegalFunctionName ( humanName ) + " ( " + argsList + " ) { \ n " + " if ( arguments . length ! = = " + ( argCount - 2 ) + " ) { \ n " + " throwBindingError ( ' function " + humanName + " called with ' + arguments . length + ' arguments , expected " + ( argCount - 2 ) + " args ! ' ) ; \ n " + " } \ n " ; if ( needsDestructorStack ) { invokerFnBody + = " var destructors = [ ] ; \ n " } var dtorStack = needsDestructorStack ? " destructors " : " null " ; var args1 = [ " throwBindingError " , " invoker " , " fn " , " runDestructors " , " retType " , " classParam " ] ; var args2 = [ throwBindingError , cppInvokerFunc , cppTargetFunc , runDestructors , argTypes [ 0 ] , argTypes [ 1 ] ] ; if ( isClassMethodFunc ) { invokerFnBody + = " var thisWired = classParam . 
toWireType ( " + dtorStack + " , this ) ; \ n " } for ( var i = 0 ; i < argCount - 2 ; + + i ) { invokerFnBody + = " var arg " + i + " Wired = argType " + i + " . toWireType ( " + dtorStack + " , arg " + i + " ) ; / / " + argTypes [ i + 2 ] . name + " \ n " ; args1 . push ( " argType " + i ) ; args2 . push ( argTypes [ i + 2 ] ) } if ( isClassMethodFunc ) { argsListWired = " thisWired " + ( argsListWired . length > 0 ? " , " : " " ) + argsListWired } invokerFnBody + = ( returns ? " var rv = " : " " ) + " invoker ( fn " + ( argsListWired . length > 0 ? " , " : " " ) + argsListWired + " ) ; \ n " ; if ( needsDestructorStack ) { invokerFnBody + = " runDestructors ( destructors ) ; \ n " } else { for ( var i = isClassMethodFunc ? 1 : 2 ; i < argTypes . length ; + + i ) { var paramName = i = = = 1 ? " thisWired " : " arg " + ( i - 2 ) + " Wired " ; if ( argTypes [ i ] . destructorFunction ! = = null ) { invokerFnBody + = paramName + " _dtor ( " + paramName + " ) ; / / " + argTypes [ i ] . name + " \ n " ; args1 . push ( paramName + " _dtor " ) ; args2 . push ( argTypes [ i ] . destructorFunction ) } } } if ( returns ) { invokerFnBody + = " var ret = retType . fromWireType ( rv ) ; \ n " + " return ret ; \ n " } else { } invokerFnBody + = " } \ n " ; args1 . push ( invokerFnBody ) ; var invokerFunction = new_ ( Function , args1 ) . apply ( null , args2 ) ; return invokerFunction } function heap32VectorToArray ( count , firstElement ) { var array = [ ] ; for ( var i = 0 ; i < count ; i + + ) { array . push ( HEAP32 [ ( firstElement > > 2 ) + i ] ) } return array } function __embind_register_class_class_function ( rawClassType , methodName , argCount , rawArgTypesAddr , invokerSignature , rawInvoker , fn ) { var rawArgTypes = heap32VectorToArray ( argCount , rawArgTypesAddr ) ; methodName = readLatin1String ( methodName ) ; rawInvoker = embind__requireFunction ( invokerSignature , rawInvoker ) ; whenDependentTypesAreResolved ( [ ] , [ rawClassType ] , function ( classType ) { classType = classType [ 0 ] ; var humanName = classType . name + " . " + methodName ; function unboundTypesHandler ( ) { throwUnboundTypeError ( " Cannot call " + humanName + " due to unbound types " , rawArgTypes ) } var proto = classType . registeredClass . constructor ; if ( undefined = = = proto [ methodName ] ) { unboundTypesHandler . argCount = argCount - 1 ; proto [ methodName ] = unboundTypesHandler } else { ensureOverloadTable ( proto , methodName , humanName ) ; proto [ methodName ] . overloadTable [ argCount - 1 ] = unboundTypesHandler } whenDependentTypesAreResolved ( [ ] , rawArgTypes , function ( argTypes ) { var invokerArgsArray = [ argTypes [ 0 ] , null ] . concat ( argTypes . slice ( 1 ) ) ; var func = craftInvokerFunction ( humanName , invokerArgsArray , null , rawInvoker , fn ) ; if ( undefined = = = proto [ methodName ] . overloadTable ) { func . argCount = argCount - 1 ; proto [ methodName ] = func } else { proto [ methodName ] . overloadTable [ argCount - 1 ] = func } return [ ] } ) ; return [ ] } ) } function __embind_register_class_constructor ( rawClassType , argCount , rawArgTypesAddr , invokerSignature , invoker , rawConstructor ) { var rawArgTypes = heap32VectorToArray ( argCount , rawArgTypesAddr ) ; invoker = embind__requireFunction ( invokerSignature , invoker ) ; whenDependentTypesAreResolved ( [ ] , [ rawClassType ] , function ( classType ) { classType = classType [ 0 ] ; var humanName = " constructor " + classType . name ; if ( undefined = = = classType . registeredClass . 
constructor_body ) { classType . registeredClass . constructor_body = [ ] } if ( undefined ! = = classType . registeredClass . constructor_body [ argCount - 1 ] ) { throw new BindingError ( " Cannot register multiple constructors with identical number of parameters ( " + ( argCount - 1 ) + " ) for class ' " + classType . name + " ' ! Overload resolution is currently only performed using the parameter count , not actual type info ! " ) } classType . registeredClass . constructor_body [ argCount - 1 ] = function unboundTypeHandler ( ) { throwUnboundTypeError ( " Cannot construct " + classType . name + " due to unbound types " , rawArgTypes ) } ; whenDependentTypesAreResolved ( [ ] , rawArgTypes , function ( argTypes ) { classType . registeredClass . constructor_body [ argCount - 1 ] = function constructor_body ( ) { if ( arguments . length ! = = argCount - 1 ) { throwBindingError ( humanName + " called with " + arguments . length + " arguments , expected " + ( argCount - 1 ) ) } var destructors = [ ] ; var args = new Array ( argCount ) ; args [ 0 ] = rawConstructor ; for ( var i = 1 ; i < argCount ; + + i ) { args [ i ] = argTypes [ i ] [ " toWireType " ] ( destructors , arguments [ i - 1 ] ) } var ptr = invoker . apply ( null , args ) ; runDestructors ( destructors ) ; return argTypes [ 0 ] [ " fromWireType " ] ( ptr ) } ; return [ ] } ) ; return [ ] } ) } function __embind_register_class_function ( rawClassType , methodName , argCount , rawArgTypesAddr , invokerSignature , rawInvoker , context , isPureVirtual ) { var rawArgTypes = heap32VectorToArray ( argCount , rawArgTypesAddr ) ; methodName = readLatin1String ( methodName ) ; rawInvoker = embind__requireFunction ( invokerSignature , rawInvoker ) ; whenDependentTypesAreResolved ( [ ] , [ rawClassType ] , function ( classType ) { classType = classType [ 0 ] ; var humanName = classType . name + " . " + methodName ; if ( isPureVirtual ) { classType . registeredClass . pureVirtualFunctions . push ( methodName ) } function unboundTypesHandler ( ) { throwUnboundTypeError ( " Cannot call " + humanName + " due to unbound types " , rawArgTypes ) } var proto = classType . registeredClass . instancePrototype ; var method = proto [ methodName ] ; if ( undefined = = = method | | undefined = = = method . overloadTable & & method . className ! = = classType . name & & method . argCount = = = argCount - 2 ) { unboundTypesHandler . argCount = argCount - 2 ; unboundTypesHandler . className = classType . name ; proto [ methodName ] = unboundTypesHandler } else { ensureOverloadTable ( proto , methodName , humanName ) ; proto [ methodName ] . overloadTable [ argCount - 2 ] = unboundTypesHandler } whenDependentTypesAreResolved ( [ ] , rawArgTypes , function ( argTypes ) { var memberFunction = craftInvokerFunction ( humanName , argTypes , classType , rawInvoker , context ) ; if ( undefined = = = proto [ methodName ] . overloadTable ) { memberFunction . argCount = argCount - 2 ; proto [ methodName ] = memberFunction } else { proto [ methodName ] . overloadTable [ argCount - 2 ] = memberFunction } return [ ] } ) ; return [ ] } ) } function validateThis ( this_ , classType , humanName ) { if ( ! ( this_ instanceof Object ) ) { throwBindingError ( humanName + ' with invalid " this " : ' + this_ ) } if ( ! ( this_ instanceof classType . registeredClass . constructor ) ) { throwBindingError ( humanName + ' incompatible with " this " of type ' + this_ . constructor . name ) } if ( ! this_ . $ $ . 
ptr ) { throwBindingError ( " cannot call emscripten binding method " + humanName + " on deleted object " ) } return upcastPointer ( this_ . $ $ . ptr , this_ . $ $ . ptrType . registeredClass , classType . registeredClass ) } function __embind_register_class_property ( classType , fieldName , getterReturnType , getterSignature , getter , getterContext , setterArgumentType , setterSignature , setter , setterContext ) { fieldName = readLatin1String ( fieldName ) ; getter = embind__requireFunction ( getterSignature , getter ) ; whenDependentTypesAreResolved ( [ ] , [ classType ] , function ( classType ) { classType = classType [ 0 ] ; var humanName = classType . name + " . " + fieldName ; var desc = { get : function ( ) { throwUnboundTypeError ( " Cannot access " + humanName + " due to unbound types " , [ getterReturnType , setterArgumentType ] ) } , enumerable : true , configurable : true } ; if ( setter ) { desc . set = function ( ) { throwUnboundTypeError ( " Cannot access " + humanName + " due to unbound types " , [ getterReturnType , setterArgumentType ] ) } } else { desc . set = function ( v ) { throwBindingError ( humanName + " is a read - only property " ) } } Object . defineProperty ( classType . registeredClass . instancePrototype , fieldName , desc ) ; whenDependentTypesAreResolved ( [ ] , setter ? [ getterReturnType , setterArgumentType ] : [ getterReturnType ] , function ( types ) { var getterReturnType = types [ 0 ] ; var desc = { get : function ( ) { var ptr = validateThis ( this , classType , humanName + " getter " ) ; return getterReturnType [ " fromWireType " ] ( getter ( getterContext , ptr ) ) } , enumerable : true } ; if ( setter ) { setter = embind__requireFunction ( setterSignature , setter ) ; var setterArgumentType = types [ 1 ] ; desc . set = function ( v ) { var ptr = validateThis ( this , classType , humanName + " setter " ) ; var destructors = [ ] ; setter ( setterContext , ptr , setterArgumentType [ " toWireType " ] ( destructors , v ) ) ; runDestructors ( destructors ) } } Object . defineProperty ( classType . registeredClass . instancePrototype , fieldName , desc ) ; return [ ] } ) ; return [ ] } ) } var emval_free_list = [ ] ; var emval_handle_array = [ { } , { value : undefined } , { value : null } , { value : true } , { value : false } ] ; function __emval_decref ( handle ) { if ( handle > 4 & & 0 = = = - - emval_handle_array [ handle ] . refcount ) { emval_handle_array [ handle ] = undefined ; emval_free_list . push ( handle ) } } function count_emval_handles ( ) { var count = 0 ; for ( var i = 5 ; i < emval_handle_array . length ; + + i ) { if ( emval_handle_array [ i ] ! = = undefined ) { + + count } } return count } function get_first_emval ( ) { for ( var i = 5 ; i < emval_handle_array . length ; + + i ) { if ( emval_handle_array [ i ] ! = = undefined ) { return emval_handle_array [ i ] } } return null } function init_emval ( ) { Module [ " count_emval_handles " ] = count_emval_handles ; Module [ " get_first_emval " ] = get_first_emval } function __emval_register ( value ) { switch ( value ) { case undefined : { return 1 } case null : { return 2 } case true : { return 3 } case false : { return 4 } default : { var handle = emval_free_list . length ? emval_free_list . pop ( ) : emval_handle_array . 
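/*
 * emval support: emval_handle_array maps small integer handles to arbitrary JS values, with slots
 * 1-4 permanently reserved for undefined, null, true and false. __emval_register allocates a
 * refcounted slot (reusing indices from emval_free_list), __emval_incref and __emval_decref adjust
 * the count, and the slot is recycled once the count drops to zero. These handles are what
 * emscripten::val objects on the C++ side refer to.
 */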
length ; emval_handle_array [ handle ] = { refcount : 1 , value : value } ; return handle } } } function __embind_register_emval ( rawType , name ) { name = readLatin1String ( name ) ; registerType ( rawType , { name : name , " fromWireType " : function ( handle ) { var rv = emval_handle_array [ handle ] . value ; __emval_decref ( handle ) ; return rv } , " toWireType " : function ( destructors , value ) { return __emval_register ( value ) } , " argPackAdvance " : 8 , " readValueFromPointer " : simpleReadValueFromPointer , destructorFunction : null } ) } function enumReadValueFromPointer ( name , shift , signed ) { switch ( shift ) { case 0 : return function ( pointer ) { var heap = signed ? HEAP8 : HEAPU8 ; return this [ " fromWireType " ] ( heap [ pointer ] ) } ; case 1 : return function ( pointer ) { var heap = signed ? HEAP16 : HEAPU16 ; return this [ " fromWireType " ] ( heap [ pointer > > 1 ] ) } ; case 2 : return function ( pointer ) { var heap = signed ? HEAP32 : HEAPU32 ; return this [ " fromWireType " ] ( heap [ pointer > > 2 ] ) } ; default : throw new TypeError ( " Unknown integer type : " + name ) } } function __embind_register_enum ( rawType , name , size , isSigned ) { var shift = getShiftFromSize ( size ) ; name = readLatin1String ( name ) ; function ctor ( ) { } ctor . values = { } ; registerType ( rawType , { name : name , constructor : ctor , " fromWireType " : function ( c ) { return this . constructor . values [ c ] } , " toWireType " : function ( destructors , c ) { return c . value } , " argPackAdvance " : 8 , " readValueFromPointer " : enumReadValueFromPointer ( name , shift , isSigned ) , destructorFunction : null } ) ; exposePublicSymbol ( name , ctor ) } function requireRegisteredType ( rawType , humanName ) { var impl = registeredTypes [ rawType ] ; if ( undefined = = = impl ) { throwBindingError ( humanName + " has unknown type " + getTypeName ( rawType ) ) } return impl } function __embind_register_enum_value ( rawEnumType , name , enumValue ) { var enumType = requireRegisteredType ( rawEnumType , " enum " ) ; name = readLatin1String ( name ) ; var Enum = enumType . constructor ; var Value = Object . create ( enumType . constructor . prototype , { value : { value : enumValue } , constructor : { value : createNamedFunction ( enumType . name + " _ " + name , function ( ) { } ) } } ) ; Enum . values [ enumValue ] = Value ; Enum [ name ] = Value } function _embind_repr ( v ) { if ( v = = = null ) { return " null " } var t = typeof v ; if ( t = = = " object " | | t = = = " array " | | t = = = " function " ) { return v . toString ( ) } else { return " " + v } } function floatReadValueFromPointer ( name , shift ) { switch ( shift ) { case 2 : return function ( pointer ) { return this [ " fromWireType " ] ( HEAPF32 [ pointer > > 2 ] ) } ; case 3 : return function ( pointer ) { return this [ " fromWireType " ] ( HEAPF64 [ pointer > > 3 ] ) } ; default : throw new TypeError ( " Unknown float type : " + name ) } } function __embind_register_float ( rawType , name , size ) { var shift = getShiftFromSize ( size ) ; name = readLatin1String ( name ) ; registerType ( rawType , { name : name , " fromWireType " : function ( value ) { return value } , " toWireType " : function ( destructors , value ) { if ( typeof value ! = = " number " & & typeof value ! = = " boolean " ) { throw new TypeError ( ' Cannot convert " ' + _embind_repr ( value ) + ' " to ' + this . 
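/*
 * Primitive registrations: __embind_register_enum and _enum_value expose C++ enums as JS objects
 * with a values table, __embind_register_float accepts numbers (and booleans) unchanged, and
 * __embind_register_integer range-checks incoming JS numbers against [minRange, maxRange] before
 * coercing them to the declared signedness.
 */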
name ) } return value } , " argPackAdvance " : 8 , " readValueFromPointer " : floatReadValueFromPointer ( name , shift ) , destructorFunction : null } ) } function __embind_register_function ( name , argCount , rawArgTypesAddr , signature , rawInvoker , fn ) { var argTypes = heap32VectorToArray ( argCount , rawArgTypesAddr ) ; name = readLatin1String ( name ) ; rawInvoker = embind__requireFunction ( signature , rawInvoker ) ; exposePublicSymbol ( name , function ( ) { throwUnboundTypeError ( " Cannot call " + name + " due to unbound types " , argTypes ) } , argCount - 1 ) ; whenDependentTypesAreResolved ( [ ] , argTypes , function ( argTypes ) { var invokerArgsArray = [ argTypes [ 0 ] , null ] . concat ( argTypes . slice ( 1 ) ) ; replacePublicSymbol ( name , craftInvokerFunction ( name , invokerArgsArray , null , rawInvoker , fn ) , argCount - 1 ) ; return [ ] } ) } function integerReadValueFromPointer ( name , shift , signed ) { switch ( shift ) { case 0 : return signed ? function readS8FromPointer ( pointer ) { return HEAP8 [ pointer ] } : function readU8FromPointer ( pointer ) { return HEAPU8 [ pointer ] } ; case 1 : return signed ? function readS16FromPointer ( pointer ) { return HEAP16 [ pointer > > 1 ] } : function readU16FromPointer ( pointer ) { return HEAPU16 [ pointer > > 1 ] } ; case 2 : return signed ? function readS32FromPointer ( pointer ) { return HEAP32 [ pointer > > 2 ] } : function readU32FromPointer ( pointer ) { return HEAPU32 [ pointer > > 2 ] } ; default : throw new TypeError ( " Unknown integer type : " + name ) } } function __embind_register_integer ( primitiveType , name , size , minRange , maxRange ) { name = readLatin1String ( name ) ; if ( maxRange = = = - 1 ) { maxRange = 4294967295 } var shift = getShiftFromSize ( size ) ; var fromWireType = function ( value ) { return value } ; if ( minRange = = = 0 ) { var bitshift = 32 - 8 * size ; fromWireType = function ( value ) { return value < < bitshift > > > bitshift } } var isUnsignedType = name . indexOf ( " unsigned " ) ! = - 1 ; registerType ( primitiveType , { name : name , " fromWireType " : fromWireType , " toWireType " : function ( destructors , value ) { if ( typeof value ! = = " number " & & typeof value ! = = " boolean " ) { throw new TypeError ( ' Cannot convert " ' + _embind_repr ( value ) + ' " to ' + this . name ) } if ( value < minRange | | value > maxRange ) { throw new TypeError ( ' Passing a number " ' + _embind_repr ( value ) + ' " from JS side to C / C + + side to an argument of type " ' + name + ' " , which is outside the valid range [ ' + minRange + " , " + maxRange + " ] ! " ) } return isUnsignedType ? value > > > 0 : value | 0 } , " argPackAdvance " : 8 , " readValueFromPointer " : integerReadValueFromPointer ( name , shift , minRange ! 
= = 0 ) , destructorFunction : null } ) } function __embind_register_memory_view ( rawType , dataTypeIndex , name ) { var typeMapping = [ Int8Array , Uint8Array , Int16Array , Uint16Array , Int32Array , Uint32Array , Float32Array , Float64Array ] ; var TA = typeMapping [ dataTypeIndex ] ; function decodeMemoryView ( handle ) { handle = handle > > 2 ; var heap = HEAPU32 ; var size = heap [ handle ] ; var data = heap [ handle + 1 ] ; return new TA ( heap [ " buffer " ] , data , size ) } name = readLatin1String ( name ) ; registerType ( rawType , { name : name , " fromWireType " : decodeMemoryView , " argPackAdvance " : 8 , " readValueFromPointer " : decodeMemoryView } , { ignoreDuplicateRegistrations : true } ) } function __embind_register_std_string ( rawType , name ) { name = readLatin1String ( name ) ; var stdStringIsUTF8 = name = = = " std : : string " ; registerType ( rawType , { name : name , " fromWireType " : function ( value ) { var length = HEAPU32 [ value > > 2 ] ; var str ; if ( stdStringIsUTF8 ) { var endChar = HEAPU8 [ value + 4 + length ] ; var endCharSwap = 0 ; if ( endChar ! = 0 ) { endCharSwap = endChar ; HEAPU8 [ value + 4 + length ] = 0 } var decodeStartPtr = value + 4 ; for ( var i = 0 ; i < = length ; + + i ) { var currentBytePtr = value + 4 + i ; if ( HEAPU8 [ currentBytePtr ] = = 0 ) { var stringSegment = UTF8ToString ( decodeStartPtr ) ; if ( str = = = undefined ) str = stringSegment ; else { str + = String . fromCharCode ( 0 ) ; str + = stringSegment } decodeStartPtr = currentBytePtr + 1 } } if ( endCharSwap ! = 0 ) HEAPU8 [ value + 4 + length ] = endCharSwap } else { var a = new Array ( length ) ; for ( var i = 0 ; i < length ; + + i ) { a [ i ] = String . fromCharCode ( HEAPU8 [ value + 4 + i ] ) } str = a . join ( " " ) } _free ( value ) ; return str } , " toWireType " : function ( destructors , value ) { if ( value instanceof ArrayBuffer ) { value = new Uint8Array ( value ) } var getLength ; var valueIsOfTypeString = typeof value = = = " string " ; if ( ! ( valueIsOfTypeString | | value instanceof Uint8Array | | value instanceof Uint8ClampedArray | | value instanceof Int8Array ) ) { throwBindingError ( " Cannot pass non - string to std : : string " ) } if ( stdStringIsUTF8 & & valueIsOfTypeString ) { getLength = function ( ) { return lengthBytesUTF8 ( value ) } } else { getLength = function ( ) { return value . length } } var length = getLength ( ) ; var ptr = _malloc ( 4 + length + 1 ) ; HEAPU32 [ ptr > > 2 ] = length ; if ( stdStringIsUTF8 & & valueIsOfTypeString ) { stringToUTF8 ( value , ptr + 4 , length + 1 ) } else { if ( valueIsOfTypeString ) { for ( var i = 0 ; i < length ; + + i ) { var charCode = value . charCodeAt ( i ) ; if ( charCode > 255 ) { _free ( ptr ) ; throwBindingError ( " String has UTF - 16 code units that do not fit in 8 bits " ) } HEAPU8 [ ptr + 4 + i ] = charCode } } else { for ( var i = 0 ; i < length ; + + i ) { HEAPU8 [ ptr + 4 + i ] = value [ i ] } } } if ( destructors ! = = null ) { destructors . 
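/*
 * __embind_register_std_string marshals std::string as a 32-bit length followed by the bytes on
 * the Emscripten heap (decoded as UTF-8 for "std::string", raw 8-bit data otherwise, also
 * accepting typed arrays on input); toWireType mallocs that buffer and registers _free as its
 * destructor, and fromWireType frees it after building the JS string.
 * __embind_register_std_wstring does the same for 2- and 4-byte character types.
 */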
push ( _free , ptr ) } return ptr } , " argPackAdvance " : 8 , " readValueFromPointer " : simpleReadValueFromPointer , destructorFunction : function ( ptr ) { _free ( ptr ) } } ) } function __embind_register_std_wstring ( rawType , charSize , name ) { name = readLatin1String ( name ) ; var getHeap , shift ; if ( charSize = = = 2 ) { getHeap = function ( ) { return HEAPU16 } ; shift = 1 } else if ( charSize = = = 4 ) { getHeap = function ( ) { return HEAPU32 } ; shift = 2 } registerType ( rawType , { name : name , " fromWireType " : function ( value ) { var HEAP = getHeap ( ) ; var length = HEAPU32 [ value > > 2 ] ; var a = new Array ( length ) ; var start = value + 4 > > shift ; for ( var i = 0 ; i < length ; + + i ) { a [ i ] = String . fromCharCode ( HEAP [ start + i ] ) } _free ( value ) ; return a . join ( " " ) } , " toWireType " : function ( destructors , value ) { var HEAP = getHeap ( ) ; var length = value . length ; var ptr = _malloc ( 4 + length * charSize ) ; HEAPU32 [ ptr > > 2 ] = length ; var start = ptr + 4 > > shift ; for ( var i = 0 ; i < length ; + + i ) { HEAP [ start + i ] = value . charCodeAt ( i ) } if ( destructors ! = = null ) { destructors . push ( _free , ptr ) } return ptr } , " argPackAdvance " : 8 , " readValueFromPointer " : simpleReadValueFromPointer , destructorFunction : function ( ptr ) { _free ( ptr ) } } ) } function __embind_register_value_array ( rawType , name , constructorSignature , rawConstructor , destructorSignature , rawDestructor ) { tupleRegistrations [ rawType ] = { name : readLatin1String ( name ) , rawConstructor : embind__requireFunction ( constructorSignature , rawConstructor ) , rawDestructor : embind__requireFunction ( destructorSignature , rawDestructor ) , elements : [ ] } } function __embind_register_value_array_element ( rawTupleType , getterReturnType , getterSignature , getter , getterContext , setterArgumentType , setterSignature , setter , setterContext ) { tupleRegistrations [ rawTupleType ] . elements . push ( { getterReturnType : getterReturnType , getter : embind__requireFunction ( getterSignature , getter ) , getterContext : getterContext , setterArgumentType : setterArgumentType , setter : embind__requireFunction ( setterSignature , setter ) , setterContext : setterContext } ) } function __embind_register_value_object ( rawType , name , constructorSignature , rawConstructor , destructorSignature , rawDestructor ) { structRegistrations [ rawType ] = { name : readLatin1String ( name ) , rawConstructor : embind__requireFunction ( constructorSignature , rawConstructor ) , rawDestructor : embind__requireFunction ( destructorSignature , rawDestructor ) , fields : [ ] } } function __embind_register_value_object_field ( structType , fieldName , getterReturnType , getterSignature , getter , getterContext , setterArgumentType , setterSignature , setter , setterContext ) { structRegistrations [ structType ] . fields . 
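/*
 * __embind_register_value_array(_element) and __embind_register_value_object(_field) only record
 * the constructor, destructor and per-element getter/setter thunks here, in tupleRegistrations and
 * structRegistrations; the actual tuple and struct converters are finalized elsewhere once all
 * element types are known.
 */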
push ( { fieldName : readLatin1String ( fieldName ) , getterReturnType : getterReturnType , getter : embind__requireFunction ( getterSignature , getter ) , getterContext : getterContext , setterArgumentType : setterArgumentType , setter : embind__requireFunction ( setterSignature , setter ) , setterContext : setterContext } ) } function __embind_register_void ( rawType , name ) { name = readLatin1String ( name ) ; registerType ( rawType , { isVoid : true , name : name , " argPackAdvance " : 0 , " fromWireType " : function ( ) { return undefined } , " toWireType " : function ( destructors , o ) { return undefined } } ) } function requireHandle ( handle ) { if ( ! handle ) { throwBindingError ( " Cannot use deleted val . handle = " + handle ) } return emval_handle_array [ handle ] . value } function __emval_as ( handle , returnType , destructorsRef ) { handle = requireHandle ( handle ) ; returnType = requireRegisteredType ( returnType , " emval : : as " ) ; var destructors = [ ] ; var rd = __emval_register ( destructors ) ; HEAP32 [ destructorsRef > > 2 ] = rd ; return returnType [ " toWireType " ] ( destructors , handle ) } function __emval_get_property ( handle , key ) { handle = requireHandle ( handle ) ; key = requireHandle ( key ) ; return __emval_register ( handle [ key ] ) } function __emval_incref ( handle ) { if ( handle > 4 ) { emval_handle_array [ handle ] . refcount + = 1 } } var emval_symbols = { } ; function getStringOrSymbol ( address ) { var symbol = emval_symbols [ address ] ; if ( symbol = = = undefined ) { return readLatin1String ( address ) } else { return symbol } } function __emval_new_cstring ( v ) { return __emval_register ( getStringOrSymbol ( v ) ) } function __emval_run_destructors ( handle ) { var destructors = emval_handle_array [ handle ] . value ; runDestructors ( destructors ) ; __emval_decref ( handle ) } function __emval_take_value ( type , argv ) { type = requireRegisteredType ( type , " _emval_take_value " ) ; var v = type [ " readValueFromPointer " ] ( argv ) ; return __emval_register ( v ) } function _abort ( ) { Module [ " abort " ] ( ) } function _emscripten_get_now ( ) { abort ( ) } function _emscripten_get_now_is_monotonic ( ) { return 0 | | ENVIRONMENT_IS_NODE | | typeof dateNow ! = = " undefined " | | ( ENVIRONMENT_IS_WEB | | ENVIRONMENT_IS_WORKER ) & & self [ " performance " ] & & self [ " performance " ] [ " now " ] } function _clock_gettime ( clk_id , tp ) { var now ; if ( clk_id = = = 0 ) { now = Date . now ( ) } else if ( clk_id = = = 1 & & _emscripten_get_now_is_monotonic ( ) ) { now = _emscripten_get_now ( ) } else { ___setErrNo ( 22 ) ; return - 1 } HEAP32 [ tp > > 2 ] = now / 1e3 | 0 ; HEAP32 [ tp + 4 > > 2 ] = now % 1e3 * 1e3 * 1e3 | 0 ; return 0 } function _emscripten_get_heap_size ( ) { return TOTAL_MEMORY } var GL = { counter : 1 , lastError : 0 , buffers : [ ] , mappedBuffers : { } , programs : [ ] , framebuffers : [ ] , renderbuffers : [ ] , textures : [ ] , uniforms : [ ] , shaders : [ ] , vaos : [ ] , contexts : { } , currentContext : null , offscreenCanvases : { } , timerQueriesEXT : [ ] , queries : [ ] , samplers : [ ] , transformFeedbacks : [ ] , syncs : [ ] , programInfos : { } , stringCache : { } , stringiCache : { } , unpackAlignment : 4 , init : function ( ) { GL . miniTempBuffer = new Float32Array ( GL . MINI_TEMP_BUFFER_SIZE ) ; for ( var i = 0 ; i < GL . MINI_TEMP_BUFFER_SIZE ; i + + ) { GL . miniTempBufferViews [ i ] = GL . miniTempBuffer . 
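/*
 * GL is the state object behind Emscripten's GLES-to-WebGL translation layer: it keeps id-indexed
 * tables (buffers, programs, shaders, textures, vaos, syncs, queries, ...) that map the integer
 * names used by compiled GL code onto live WebGL objects, tracks the current context in
 * GL.contexts / GL.currentContext, and records errors for later retrieval by glGetError.
 */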
subarray ( 0 , i + 1 ) } } , recordError : function recordError ( errorCode ) { if ( ! GL . lastError ) { GL . lastError = errorCode } } , getNewId : function ( table ) { var ret = GL . counter + + ; for ( var i = table . length ; i < ret ; i + + ) { table [ i ] = null } return ret } , MINI_TEMP_BUFFER_SIZE : 256 , miniTempBuffer : null , miniTempBufferViews : [ 0 ] , getSource : function ( shader , count , string , length ) { var source = " " ; for ( var i = 0 ; i < count ; + + i ) { var len = length ? HEAP32 [ length + i * 4 > > 2 ] : - 1 ; source + = UTF8ToString ( HEAP32 [ string + i * 4 > > 2 ] , len < 0 ? undefined : len ) } return source } , createContext : function ( canvas , webGLContextAttributes ) { var ctx = webGLContextAttributes . majorVersion > 1 ? canvas . getContext ( " webgl2 " , webGLContextAttributes ) : canvas . getContext ( " webgl " , webGLContextAttributes ) | | canvas . getContext ( " experimental - webgl " , webGLContextAttributes ) ; return ctx & & GL . registerContext ( ctx , webGLContextAttributes ) } , registerContext : function ( ctx , webGLContextAttributes ) { var handle = _malloc ( 8 ) ; var context = { handle : handle , attributes : webGLContextAttributes , version : webGLContextAttributes . majorVersion , GLctx : ctx } ; function getChromeVersion ( ) { var raw = navigator . userAgent . match ( / Chrom ( e | ium ) \ / ( [ 0 - 9 ] + ) \ . / ) ; return raw ? parseInt ( raw [ 2 ] , 10 ) : false } context . supportsWebGL2EntryPoints = context . version > = 2 & & ( getChromeVersion ( ) = = = false | | getChromeVersion ( ) > = 58 ) ; if ( ctx . canvas ) ctx . canvas . GLctxObject = context ; GL . contexts [ handle ] = context ; if ( typeof webGLContextAttributes . enableExtensionsByDefault = = = " undefined " | | webGLContextAttributes . enableExtensionsByDefault ) { GL . initExtensions ( context ) } return handle } , makeContextCurrent : function ( contextHandle ) { GL . currentContext = GL . contexts [ contextHandle ] ; Module . ctx = GLctx = GL . currentContext & & GL . currentContext . GLctx ; return ! ( contextHandle & & ! GLctx ) } , getContext : function ( contextHandle ) { return GL . contexts [ contextHandle ] } , deleteContext : function ( contextHandle ) { if ( GL . currentContext = = = GL . contexts [ contextHandle ] ) GL . currentContext = null ; if ( typeof JSEvents = = = " object " ) JSEvents . removeAllHandlersOnTarget ( GL . contexts [ contextHandle ] . GLctx . canvas ) ; if ( GL . contexts [ contextHandle ] & & GL . contexts [ contextHandle ] . GLctx . canvas ) GL . contexts [ contextHandle ] . GLctx . canvas . GLctxObject = undefined ; _free ( GL . contexts [ contextHandle ] ) ; GL . contexts [ contextHandle ] = null } , initExtensions : function ( context ) { if ( ! context ) context = GL . currentContext ; if ( context . initExtensionsDone ) return ; context . initExtensionsDone = true ; var GLctx = context . GLctx ; if ( context . version < 2 ) { var instancedArraysExt = GLctx . 
getExtension ( " ANGLE_instanced_arrays " ) ; if ( instancedArraysExt ) { GLctx [ " vertexAttribDivisor " ] = function ( index , divisor ) { instancedArraysExt [ " vertexAttribDivisorANGLE " ] ( index , divisor ) } ; GLctx [ " drawArraysInstanced " ] = function ( mode , first , count , primcount ) { instancedArraysExt [ " drawArraysInstancedANGLE " ] ( mode , first , count , primcount ) } ; GLctx [ " drawElementsInstanced " ] = function ( mode , count , type , indices , primcount ) { instancedArraysExt [ " drawElementsInstancedANGLE " ] ( mode , count , type , indices , primcount ) } } var vaoExt = GLctx . getExtension ( " OES_vertex_array_object " ) ; if ( vaoExt ) { GLctx [ " createVertexArray " ] = function ( ) { return vaoExt [ " createVertexArrayOES " ] ( ) } ; GLctx [ " deleteVertexArray " ] = function ( vao ) { vaoExt [ " deleteVertexArrayOES " ] ( vao ) } ; GLctx [ " bindVertexArray " ] = function ( vao ) { vaoExt [ " bindVertexArrayOES " ] ( vao ) } ; GLctx [ " isVertexArray " ] = function ( vao ) { return vaoExt [ " isVertexArrayOES " ] ( vao ) } } var drawBuffersExt = GLctx . getExtension ( " WEBGL_draw_buffers " ) ; if ( drawBuffersExt ) { GLctx [ " drawBuffers " ] = function ( n , bufs ) { drawBuffersExt [ " drawBuffersWEBGL " ] ( n , bufs ) } } } GLctx . disjointTimerQueryExt = GLctx . getExtension ( " EXT_disjoint_timer_query " ) ; var automaticallyEnabledExtensions = [ " OES_texture_float " , " OES_texture_half_float " , " OES_standard_derivatives " , " OES_vertex_array_object " , " WEBGL_compressed_texture_s3tc " , " WEBGL_depth_texture " , " OES_element_index_uint " , " EXT_texture_filter_anisotropic " , " EXT_frag_depth " , " WEBGL_draw_buffers " , " ANGLE_instanced_arrays " , " OES_texture_float_linear " , " OES_texture_half_float_linear " , " EXT_blend_minmax " , " EXT_shader_texture_lod " , " WEBGL_compressed_texture_pvrtc " , " EXT_color_buffer_half_float " , " WEBGL_color_buffer_float " , " EXT_sRGB " , " WEBGL_compressed_texture_etc1 " , " EXT_disjoint_timer_query " , " WEBGL_compressed_texture_etc " , " WEBGL_compressed_texture_astc " , " EXT_color_buffer_float " , " WEBGL_compressed_texture_s3tc_srgb " , " EXT_disjoint_timer_query_webgl2 " ] ; var exts = GLctx . getSupportedExtensions ( ) ; if ( exts & & exts . length > 0 ) { GLctx . getSupportedExtensions ( ) . forEach ( function ( ext ) { if ( automaticallyEnabledExtensions . indexOf ( ext ) ! = - 1 ) { GLctx . getExtension ( ext ) } } ) } } , populateUniformTable : function ( program ) { var p = GL . programs [ program ] ; var ptable = GL . programInfos [ program ] = { uniforms : { } , maxUniformLength : 0 , maxAttributeLength : - 1 , maxUniformBlockNameLength : - 1 } ; var utable = ptable . uniforms ; var numUniforms = GLctx . getProgramParameter ( p , 35718 ) ; for ( var i = 0 ; i < numUniforms ; + + i ) { var u = GLctx . getActiveUniform ( p , i ) ; var name = u . name ; ptable . maxUniformLength = Math . max ( ptable . maxUniformLength , name . length + 1 ) ; if ( name . slice ( - 1 ) = = " ] " ) { name = name . slice ( 0 , name . lastIndexOf ( " [ " ) ) } var loc = GLctx . getUniformLocation ( p , name ) ; if ( loc ) { var id = GL . getNewId ( GL . uniforms ) ; utable [ name ] = [ u . size , id ] ; GL . uniforms [ id ] = loc ; for ( var j = 1 ; j < u . size ; + + j ) { var n = name + " [ " + j + " ] " ; loc = GLctx . getUniformLocation ( p , n ) ; id = GL . getNewId ( GL . uniforms ) ; GL . 
uniforms [ id ] = loc } } } } } ; function _emscripten_glActiveTexture ( x0 ) { GLctx [ " activeTexture " ] ( x0 ) } function _emscripten_glAttachShader ( program , shader ) { GLctx . attachShader ( GL . programs [ program ] , GL . shaders [ shader ] ) } function _emscripten_glBeginQuery ( target , id ) { GLctx [ " beginQuery " ] ( target , GL . queries [ id ] ) } function _emscripten_glBeginQueryEXT ( target , id ) { GLctx . disjointTimerQueryExt [ " beginQueryEXT " ] ( target , GL . timerQueriesEXT [ id ] ) } function _emscripten_glBeginTransformFeedback ( x0 ) { GLctx [ " beginTransformFeedback " ] ( x0 ) } function _emscripten_glBindAttribLocation ( program , index , name ) { GLctx . bindAttribLocation ( GL . programs [ program ] , index , UTF8ToString ( name ) ) } function _emscripten_glBindBuffer ( target , buffer ) { if ( target = = 35051 ) { GLctx . currentPixelPackBufferBinding = buffer } else if ( target = = 35052 ) { GLctx . currentPixelUnpackBufferBinding = buffer } GLctx . bindBuffer ( target , GL . buffers [ buffer ] ) } function _emscripten_glBindBufferBase ( target , index , buffer ) { GLctx [ " bindBufferBase " ] ( target , index , GL . buffers [ buffer ] ) } function _emscripten_glBindBufferRange ( target , index , buffer , offset , ptrsize ) { GLctx [ " bindBufferRange " ] ( target , index , GL . buffers [ buffer ] , offset , ptrsize ) } function _emscripten_glBindFramebuffer ( target , framebuffer ) { GLctx . bindFramebuffer ( target , GL . framebuffers [ framebuffer ] ) } function _emscripten_glBindRenderbuffer ( target , renderbuffer ) { GLctx . bindRenderbuffer ( target , GL . renderbuffers [ renderbuffer ] ) } function _emscripten_glBindSampler ( unit , sampler ) { GLctx [ " bindSampler " ] ( unit , GL . samplers [ sampler ] ) } function _emscripten_glBindTexture ( target , texture ) { GLctx . bindTexture ( target , GL . textures [ texture ] ) } function _emscripten_glBindTransformFeedback ( target , id ) { GLctx [ " bindTransformFeedback " ] ( target , GL . transformFeedbacks [ id ] ) } function _emscripten_glBindVertexArray ( vao ) { GLctx [ " bindVertexArray " ] ( GL . vaos [ vao ] ) } function _emscripten_glBindVertexArrayOES ( vao ) { GLctx [ " bindVertexArray " ] ( GL . vaos [ vao ] ) } function _emscripten_glBlendColor ( x0 , x1 , x2 , x3 ) { GLctx [ " blendColor " ] ( x0 , x1 , x2 , x3 ) } function _emscripten_glBlendEquation ( x0 ) { GLctx [ " blendEquation " ] ( x0 ) } function _emscripten_glBlendEquationSeparate ( x0 , x1 ) { GLctx [ " blendEquationSeparate " ] ( x0 , x1 ) } function _emscripten_glBlendFunc ( x0 , x1 ) { GLctx [ " blendFunc " ] ( x0 , x1 ) } function _emscripten_glBlendFuncSeparate ( x0 , x1 , x2 , x3 ) { GLctx [ " blendFuncSeparate " ] ( x0 , x1 , x2 , x3 ) } function _emscripten_glBlitFramebuffer ( x0 , x1 , x2 , x3 , x4 , x5 , x6 , x7 , x8 , x9 ) { GLctx [ " blitFramebuffer " ] ( x0 , x1 , x2 , x3 , x4 , x5 , x6 , x7 , x8 , x9 ) } function _emscripten_glBufferData ( target , size , data , usage ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { if ( data ) { GLctx . bufferData ( target , HEAPU8 , usage , data , size ) } else { GLctx . bufferData ( target , size , usage ) } } else { GLctx . bufferData ( target , data ? HEAPU8 . subarray ( data , data + size ) : size , usage ) } } function _emscripten_glBufferSubData ( target , offset , size , data ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . bufferSubData ( target , offset , HEAPU8 , data , size ) ; return } GLctx . 
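/*
 * glBufferData, glBufferSubData and the compressedTexImage/SubImage wrappers branch on
 * GL.currentContext.supportsWebGL2EntryPoints: when the newer overloads taking (heap, offset,
 * length) are available, data is passed as HEAPU8 plus offsets instead of the HEAPU8.subarray
 * view used by the WebGL1 fallback.
 */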
bufferSubData ( target , offset , HEAPU8 . subarray ( data , data + size ) ) } function _emscripten_glCheckFramebufferStatus ( x0 ) { return GLctx [ " checkFramebufferStatus " ] ( x0 ) } function _emscripten_glClear ( x0 ) { GLctx [ " clear " ] ( x0 ) } function _emscripten_glClearBufferfi ( x0 , x1 , x2 , x3 ) { GLctx [ " clearBufferfi " ] ( x0 , x1 , x2 , x3 ) } function _emscripten_glClearBufferfv ( buffer , drawbuffer , value ) { GLctx [ " clearBufferfv " ] ( buffer , drawbuffer , HEAPF32 , value > > 2 ) } function _emscripten_glClearBufferiv ( buffer , drawbuffer , value ) { GLctx [ " clearBufferiv " ] ( buffer , drawbuffer , HEAP32 , value > > 2 ) } function _emscripten_glClearBufferuiv ( buffer , drawbuffer , value ) { GLctx [ " clearBufferuiv " ] ( buffer , drawbuffer , HEAPU32 , value > > 2 ) } function _emscripten_glClearColor ( x0 , x1 , x2 , x3 ) { GLctx [ " clearColor " ] ( x0 , x1 , x2 , x3 ) } function _emscripten_glClearDepthf ( x0 ) { GLctx [ " clearDepth " ] ( x0 ) } function _emscripten_glClearStencil ( x0 ) { GLctx [ " clearStencil " ] ( x0 ) } function _emscripten_glClientWaitSync ( sync , flags , timeoutLo , timeoutHi ) { timeoutLo = timeoutLo > > > 0 ; timeoutHi = timeoutHi > > > 0 ; var timeout = timeoutLo = = 4294967295 & & timeoutHi = = 4294967295 ? - 1 : makeBigInt ( timeoutLo , timeoutHi , true ) ; return GLctx . clientWaitSync ( GL . syncs [ sync ] , flags , timeout ) } function _emscripten_glColorMask ( red , green , blue , alpha ) { GLctx . colorMask ( ! ! red , ! ! green , ! ! blue , ! ! alpha ) } function _emscripten_glCompileShader ( shader ) { GLctx . compileShader ( GL . shaders [ shader ] ) } function _emscripten_glCompressedTexImage2D ( target , level , internalFormat , width , height , border , imageSize , data ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { if ( GLctx . currentPixelUnpackBufferBinding ) { GLctx [ " compressedTexImage2D " ] ( target , level , internalFormat , width , height , border , imageSize , data ) } else { GLctx [ " compressedTexImage2D " ] ( target , level , internalFormat , width , height , border , HEAPU8 , data , imageSize ) } return } GLctx [ " compressedTexImage2D " ] ( target , level , internalFormat , width , height , border , data ? HEAPU8 . subarray ( data , data + imageSize ) : null ) } function _emscripten_glCompressedTexImage3D ( target , level , internalFormat , width , height , depth , border , imageSize , data ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { if ( GLctx . currentPixelUnpackBufferBinding ) { GLctx [ " compressedTexImage3D " ] ( target , level , internalFormat , width , height , depth , border , imageSize , data ) } else { GLctx [ " compressedTexImage3D " ] ( target , level , internalFormat , width , height , depth , border , HEAPU8 , data , imageSize ) } } else { GLctx [ " compressedTexImage3D " ] ( target , level , internalFormat , width , height , depth , border , data ? HEAPU8 . subarray ( data , data + imageSize ) : null ) } } function _emscripten_glCompressedTexSubImage2D ( target , level , xoffset , yoffset , width , height , format , imageSize , data ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { if ( GLctx . 
currentPixelUnpackBufferBinding ) { GLctx [ " compressedTexSubImage2D " ] ( target , level , xoffset , yoffset , width , height , format , imageSize , data ) } else { GLctx [ " compressedTexSubImage2D " ] ( target , level , xoffset , yoffset , width , height , format , HEAPU8 , data , imageSize ) } return } GLctx [ " compressedTexSubImage2D " ] ( target , level , xoffset , yoffset , width , height , format , data ? HEAPU8 . subarray ( data , data + imageSize ) : null ) } function _emscripten_glCompressedTexSubImage3D ( target , level , xoffset , yoffset , zoffset , width , height , depth , format , imageSize , data ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { if ( GLctx . currentPixelUnpackBufferBinding ) { GLctx [ " compressedTexSubImage3D " ] ( target , level , xoffset , yoffset , zoffset , width , height , depth , format , imageSize , data ) } else { GLctx [ " compressedTexSubImage3D " ] ( target , level , xoffset , yoffset , zoffset , width , height , depth , format , HEAPU8 , data , imageSize ) } } else { GLctx [ " compressedTexSubImage3D " ] ( target , level , xoffset , yoffset , zoffset , width , height , depth , format , data ? HEAPU8 . subarray ( data , data + imageSize ) : null ) } } function _emscripten_glCopyBufferSubData ( x0 , x1 , x2 , x3 , x4 ) { GLctx [ " copyBufferSubData " ] ( x0 , x1 , x2 , x3 , x4 ) } function _emscripten_glCopyTexImage2D ( x0 , x1 , x2 , x3 , x4 , x5 , x6 , x7 ) { GLctx [ " copyTexImage2D " ] ( x0 , x1 , x2 , x3 , x4 , x5 , x6 , x7 ) } function _emscripten_glCopyTexSubImage2D ( x0 , x1 , x2 , x3 , x4 , x5 , x6 , x7 ) { GLctx [ " copyTexSubImage2D " ] ( x0 , x1 , x2 , x3 , x4 , x5 , x6 , x7 ) } function _emscripten_glCopyTexSubImage3D ( x0 , x1 , x2 , x3 , x4 , x5 , x6 , x7 , x8 ) { GLctx [ " copyTexSubImage3D " ] ( x0 , x1 , x2 , x3 , x4 , x5 , x6 , x7 , x8 ) } function _emscripten_glCreateProgram ( ) { var id = GL . getNewId ( GL . programs ) ; var program = GLctx . createProgram ( ) ; program . name = id ; GL . programs [ id ] = program ; return id } function _emscripten_glCreateShader ( shaderType ) { var id = GL . getNewId ( GL . shaders ) ; GL . shaders [ id ] = GLctx . createShader ( shaderType ) ; return id } function _emscripten_glCullFace ( x0 ) { GLctx [ " cullFace " ] ( x0 ) } function _emscripten_glDeleteBuffers ( n , buffers ) { for ( var i = 0 ; i < n ; i + + ) { var id = HEAP32 [ buffers + i * 4 > > 2 ] ; var buffer = GL . buffers [ id ] ; if ( ! buffer ) continue ; GLctx . deleteBuffer ( buffer ) ; buffer . name = 0 ; GL . buffers [ id ] = null ; if ( id = = GL . currArrayBuffer ) GL . currArrayBuffer = 0 ; if ( id = = GL . currElementArrayBuffer ) GL . currElementArrayBuffer = 0 ; if ( id = = GLctx . currentPixelPackBufferBinding ) GLctx . currentPixelPackBufferBinding = 0 ; if ( id = = GLctx . currentPixelUnpackBufferBinding ) GLctx . currentPixelUnpackBufferBinding = 0 } } function _emscripten_glDeleteFramebuffers ( n , framebuffers ) { for ( var i = 0 ; i < n ; + + i ) { var id = HEAP32 [ framebuffers + i * 4 > > 2 ] ; var framebuffer = GL . framebuffers [ id ] ; if ( ! framebuffer ) continue ; GLctx . deleteFramebuffer ( framebuffer ) ; framebuffer . name = 0 ; GL . framebuffers [ id ] = null } } function _emscripten_glDeleteProgram ( id ) { if ( ! id ) return ; var program = GL . programs [ id ] ; if ( ! program ) { GL . recordError ( 1281 ) ; return } GLctx . deleteProgram ( program ) ; program . name = 0 ; GL . programs [ id ] = null ; GL . 
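/*
 * The glDelete* wrappers translate each integer name back to its WebGL object, delete it, clear
 * the corresponding GL table slot and reset any bindings that referenced it; deleting an unknown
 * program, shader or sync records GL_INVALID_VALUE via GL.recordError(1281).
 */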
programInfos [ id ] = null } function _emscripten_glDeleteQueries ( n , ids ) { for ( var i = 0 ; i < n ; i + + ) { var id = HEAP32 [ ids + i * 4 > > 2 ] ; var query = GL . queries [ id ] ; if ( ! query ) continue ; GLctx [ " deleteQuery " ] ( query ) ; GL . queries [ id ] = null } } function _emscripten_glDeleteQueriesEXT ( n , ids ) { for ( var i = 0 ; i < n ; i + + ) { var id = HEAP32 [ ids + i * 4 > > 2 ] ; var query = GL . timerQueriesEXT [ id ] ; if ( ! query ) continue ; GLctx . disjointTimerQueryExt [ " deleteQueryEXT " ] ( query ) ; GL . timerQueriesEXT [ id ] = null } } function _emscripten_glDeleteRenderbuffers ( n , renderbuffers ) { for ( var i = 0 ; i < n ; i + + ) { var id = HEAP32 [ renderbuffers + i * 4 > > 2 ] ; var renderbuffer = GL . renderbuffers [ id ] ; if ( ! renderbuffer ) continue ; GLctx . deleteRenderbuffer ( renderbuffer ) ; renderbuffer . name = 0 ; GL . renderbuffers [ id ] = null } } function _emscripten_glDeleteSamplers ( n , samplers ) { for ( var i = 0 ; i < n ; i + + ) { var id = HEAP32 [ samplers + i * 4 > > 2 ] ; var sampler = GL . samplers [ id ] ; if ( ! sampler ) continue ; GLctx [ " deleteSampler " ] ( sampler ) ; sampler . name = 0 ; GL . samplers [ id ] = null } } function _emscripten_glDeleteShader ( id ) { if ( ! id ) return ; var shader = GL . shaders [ id ] ; if ( ! shader ) { GL . recordError ( 1281 ) ; return } GLctx . deleteShader ( shader ) ; GL . shaders [ id ] = null } function _emscripten_glDeleteSync ( id ) { if ( ! id ) return ; var sync = GL . syncs [ id ] ; if ( ! sync ) { GL . recordError ( 1281 ) ; return } GLctx . deleteSync ( sync ) ; sync . name = 0 ; GL . syncs [ id ] = null } function _emscripten_glDeleteTextures ( n , textures ) { for ( var i = 0 ; i < n ; i + + ) { var id = HEAP32 [ textures + i * 4 > > 2 ] ; var texture = GL . textures [ id ] ; if ( ! texture ) continue ; GLctx . deleteTexture ( texture ) ; texture . name = 0 ; GL . textures [ id ] = null } } function _emscripten_glDeleteTransformFeedbacks ( n , ids ) { for ( var i = 0 ; i < n ; i + + ) { var id = HEAP32 [ ids + i * 4 > > 2 ] ; var transformFeedback = GL . transformFeedbacks [ id ] ; if ( ! transformFeedback ) continue ; GLctx [ " deleteTransformFeedback " ] ( transformFeedback ) ; transformFeedback . name = 0 ; GL . transformFeedbacks [ id ] = null } } function _emscripten_glDeleteVertexArrays ( n , vaos ) { for ( var i = 0 ; i < n ; i + + ) { var id = HEAP32 [ vaos + i * 4 > > 2 ] ; GLctx [ " deleteVertexArray " ] ( GL . vaos [ id ] ) ; GL . vaos [ id ] = null } } function _emscripten_glDeleteVertexArraysOES ( n , vaos ) { for ( var i = 0 ; i < n ; i + + ) { var id = HEAP32 [ vaos + i * 4 > > 2 ] ; GLctx [ " deleteVertexArray " ] ( GL . vaos [ id ] ) ; GL . vaos [ id ] = null } } function _emscripten_glDepthFunc ( x0 ) { GLctx [ " depthFunc " ] ( x0 ) } function _emscripten_glDepthMask ( flag ) { GLctx . depthMask ( ! ! flag ) } function _emscripten_glDepthRangef ( x0 , x1 ) { GLctx [ " depthRange " ] ( x0 , x1 ) } function _emscripten_glDetachShader ( program , shader ) { GLctx . detachShader ( GL . programs [ program ] , GL . shaders [ shader ] ) } function _emscripten_glDisable ( x0 ) { GLctx [ " disable " ] ( x0 ) } function _emscripten_glDisableVertexAttribArray ( index ) { GLctx . disableVertexAttribArray ( index ) } function _emscripten_glDrawArrays ( mode , first , count ) { GLctx . 
drawArrays ( mode , first , count ) } function _emscripten_glDrawArraysInstanced ( mode , first , count , primcount ) { GLctx [ " drawArraysInstanced " ] ( mode , first , count , primcount ) } function _emscripten_glDrawArraysInstancedANGLE ( mode , first , count , primcount ) { GLctx [ " drawArraysInstanced " ] ( mode , first , count , primcount ) } function _emscripten_glDrawArraysInstancedARB ( mode , first , count , primcount ) { GLctx [ " drawArraysInstanced " ] ( mode , first , count , primcount ) } function _emscripten_glDrawArraysInstancedEXT ( mode , first , count , primcount ) { GLctx [ " drawArraysInstanced " ] ( mode , first , count , primcount ) } function _emscripten_glDrawArraysInstancedNV ( mode , first , count , primcount ) { GLctx [ " drawArraysInstanced " ] ( mode , first , count , primcount ) } var __tempFixedLengthArray = [ ] ; function _emscripten_glDrawBuffers ( n , bufs ) { var bufArray = __tempFixedLengthArray [ n ] ; for ( var i = 0 ; i < n ; i + + ) { bufArray [ i ] = HEAP32 [ bufs + i * 4 > > 2 ] } GLctx [ " drawBuffers " ] ( bufArray ) } function _emscripten_glDrawBuffersEXT ( n , bufs ) { var bufArray = __tempFixedLengthArray [ n ] ; for ( var i = 0 ; i < n ; i + + ) { bufArray [ i ] = HEAP32 [ bufs + i * 4 > > 2 ] } GLctx [ " drawBuffers " ] ( bufArray ) } function _emscripten_glDrawBuffersWEBGL ( n , bufs ) { var bufArray = __tempFixedLengthArray [ n ] ; for ( var i = 0 ; i < n ; i + + ) { bufArray [ i ] = HEAP32 [ bufs + i * 4 > > 2 ] } GLctx [ " drawBuffers " ] ( bufArray ) } function _emscripten_glDrawElements ( mode , count , type , indices ) { GLctx . drawElements ( mode , count , type , indices ) } function _emscripten_glDrawElementsInstanced ( mode , count , type , indices , primcount ) { GLctx [ " drawElementsInstanced " ] ( mode , count , type , indices , primcount ) } function _emscripten_glDrawElementsInstancedANGLE ( mode , count , type , indices , primcount ) { GLctx [ " drawElementsInstanced " ] ( mode , count , type , indices , primcount ) } function _emscripten_glDrawElementsInstancedARB ( mode , count , type , indices , primcount ) { GLctx [ " drawElementsInstanced " ] ( mode , count , type , indices , primcount ) } function _emscripten_glDrawElementsInstancedEXT ( mode , count , type , indices , primcount ) { GLctx [ " drawElementsInstanced " ] ( mode , count , type , indices , primcount ) } function _emscripten_glDrawElementsInstancedNV ( mode , count , type , indices , primcount ) { GLctx [ " drawElementsInstanced " ] ( mode , count , type , indices , primcount ) } function _glDrawElements ( mode , count , type , indices ) { GLctx . drawElements ( mode , count , type , indices ) } function _emscripten_glDrawRangeElements ( mode , start , end , count , type , indices ) { _glDrawElements ( mode , count , type , indices ) } function _emscripten_glEnable ( x0 ) { GLctx [ " enable " ] ( x0 ) } function _emscripten_glEnableVertexAttribArray ( index ) { GLctx . enableVertexAttribArray ( index ) } function _emscripten_glEndQuery ( x0 ) { GLctx [ " endQuery " ] ( x0 ) } function _emscripten_glEndQueryEXT ( target ) { GLctx . disjointTimerQueryExt [ " endQueryEXT " ] ( target ) } function _emscripten_glEndTransformFeedback ( ) { GLctx [ " endTransformFeedback " ] ( ) } function _emscripten_glFenceSync ( condition , flags ) { var sync = GLctx . fenceSync ( condition , flags ) ; if ( sync ) { var id = GL . getNewId ( GL . syncs ) ; sync . name = id ; GL . 
syncs [ id ] = sync ; return id } else { return 0 } } function _emscripten_glFinish ( ) { GLctx [ " finish " ] ( ) } function _emscripten_glFlush ( ) { GLctx [ " flush " ] ( ) } function _emscripten_glFlushMappedBufferRange ( ) { err ( " missing function : emscripten_glFlushMappedBufferRange " ) ; abort ( - 1 ) } function _emscripten_glFramebufferRenderbuffer ( target , attachment , renderbuffertarget , renderbuffer ) { GLctx . framebufferRenderbuffer ( target , attachment , renderbuffertarget , GL . renderbuffers [ renderbuffer ] ) } function _emscripten_glFramebufferTexture2D ( target , attachment , textarget , texture , level ) { GLctx . framebufferTexture2D ( target , attachment , textarget , GL . textures [ texture ] , level ) } function _emscripten_glFramebufferTextureLayer ( target , attachment , texture , level , layer ) { GLctx . framebufferTextureLayer ( target , attachment , GL . textures [ texture ] , level , layer ) } function _emscripten_glFrontFace ( x0 ) { GLctx [ " frontFace " ] ( x0 ) } function __glGenObject ( n , buffers , createFunction , objectTable ) { for ( var i = 0 ; i < n ; i + + ) { var buffer = GLctx [ createFunction ] ( ) ; var id = buffer & & GL . getNewId ( objectTable ) ; if ( buffer ) { buffer . name = id ; objectTable [ id ] = buffer } else { GL . recordError ( 1282 ) } HEAP32 [ buffers + i * 4 > > 2 ] = id } } function _emscripten_glGenBuffers ( n , buffers ) { __glGenObject ( n , buffers , " createBuffer " , GL . buffers ) } function _emscripten_glGenFramebuffers ( n , ids ) { __glGenObject ( n , ids , " createFramebuffer " , GL . framebuffers ) } function _emscripten_glGenQueries ( n , ids ) { __glGenObject ( n , ids , " createQuery " , GL . queries ) } function _emscripten_glGenQueriesEXT ( n , ids ) { for ( var i = 0 ; i < n ; i + + ) { var query = GLctx . disjointTimerQueryExt [ " createQueryEXT " ] ( ) ; if ( ! query ) { GL . recordError ( 1282 ) ; while ( i < n ) HEAP32 [ ids + i + + * 4 > > 2 ] = 0 ; return } var id = GL . getNewId ( GL . timerQueriesEXT ) ; query . name = id ; GL . timerQueriesEXT [ id ] = query ; HEAP32 [ ids + i * 4 > > 2 ] = id } } function _emscripten_glGenRenderbuffers ( n , renderbuffers ) { __glGenObject ( n , renderbuffers , " createRenderbuffer " , GL . renderbuffers ) } function _emscripten_glGenSamplers ( n , samplers ) { __glGenObject ( n , samplers , " createSampler " , GL . samplers ) } function _emscripten_glGenTextures ( n , textures ) { __glGenObject ( n , textures , " createTexture " , GL . textures ) } function _emscripten_glGenTransformFeedbacks ( n , ids ) { __glGenObject ( n , ids , " createTransformFeedback " , GL . transformFeedbacks ) } function _emscripten_glGenVertexArrays ( n , arrays ) { __glGenObject ( n , arrays , " createVertexArray " , GL . vaos ) } function _emscripten_glGenVertexArraysOES ( n , arrays ) { __glGenObject ( n , arrays , " createVertexArray " , GL . vaos ) } function _emscripten_glGenerateMipmap ( x0 ) { GLctx [ " generateMipmap " ] ( x0 ) } function _emscripten_glGetActiveAttrib ( program , index , bufSize , length , size , type , name ) { program = GL . programs [ program ] ; var info = GLctx . getActiveAttrib ( program , index ) ; if ( ! info ) return ; if ( bufSize > 0 & & name ) { var numBytesWrittenExclNull = stringToUTF8 ( info . name , name , bufSize ) ; if ( length ) HEAP32 [ length > > 2 ] = numBytesWrittenExclNull } else { if ( length ) HEAP32 [ length > > 2 ] = 0 } if ( size ) HEAP32 [ size > > 2 ] = info . size ; if ( type ) HEAP32 [ type > > 2 ] = info . 
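/*
 * __glGenObject is the shared implementation of the glGen* entry points: it creates WebGL objects
 * through the named factory function, assigns each a fresh integer id in the given GL table and
 * writes the ids back to the output array in HEAP32. The glGetActive* wrappers copy the queried
 * name into the caller's buffer with stringToUTF8 and store size, type and length through the
 * supplied pointers.
 */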
type } function _emscripten_glGetActiveUniform ( program , index , bufSize , length , size , type , name ) { program = GL . programs [ program ] ; var info = GLctx . getActiveUniform ( program , index ) ; if ( ! info ) return ; if ( bufSize > 0 & & name ) { var numBytesWrittenExclNull = stringToUTF8 ( info . name , name , bufSize ) ; if ( length ) HEAP32 [ length > > 2 ] = numBytesWrittenExclNull } else { if ( length ) HEAP32 [ length > > 2 ] = 0 } if ( size ) HEAP32 [ size > > 2 ] = info . size ; if ( type ) HEAP32 [ type > > 2 ] = info . type } function _emscripten_glGetActiveUniformBlockName ( program , uniformBlockIndex , bufSize , length , uniformBlockName ) { program = GL . programs [ program ] ; var result = GLctx [ " getActiveUniformBlockName " ] ( program , uniformBlockIndex ) ; if ( ! result ) return ; if ( uniformBlockName & & bufSize > 0 ) { var numBytesWrittenExclNull = stringToUTF8 ( result , uniformBlockName , bufSize ) ; if ( length ) HEAP32 [ length > > 2 ] = numBytesWrittenExclNull } else { if ( length ) HEAP32 [ length > > 2 ] = 0 } } function _emscripten_glGetActiveUniformBlockiv ( program , uniformBlockIndex , pname , params ) { if ( ! params ) { GL . recordError ( 1281 ) ; return } program = GL . programs [ program ] ; switch ( pname ) { case 35393 : var name = GLctx [ " getActiveUniformBlockName " ] ( program , uniformBlockIndex ) ; HEAP32 [ params > > 2 ] = name . length + 1 ; return ; default : var result = GLctx [ " getActiveUniformBlockParameter " ] ( program , uniformBlockIndex , pname ) ; if ( ! result ) return ; if ( typeof result = = " number " ) { HEAP32 [ params > > 2 ] = result } else { for ( var i = 0 ; i < result . length ; i + + ) { HEAP32 [ params + i * 4 > > 2 ] = result [ i ] } } } } function _emscripten_glGetActiveUniformsiv ( program , uniformCount , uniformIndices , pname , params ) { if ( ! params ) { GL . recordError ( 1281 ) ; return } if ( uniformCount > 0 & & uniformIndices = = 0 ) { GL . recordError ( 1281 ) ; return } program = GL . programs [ program ] ; var ids = [ ] ; for ( var i = 0 ; i < uniformCount ; i + + ) { ids . push ( HEAP32 [ uniformIndices + i * 4 > > 2 ] ) } var result = GLctx [ " getActiveUniforms " ] ( program , ids , pname ) ; if ( ! result ) return ; var len = result . length ; for ( var i = 0 ; i < len ; i + + ) { HEAP32 [ params + i * 4 > > 2 ] = result [ i ] } } function _emscripten_glGetAttachedShaders ( program , maxCount , count , shaders ) { var result = GLctx . getAttachedShaders ( GL . programs [ program ] ) ; var len = result . length ; if ( len > maxCount ) { len = maxCount } HEAP32 [ count > > 2 ] = len ; for ( var i = 0 ; i < len ; + + i ) { var id = GL . shaders . indexOf ( result [ i ] ) ; HEAP32 [ shaders + i * 4 > > 2 ] = id } } function _emscripten_glGetAttribLocation ( program , name ) { return GLctx . getAttribLocation ( GL . programs [ program ] , UTF8ToString ( name ) ) } function emscriptenWebGLGet ( name_ , p , type ) { if ( ! p ) { GL . recordError ( 1281 ) ; return } var ret = undefined ; switch ( name_ ) { case 36346 : ret = 1 ; break ; case 36344 : if ( type ! = = " Integer " & & type ! = = " Integer64 " ) { GL . recordError ( 1280 ) } return ; case 34814 : case 36345 : ret = 0 ; break ; case 34466 : var formats = GLctx . getParameter ( 34467 ) ; ret = formats ? formats . length : 0 ; break ; case 33309 : if ( GL . currentContext . version < 2 ) { GL . recordError ( 1282 ) ; return } var exts = GLctx . getSupportedExtensions ( ) ; ret = 2 * exts . 
length ; break ; case 33307 : case 33308 : if ( GL . currentContext . version < 2 ) { GL . recordError ( 1280 ) ; return } ret = name_ = = 33307 ? 3 : 0 ; break } if ( ret = = = undefined ) { var result = GLctx . getParameter ( name_ ) ; switch ( typeof result ) { case " number " : ret = result ; break ; case " boolean " : ret = result ? 1 : 0 ; break ; case " string " : GL . recordError ( 1280 ) ; return ; case " object " : if ( result = = = null ) { switch ( name_ ) { case 34964 : case 35725 : case 34965 : case 36006 : case 36007 : case 32873 : case 34229 : case 35097 : case 36389 : case 34068 : { ret = 0 ; break } default : { GL . recordError ( 1280 ) ; return } } } else if ( result instanceof Float32Array | | result instanceof Uint32Array | | result instanceof Int32Array | | result instanceof Array ) { for ( var i = 0 ; i < result . length ; + + i ) { switch ( type ) { case " Integer " : HEAP32 [ p + i * 4 > > 2 ] = result [ i ] ; break ; case " Float " : HEAPF32 [ p + i * 4 > > 2 ] = result [ i ] ; break ; case " Boolean " : HEAP8 [ p + i > > 0 ] = result [ i ] ? 1 : 0 ; break ; default : throw " internal glGet error , bad type : " + type } } return } else { try { ret = result . name | 0 } catch ( e ) { GL . recordError ( 1280 ) ; err ( " GL_INVALID_ENUM in glGet " + type + " v : Unknown object returned from WebGL getParameter ( " + name_ + " ) ! ( error : " + e + " ) " ) ; return } } break ; default : GL . recordError ( 1280 ) ; return } } switch ( type ) { case " Integer64 " : tempI64 = [ ret > > > 0 , ( tempDouble = ret , + Math_abs ( tempDouble ) > = 1 ? tempDouble > 0 ? ( Math_min ( + Math_floor ( tempDouble / 4294967296 ) , 4294967295 ) | 0 ) > > > 0 : ~ ~ + Math_ceil ( ( tempDouble - + ( ~ ~ tempDouble > > > 0 ) ) / 4294967296 ) > > > 0 : 0 ) ] , HEAP32 [ p > > 2 ] = tempI64 [ 0 ] , HEAP32 [ p + 4 > > 2 ] = tempI64 [ 1 ] ; break ; case " Integer " : HEAP32 [ p > > 2 ] = ret ; break ; case " Float " : HEAPF32 [ p > > 2 ] = ret ; break ; case " Boolean " : HEAP8 [ p > > 0 ] = ret ? 1 : 0 ; break ; default : throw " internal glGet error , bad type : " + type } } function _emscripten_glGetBooleanv ( name_ , p ) { emscriptenWebGLGet ( name_ , p , " Boolean " ) } function _emscripten_glGetBufferParameteri64v ( target , value , data ) { if ( ! data ) { GL . recordError ( 1281 ) ; return } tempI64 = [ GLctx . getBufferParameter ( target , value ) > > > 0 , ( tempDouble = GLctx . getBufferParameter ( target , value ) , + Math_abs ( tempDouble ) > = 1 ? tempDouble > 0 ? ( Math_min ( + Math_floor ( tempDouble / 4294967296 ) , 4294967295 ) | 0 ) > > > 0 : ~ ~ + Math_ceil ( ( tempDouble - + ( ~ ~ tempDouble > > > 0 ) ) / 4294967296 ) > > > 0 : 0 ) ] , HEAP32 [ data > > 2 ] = tempI64 [ 0 ] , HEAP32 [ data + 4 > > 2 ] = tempI64 [ 1 ] } function _emscripten_glGetBufferParameteriv ( target , value , data ) { if ( ! data ) { GL . recordError ( 1281 ) ; return } HEAP32 [ data > > 2 ] = GLctx . getBufferParameter ( target , value ) } function _emscripten_glGetBufferPointerv ( ) { err ( " missing function : emscripten_glGetBufferPointerv " ) ; abort ( - 1 ) } function _emscripten_glGetError ( ) { if ( GL . lastError ) { var error = GL . lastError ; GL . lastError = 0 ; return error } else { return GLctx . getError ( ) } } function _emscripten_glGetFloatv ( name_ , p ) { emscriptenWebGLGet ( name_ , p , " Float " ) } function _emscripten_glGetFragDataLocation ( program , name ) { return GLctx [ " getFragDataLocation " ] ( GL . 
programs [ program ] , UTF8ToString ( name ) ) } function _emscripten_glGetFramebufferAttachmentParameteriv ( target , attachment , pname , params ) { var result = GLctx . getFramebufferAttachmentParameter ( target , attachment , pname ) ; if ( result instanceof WebGLRenderbuffer | | result instanceof WebGLTexture ) { result = result . name | 0 } HEAP32 [ params > > 2 ] = result } function emscriptenWebGLGetIndexed ( target , index , data , type ) { if ( ! data ) { GL . recordError ( 1281 ) ; return } var result = GLctx [ " getIndexedParameter " ] ( target , index ) ; var ret ; switch ( typeof result ) { case " boolean " : ret = result ? 1 : 0 ; break ; case " number " : ret = result ; break ; case " object " : if ( result = = = null ) { switch ( target ) { case 35983 : case 35368 : ret = 0 ; break ; default : { GL . recordError ( 1280 ) ; return } } } else if ( result instanceof WebGLBuffer ) { ret = result . name | 0 } else { GL . recordError ( 1280 ) ; return } break ; default : GL . recordError ( 1280 ) ; return } switch ( type ) { case " Integer64 " : tempI64 = [ ret > > > 0 , ( tempDouble = ret , + Math_abs ( tempDouble ) > = 1 ? tempDouble > 0 ? ( Math_min ( + Math_floor ( tempDouble / 4294967296 ) , 4294967295 ) | 0 ) > > > 0 : ~ ~ + Math_ceil ( ( tempDouble - + ( ~ ~ tempDouble > > > 0 ) ) / 4294967296 ) > > > 0 : 0 ) ] , HEAP32 [ data > > 2 ] = tempI64 [ 0 ] , HEAP32 [ data + 4 > > 2 ] = tempI64 [ 1 ] ; break ; case " Integer " : HEAP32 [ data > > 2 ] = ret ; break ; case " Float " : HEAPF32 [ data > > 2 ] = ret ; break ; case " Boolean " : HEAP8 [ data > > 0 ] = ret ? 1 : 0 ; break ; default : throw " internal emscriptenWebGLGetIndexed ( ) error , bad type : " + type } } function _emscripten_glGetInteger64i_v ( target , index , data ) { emscriptenWebGLGetIndexed ( target , index , data , " Integer64 " ) } function _emscripten_glGetInteger64v ( name_ , p ) { emscriptenWebGLGet ( name_ , p , " Integer64 " ) } function _emscripten_glGetIntegeri_v ( target , index , data ) { emscriptenWebGLGetIndexed ( target , index , data , " Integer " ) } function _emscripten_glGetIntegerv ( name_ , p ) { emscriptenWebGLGet ( name_ , p , " Integer " ) } function _emscripten_glGetInternalformativ ( ) { err ( " missing function : emscripten_glGetInternalformativ " ) ; abort ( - 1 ) } function _emscripten_glGetProgramBinary ( program , bufSize , length , binaryFormat , binary ) { GL . recordError ( 1282 ) } function _emscripten_glGetProgramInfoLog ( program , maxLength , length , infoLog ) { var log = GLctx . getProgramInfoLog ( GL . programs [ program ] ) ; if ( log = = = null ) log = " ( unknown error ) " ; if ( maxLength > 0 & & infoLog ) { var numBytesWrittenExclNull = stringToUTF8 ( log , infoLog , maxLength ) ; if ( length ) HEAP32 [ length > > 2 ] = numBytesWrittenExclNull } else { if ( length ) HEAP32 [ length > > 2 ] = 0 } } function _emscripten_glGetProgramiv ( program , pname , p ) { if ( ! p ) { GL . recordError ( 1281 ) ; return } if ( program > = GL . counter ) { GL . recordError ( 1281 ) ; return } var ptable = GL . programInfos [ program ] ; if ( ! ptable ) { GL . recordError ( 1282 ) ; return } if ( pname = = 35716 ) { var log = GLctx . getProgramInfoLog ( GL . programs [ program ] ) ; if ( log = = = null ) log = " ( unknown error ) " ; HEAP32 [ p > > 2 ] = log . length + 1 } else if ( pname = = 35719 ) { HEAP32 [ p > > 2 ] = ptable . maxUniformLength } else if ( pname = = 35722 ) { if ( ptable . maxAttributeLength = = - 1 ) { program = GL . 
programs [ program ] ; var numAttribs = GLctx . getProgramParameter ( program , 35721 ) ; ptable . maxAttributeLength = 0 ; for ( var i = 0 ; i < numAttribs ; + + i ) { var activeAttrib = GLctx . getActiveAttrib ( program , i ) ; ptable . maxAttributeLength = Math . max ( ptable . maxAttributeLength , activeAttrib . name . length + 1 ) } } HEAP32 [ p > > 2 ] = ptable . maxAttributeLength } else if ( pname = = 35381 ) { if ( ptable . maxUniformBlockNameLength = = - 1 ) { program = GL . programs [ program ] ; var numBlocks = GLctx . getProgramParameter ( program , 35382 ) ; ptable . maxUniformBlockNameLength = 0 ; for ( var i = 0 ; i < numBlocks ; + + i ) { var activeBlockName = GLctx . getActiveUniformBlockName ( program , i ) ; ptable . maxUniformBlockNameLength = Math . max ( ptable . maxUniformBlockNameLength , activeBlockName . length + 1 ) } } HEAP32 [ p > > 2 ] = ptable . maxUniformBlockNameLength } else { HEAP32 [ p > > 2 ] = GLctx . getProgramParameter ( GL . programs [ program ] , pname ) } } function _emscripten_glGetQueryObjecti64vEXT ( id , pname , params ) { if ( ! params ) { GL . recordError ( 1281 ) ; return } var query = GL . timerQueriesEXT [ id ] ; var param = GLctx . disjointTimerQueryExt [ " getQueryObjectEXT " ] ( query , pname ) ; var ret ; if ( typeof param = = " boolean " ) { ret = param ? 1 : 0 } else { ret = param } tempI64 = [ ret > > > 0 , ( tempDouble = ret , + Math_abs ( tempDouble ) > = 1 ? tempDouble > 0 ? ( Math_min ( + Math_floor ( tempDouble / 4294967296 ) , 4294967295 ) | 0 ) > > > 0 : ~ ~ + Math_ceil ( ( tempDouble - + ( ~ ~ tempDouble > > > 0 ) ) / 4294967296 ) > > > 0 : 0 ) ] , HEAP32 [ params > > 2 ] = tempI64 [ 0 ] , HEAP32 [ params + 4 > > 2 ] = tempI64 [ 1 ] } function _emscripten_glGetQueryObjectivEXT ( id , pname , params ) { if ( ! params ) { GL . recordError ( 1281 ) ; return } var query = GL . timerQueriesEXT [ id ] ; var param = GLctx . disjointTimerQueryExt [ " getQueryObjectEXT " ] ( query , pname ) ; var ret ; if ( typeof param = = " boolean " ) { ret = param ? 1 : 0 } else { ret = param } HEAP32 [ params > > 2 ] = ret } function _emscripten_glGetQueryObjectui64vEXT ( id , pname , params ) { if ( ! params ) { GL . recordError ( 1281 ) ; return } var query = GL . timerQueriesEXT [ id ] ; var param = GLctx . disjointTimerQueryExt [ " getQueryObjectEXT " ] ( query , pname ) ; var ret ; if ( typeof param = = " boolean " ) { ret = param ? 1 : 0 } else { ret = param } tempI64 = [ ret > > > 0 , ( tempDouble = ret , + Math_abs ( tempDouble ) > = 1 ? tempDouble > 0 ? ( Math_min ( + Math_floor ( tempDouble / 4294967296 ) , 4294967295 ) | 0 ) > > > 0 : ~ ~ + Math_ceil ( ( tempDouble - + ( ~ ~ tempDouble > > > 0 ) ) / 4294967296 ) > > > 0 : 0 ) ] , HEAP32 [ params > > 2 ] = tempI64 [ 0 ] , HEAP32 [ params + 4 > > 2 ] = tempI64 [ 1 ] } function _emscripten_glGetQueryObjectuiv ( id , pname , params ) { if ( ! params ) { GL . recordError ( 1281 ) ; return } var query = GL . queries [ id ] ; var param = GLctx [ " getQueryParameter " ] ( query , pname ) ; var ret ; if ( typeof param = = " boolean " ) { ret = param ? 1 : 0 } else { ret = param } HEAP32 [ params > > 2 ] = ret } function _emscripten_glGetQueryObjectuivEXT ( id , pname , params ) { if ( ! params ) { GL . recordError ( 1281 ) ; return } var query = GL . timerQueriesEXT [ id ] ; var param = GLctx . disjointTimerQueryExt [ " getQueryObjectEXT " ] ( query , pname ) ; var ret ; if ( typeof param = = " boolean " ) { ret = param ? 
1 : 0 } else { ret = param } HEAP32 [ params > > 2 ] = ret } function _emscripten_glGetQueryiv ( target , pname , params ) { if ( ! params ) { GL . recordError ( 1281 ) ; return } HEAP32 [ params > > 2 ] = GLctx [ " getQuery " ] ( target , pname ) } function _emscripten_glGetQueryivEXT ( target , pname , params ) { if ( ! params ) { GL . recordError ( 1281 ) ; return } HEAP32 [ params > > 2 ] = GLctx . disjointTimerQueryExt [ " getQueryEXT " ] ( target , pname ) } function _emscripten_glGetRenderbufferParameteriv ( target , pname , params ) { if ( ! params ) { GL . recordError ( 1281 ) ; return } HEAP32 [ params > > 2 ] = GLctx . getRenderbufferParameter ( target , pname ) } function _emscripten_glGetSamplerParameterfv ( sampler , pname , params ) { if ( ! params ) { GL . recordError ( 1281 ) ; return } sampler = GL . samplers [ sampler ] ; HEAPF32 [ params > > 2 ] = GLctx [ " getSamplerParameter " ] ( sampler , pname ) } function _emscripten_glGetSamplerParameteriv ( sampler , pname , params ) { if ( ! params ) { GL . recordError ( 1281 ) ; return } sampler = GL . samplers [ sampler ] ; HEAP32 [ params > > 2 ] = GLctx [ " getSamplerParameter " ] ( sampler , pname ) } function _emscripten_glGetShaderInfoLog ( shader , maxLength , length , infoLog ) { var log = GLctx . getShaderInfoLog ( GL . shaders [ shader ] ) ; if ( log = = = null ) log = " ( unknown error ) " ; if ( maxLength > 0 & & infoLog ) { var numBytesWrittenExclNull = stringToUTF8 ( log , infoLog , maxLength ) ; if ( length ) HEAP32 [ length > > 2 ] = numBytesWrittenExclNull } else { if ( length ) HEAP32 [ length > > 2 ] = 0 } } function _emscripten_glGetShaderPrecisionFormat ( shaderType , precisionType , range , precision ) { var result = GLctx . getShaderPrecisionFormat ( shaderType , precisionType ) ; HEAP32 [ range > > 2 ] = result . rangeMin ; HEAP32 [ range + 4 > > 2 ] = result . rangeMax ; HEAP32 [ precision > > 2 ] = result . precision } function _emscripten_glGetShaderSource ( shader , bufSize , length , source ) { var result = GLctx . getShaderSource ( GL . shaders [ shader ] ) ; if ( ! result ) return ; if ( bufSize > 0 & & source ) { var numBytesWrittenExclNull = stringToUTF8 ( result , source , bufSize ) ; if ( length ) HEAP32 [ length > > 2 ] = numBytesWrittenExclNull } else { if ( length ) HEAP32 [ length > > 2 ] = 0 } } function _emscripten_glGetShaderiv ( shader , pname , p ) { if ( ! p ) { GL . recordError ( 1281 ) ; return } if ( pname = = 35716 ) { var log = GLctx . getShaderInfoLog ( GL . shaders [ shader ] ) ; if ( log = = = null ) log = " ( unknown error ) " ; HEAP32 [ p > > 2 ] = log . length + 1 } else if ( pname = = 35720 ) { var source = GLctx . getShaderSource ( GL . shaders [ shader ] ) ; var sourceLength = source = = = null | | source . length = = 0 ? 0 : source . length + 1 ; HEAP32 [ p > > 2 ] = sourceLength } else { HEAP32 [ p > > 2 ] = GLctx . getShaderParameter ( GL . shaders [ shader ] , pname ) } } function stringToNewUTF8 ( jsString ) { var length = lengthBytesUTF8 ( jsString ) + 1 ; var cString = _malloc ( length ) ; stringToUTF8 ( jsString , cString , length ) ; return cString } function _emscripten_glGetString ( name_ ) { if ( GL . stringCache [ name_ ] ) return GL . stringCache [ name_ ] ; var ret ; switch ( name_ ) { case 7939 : var exts = GLctx . getSupportedExtensions ( ) ; var gl_exts = [ ] ; for ( var i = 0 ; i < exts . length ; + + i ) { gl_exts . push ( exts [ i ] ) ; gl_exts . push ( " GL_ " + exts [ i ] ) } ret = stringToNewUTF8 ( gl_exts . 
join ( " " ) ) ; break ; case 7936 : case 7937 : case 37445 : case 37446 : var s = GLctx . getParameter ( name_ ) ; if ( ! s ) { GL . recordError ( 1280 ) } ret = stringToNewUTF8 ( s ) ; break ; case 7938 : var glVersion = GLctx . getParameter ( GLctx . VERSION ) ; if ( GL . currentContext . version > = 2 ) glVersion = " OpenGL ES 3 . 0 ( " + glVersion + " ) " ; else { glVersion = " OpenGL ES 2 . 0 ( " + glVersion + " ) " } ret = stringToNewUTF8 ( glVersion ) ; break ; case 35724 : var glslVersion = GLctx . getParameter ( GLctx . SHADING_LANGUAGE_VERSION ) ; var ver_re = / ^ WebGL GLSL ES ( [ 0 - 9 ] \ . [ 0 - 9 ] [ 0 - 9 ] ? ) ( ? : $ | . * ) / ; var ver_num = glslVersion . match ( ver_re ) ; if ( ver_num ! = = null ) { if ( ver_num [ 1 ] . length = = 3 ) ver_num [ 1 ] = ver_num [ 1 ] + " 0 " ; glslVersion = " OpenGL ES GLSL ES " + ver_num [ 1 ] + " ( " + glslVersion + " ) " } ret = stringToNewUTF8 ( glslVersion ) ; break ; default : GL . recordError ( 1280 ) ; return 0 } GL . stringCache [ name_ ] = ret ; return ret } function _emscripten_glGetStringi ( name , index ) { if ( GL . currentContext . version < 2 ) { GL . recordError ( 1282 ) ; return 0 } var stringiCache = GL . stringiCache [ name ] ; if ( stringiCache ) { if ( index < 0 | | index > = stringiCache . length ) { GL . recordError ( 1281 ) ; return 0 } return stringiCache [ index ] } switch ( name ) { case 7939 : var exts = GLctx . getSupportedExtensions ( ) ; var gl_exts = [ ] ; for ( var i = 0 ; i < exts . length ; + + i ) { gl_exts . push ( stringToNewUTF8 ( exts [ i ] ) ) ; gl_exts . push ( stringToNewUTF8 ( " GL_ " + exts [ i ] ) ) } stringiCache = GL . stringiCache [ name ] = gl_exts ; if ( index < 0 | | index > = stringiCache . length ) { GL . recordError ( 1281 ) ; return 0 } return stringiCache [ index ] ; default : GL . recordError ( 1280 ) ; return 0 } } function _emscripten_glGetSynciv ( sync , pname , bufSize , length , values ) { if ( bufSize < 0 ) { GL . recordError ( 1281 ) ; return } if ( ! values ) { GL . recordError ( 1281 ) ; return } var ret = GLctx . getSyncParameter ( GL . syncs [ sync ] , pname ) ; HEAP32 [ length > > 2 ] = ret ; if ( ret ! = = null & & length ) HEAP32 [ length > > 2 ] = 1 } function _emscripten_glGetTexParameterfv ( target , pname , params ) { if ( ! params ) { GL . recordError ( 1281 ) ; return } HEAPF32 [ params > > 2 ] = GLctx . getTexParameter ( target , pname ) } function _emscripten_glGetTexParameteriv ( target , pname , params ) { if ( ! params ) { GL . recordError ( 1281 ) ; return } HEAP32 [ params > > 2 ] = GLctx . getTexParameter ( target , pname ) } function _emscripten_glGetTransformFeedbackVarying ( program , index , bufSize , length , size , type , name ) { program = GL . programs [ program ] ; var info = GLctx [ " getTransformFeedbackVarying " ] ( program , index ) ; if ( ! info ) return ; if ( name & & bufSize > 0 ) { var numBytesWrittenExclNull = stringToUTF8 ( info . name , name , bufSize ) ; if ( length ) HEAP32 [ length > > 2 ] = numBytesWrittenExclNull } else { if ( length ) HEAP32 [ length > > 2 ] = 0 } if ( size ) HEAP32 [ size > > 2 ] = info . size ; if ( type ) HEAP32 [ type > > 2 ] = info . type } function _emscripten_glGetUniformBlockIndex ( program , uniformBlockName ) { return GLctx [ " getUniformBlockIndex " ] ( GL . programs [ program ] , UTF8ToString ( uniformBlockName ) ) } function _emscripten_glGetUniformIndices ( program , uniformCount , uniformNames , uniformIndices ) { if ( ! uniformIndices ) { GL . 
recordError ( 1281 ) ; return } if ( uniformCount > 0 & & ( uniformNames = = 0 | | uniformIndices = = 0 ) ) { GL . recordError ( 1281 ) ; return } program = GL . programs [ program ] ; var names = [ ] ; for ( var i = 0 ; i < uniformCount ; i + + ) names . push ( UTF8ToString ( HEAP32 [ uniformNames + i * 4 > > 2 ] ) ) ; var result = GLctx [ " getUniformIndices " ] ( program , names ) ; if ( ! result ) return ; var len = result . length ; for ( var i = 0 ; i < len ; i + + ) { HEAP32 [ uniformIndices + i * 4 > > 2 ] = result [ i ] } } function _emscripten_glGetUniformLocation ( program , name ) { name = UTF8ToString ( name ) ; var arrayIndex = 0 ; if ( name [ name . length - 1 ] = = " ] " ) { var leftBrace = name . lastIndexOf ( " [ " ) ; arrayIndex = name [ leftBrace + 1 ] ! = " ] " ? parseInt ( name . slice ( leftBrace + 1 ) ) : 0 ; name = name . slice ( 0 , leftBrace ) } var uniformInfo = GL . programInfos [ program ] & & GL . programInfos [ program ] . uniforms [ name ] ; if ( uniformInfo & & arrayIndex > = 0 & & arrayIndex < uniformInfo [ 0 ] ) { return uniformInfo [ 1 ] + arrayIndex } else { return - 1 } } function emscriptenWebGLGetUniform ( program , location , params , type ) { if ( ! params ) { GL . recordError ( 1281 ) ; return } var data = GLctx . getUniform ( GL . programs [ program ] , GL . uniforms [ location ] ) ; if ( typeof data = = " number " | | typeof data = = " boolean " ) { switch ( type ) { case " Integer " : HEAP32 [ params > > 2 ] = data ; break ; case " Float " : HEAPF32 [ params > > 2 ] = data ; break ; default : throw " internal emscriptenWebGLGetUniform ( ) error , bad type : " + type } } else { for ( var i = 0 ; i < data . length ; i + + ) { switch ( type ) { case " Integer " : HEAP32 [ params + i * 4 > > 2 ] = data [ i ] ; break ; case " Float " : HEAPF32 [ params + i * 4 > > 2 ] = data [ i ] ; break ; default : throw " internal emscriptenWebGLGetUniform ( ) error , bad type : " + type } } } } function _emscripten_glGetUniformfv ( program , location , params ) { emscriptenWebGLGetUniform ( program , location , params , " Float " ) } function _emscripten_glGetUniformiv ( program , location , params ) { emscriptenWebGLGetUniform ( program , location , params , " Integer " ) } function _emscripten_glGetUniformuiv ( program , location , params ) { emscriptenWebGLGetUniform ( program , location , params , " Integer " ) } function emscriptenWebGLGetVertexAttrib ( index , pname , params , type ) { if ( ! params ) { GL . recordError ( 1281 ) ; return } var data = GLctx . getVertexAttrib ( index , pname ) ; if ( pname = = 34975 ) { HEAP32 [ params > > 2 ] = data [ " name " ] } else if ( typeof data = = " number " | | typeof data = = " boolean " ) { switch ( type ) { case " Integer " : HEAP32 [ params > > 2 ] = data ; break ; case " Float " : HEAPF32 [ params > > 2 ] = data ; break ; case " FloatToInteger " : HEAP32 [ params > > 2 ] = Math . fround ( data ) ; break ; default : throw " internal emscriptenWebGLGetVertexAttrib ( ) error , bad type : " + type } } else { for ( var i = 0 ; i < data . length ; i + + ) { switch ( type ) { case " Integer " : HEAP32 [ params + i * 4 > > 2 ] = data [ i ] ; break ; case " Float " : HEAPF32 [ params + i * 4 > > 2 ] = data [ i ] ; break ; case " FloatToInteger " : HEAP32 [ params + i * 4 > > 2 ] = Math . 
fround ( data [ i ] ) ; break ; default : throw " internal emscriptenWebGLGetVertexAttrib ( ) error , bad type : " + type } } } } function _emscripten_glGetVertexAttribIiv ( index , pname , params ) { emscriptenWebGLGetVertexAttrib ( index , pname , params , " Integer " ) } function _emscripten_glGetVertexAttribIuiv ( index , pname , params ) { emscriptenWebGLGetVertexAttrib ( index , pname , params , " Integer " ) } function _emscripten_glGetVertexAttribPointerv ( index , pname , pointer ) { if ( ! pointer ) { GL . recordError ( 1281 ) ; return } HEAP32 [ pointer > > 2 ] = GLctx . getVertexAttribOffset ( index , pname ) } function _emscripten_glGetVertexAttribfv ( index , pname , params ) { emscriptenWebGLGetVertexAttrib ( index , pname , params , " Float " ) } function _emscripten_glGetVertexAttribiv ( index , pname , params ) { emscriptenWebGLGetVertexAttrib ( index , pname , params , " FloatToInteger " ) } function _emscripten_glHint ( x0 , x1 ) { GLctx [ " hint " ] ( x0 , x1 ) } function _emscripten_glInvalidateFramebuffer ( target , numAttachments , attachments ) { var list = __tempFixedLengthArray [ numAttachments ] ; for ( var i = 0 ; i < numAttachments ; i + + ) { list [ i ] = HEAP32 [ attachments + i * 4 > > 2 ] } GLctx [ " invalidateFramebuffer " ] ( target , list ) } function _emscripten_glInvalidateSubFramebuffer ( target , numAttachments , attachments , x , y , width , height ) { var list = __tempFixedLengthArray [ numAttachments ] ; for ( var i = 0 ; i < numAttachments ; i + + ) { list [ i ] = HEAP32 [ attachments + i * 4 > > 2 ] } GLctx [ " invalidateSubFramebuffer " ] ( target , list , x , y , width , height ) } function _emscripten_glIsBuffer ( buffer ) { var b = GL . buffers [ buffer ] ; if ( ! b ) return 0 ; return GLctx . isBuffer ( b ) } function _emscripten_glIsEnabled ( x0 ) { return GLctx [ " isEnabled " ] ( x0 ) } function _emscripten_glIsFramebuffer ( framebuffer ) { var fb = GL . framebuffers [ framebuffer ] ; if ( ! fb ) return 0 ; return GLctx . isFramebuffer ( fb ) } function _emscripten_glIsProgram ( program ) { program = GL . programs [ program ] ; if ( ! program ) return 0 ; return GLctx . isProgram ( program ) } function _emscripten_glIsQuery ( id ) { var query = GL . queries [ id ] ; if ( ! query ) return 0 ; return GLctx [ " isQuery " ] ( query ) } function _emscripten_glIsQueryEXT ( id ) { var query = GL . timerQueriesEXT [ id ] ; if ( ! query ) return 0 ; return GLctx . disjointTimerQueryExt [ " isQueryEXT " ] ( query ) } function _emscripten_glIsRenderbuffer ( renderbuffer ) { var rb = GL . renderbuffers [ renderbuffer ] ; if ( ! rb ) return 0 ; return GLctx . isRenderbuffer ( rb ) } function _emscripten_glIsSampler ( id ) { var sampler = GL . samplers [ id ] ; if ( ! sampler ) return 0 ; return GLctx [ " isSampler " ] ( sampler ) } function _emscripten_glIsShader ( shader ) { var s = GL . shaders [ shader ] ; if ( ! s ) return 0 ; return GLctx . isShader ( s ) } function _emscripten_glIsSync ( sync ) { var sync = GL . syncs [ sync ] ; if ( ! sync ) return 0 ; return GLctx . isSync ( sync ) } function _emscripten_glIsTexture ( id ) { var texture = GL . textures [ id ] ; if ( ! texture ) return 0 ; return GLctx . isTexture ( texture ) } function _emscripten_glIsTransformFeedback ( id ) { return GLctx [ " isTransformFeedback " ] ( GL . transformFeedbacks [ id ] ) } function _emscripten_glIsVertexArray ( array ) { var vao = GL . vaos [ array ] ; if ( ! 
vao ) return 0 ; return GLctx [ " isVertexArray " ] ( vao ) } function _emscripten_glIsVertexArrayOES ( array ) { var vao = GL . vaos [ array ] ; if ( ! vao ) return 0 ; return GLctx [ " isVertexArray " ] ( vao ) } function _emscripten_glLineWidth ( x0 ) { GLctx [ " lineWidth " ] ( x0 ) } function _emscripten_glLinkProgram ( program ) { GLctx . linkProgram ( GL . programs [ program ] ) ; GL . populateUniformTable ( program ) } function _emscripten_glMapBufferRange ( ) { err ( " missing function : emscripten_glMapBufferRange " ) ; abort ( - 1 ) } function _emscripten_glPauseTransformFeedback ( ) { GLctx [ " pauseTransformFeedback " ] ( ) } function _emscripten_glPixelStorei ( pname , param ) { if ( pname = = 3317 ) { GL . unpackAlignment = param } GLctx . pixelStorei ( pname , param ) } function _emscripten_glPolygonOffset ( x0 , x1 ) { GLctx [ " polygonOffset " ] ( x0 , x1 ) } function _emscripten_glProgramBinary ( program , binaryFormat , binary , length ) { GL . recordError ( 1280 ) } function _emscripten_glProgramParameteri ( program , pname , value ) { GL . recordError ( 1280 ) } function _emscripten_glQueryCounterEXT ( id , target ) { GLctx . disjointTimerQueryExt [ " queryCounterEXT " ] ( GL . timerQueriesEXT [ id ] , target ) } function _emscripten_glReadBuffer ( x0 ) { GLctx [ " readBuffer " ] ( x0 ) } function __computeUnpackAlignedImageSize ( width , height , sizePerPixel , alignment ) { function roundedToNextMultipleOf ( x , y ) { return x + y - 1 & - y } var plainRowSize = width * sizePerPixel ; var alignedRowSize = roundedToNextMultipleOf ( plainRowSize , alignment ) ; return height * alignedRowSize } var __colorChannelsInGlTextureFormat = { 6402 : 1 , 6403 : 1 , 6406 : 1 , 6407 : 3 , 6408 : 4 , 6409 : 1 , 6410 : 2 , 33319 : 2 , 33320 : 2 , 35904 : 3 , 35906 : 4 , 36244 : 1 , 36248 : 3 , 36249 : 4 } ; var __sizeOfGlTextureElementType = { 5120 : 1 , 5121 : 1 , 5122 : 2 , 5123 : 2 , 5124 : 4 , 5125 : 4 , 5126 : 4 , 5131 : 2 , 32819 : 2 , 32820 : 2 , 33635 : 2 , 33640 : 4 , 34042 : 4 , 35899 : 4 , 35902 : 4 , 36193 : 2 } ; function emscriptenWebGLGetTexPixelData ( type , format , width , height , pixels , internalFormat ) { var sizePerPixel = __colorChannelsInGlTextureFormat [ format ] * __sizeOfGlTextureElementType [ type ] ; if ( ! sizePerPixel ) { GL . recordError ( 1280 ) ; return } var bytes = __computeUnpackAlignedImageSize ( width , height , sizePerPixel , GL . unpackAlignment ) ; var end = pixels + bytes ; switch ( type ) { case 5120 : return HEAP8 . subarray ( pixels , end ) ; case 5121 : return HEAPU8 . subarray ( pixels , end ) ; case 5122 : return HEAP16 . subarray ( pixels > > 1 , end > > 1 ) ; case 5124 : return HEAP32 . subarray ( pixels > > 2 , end > > 2 ) ; case 5126 : return HEAPF32 . subarray ( pixels > > 2 , end > > 2 ) ; case 5125 : case 34042 : case 35902 : case 33640 : case 35899 : case 34042 : return HEAPU32 . subarray ( pixels > > 2 , end > > 2 ) ; case 5123 : case 33635 : case 32819 : case 32820 : case 36193 : case 5131 : return HEAPU16 . subarray ( pixels > > 1 , end > > 1 ) ; default : GL . 
recordError ( 1280 ) } } function __heapObjectForWebGLType ( type ) { switch ( type ) { case 5120 : return HEAP8 ; case 5121 : return HEAPU8 ; case 5122 : return HEAP16 ; case 5123 : case 33635 : case 32819 : case 32820 : case 36193 : case 5131 : return HEAPU16 ; case 5124 : return HEAP32 ; case 5125 : case 34042 : case 35902 : case 33640 : case 35899 : case 34042 : return HEAPU32 ; case 5126 : return HEAPF32 } } var __heapAccessShiftForWebGLType = { 5122 : 1 , 5123 : 1 , 5124 : 2 , 5125 : 2 , 5126 : 2 , 5131 : 1 , 32819 : 1 , 32820 : 1 , 33635 : 1 , 33640 : 2 , 34042 : 2 , 35899 : 2 , 35902 : 2 , 36193 : 1 } ; function _emscripten_glReadPixels ( x , y , width , height , format , type , pixels ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { if ( GLctx . currentPixelPackBufferBinding ) { GLctx . readPixels ( x , y , width , height , format , type , pixels ) } else { GLctx . readPixels ( x , y , width , height , format , type , __heapObjectForWebGLType ( type ) , pixels > > ( __heapAccessShiftForWebGLType [ type ] | 0 ) ) } return } var pixelData = emscriptenWebGLGetTexPixelData ( type , format , width , height , pixels , format ) ; if ( ! pixelData ) { GL . recordError ( 1280 ) ; return } GLctx . readPixels ( x , y , width , height , format , type , pixelData ) } function _emscripten_glReleaseShaderCompiler ( ) { } function _emscripten_glRenderbufferStorage ( x0 , x1 , x2 , x3 ) { GLctx [ " renderbufferStorage " ] ( x0 , x1 , x2 , x3 ) } function _emscripten_glRenderbufferStorageMultisample ( x0 , x1 , x2 , x3 , x4 ) { GLctx [ " renderbufferStorageMultisample " ] ( x0 , x1 , x2 , x3 , x4 ) } function _emscripten_glResumeTransformFeedback ( ) { GLctx [ " resumeTransformFeedback " ] ( ) } function _emscripten_glSampleCoverage ( value , invert ) { GLctx . sampleCoverage ( value , ! ! invert ) } function _emscripten_glSamplerParameterf ( sampler , pname , param ) { GLctx [ " samplerParameterf " ] ( GL . samplers [ sampler ] , pname , param ) } function _emscripten_glSamplerParameterfv ( sampler , pname , params ) { var param = HEAPF32 [ params > > 2 ] ; GLctx [ " samplerParameterf " ] ( GL . samplers [ sampler ] , pname , param ) } function _emscripten_glSamplerParameteri ( sampler , pname , param ) { GLctx [ " samplerParameteri " ] ( GL . samplers [ sampler ] , pname , param ) } function _emscripten_glSamplerParameteriv ( sampler , pname , params ) { var param = HEAP32 [ params > > 2 ] ; GLctx [ " samplerParameteri " ] ( GL . samplers [ sampler ] , pname , param ) } function _emscripten_glScissor ( x0 , x1 , x2 , x3 ) { GLctx [ " scissor " ] ( x0 , x1 , x2 , x3 ) } function _emscripten_glShaderBinary ( ) { GL . recordError ( 1280 ) } function _emscripten_glShaderSource ( shader , count , string , length ) { var source = GL . getSource ( shader , count , string , length ) ; GLctx . shaderSource ( GL . 
shaders [ shader ] , source ) } function _emscripten_glStencilFunc ( x0 , x1 , x2 ) { GLctx [ " stencilFunc " ] ( x0 , x1 , x2 ) } function _emscripten_glStencilFuncSeparate ( x0 , x1 , x2 , x3 ) { GLctx [ " stencilFuncSeparate " ] ( x0 , x1 , x2 , x3 ) } function _emscripten_glStencilMask ( x0 ) { GLctx [ " stencilMask " ] ( x0 ) } function _emscripten_glStencilMaskSeparate ( x0 , x1 ) { GLctx [ " stencilMaskSeparate " ] ( x0 , x1 ) } function _emscripten_glStencilOp ( x0 , x1 , x2 ) { GLctx [ " stencilOp " ] ( x0 , x1 , x2 ) } function _emscripten_glStencilOpSeparate ( x0 , x1 , x2 , x3 ) { GLctx [ " stencilOpSeparate " ] ( x0 , x1 , x2 , x3 ) } function _emscripten_glTexImage2D ( target , level , internalFormat , width , height , border , format , type , pixels ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { if ( GLctx . currentPixelUnpackBufferBinding ) { GLctx . texImage2D ( target , level , internalFormat , width , height , border , format , type , pixels ) } else if ( pixels ! = 0 ) { GLctx . texImage2D ( target , level , internalFormat , width , height , border , format , type , __heapObjectForWebGLType ( type ) , pixels > > ( __heapAccessShiftForWebGLType [ type ] | 0 ) ) } else { GLctx . texImage2D ( target , level , internalFormat , width , height , border , format , type , null ) } return } GLctx . texImage2D ( target , level , internalFormat , width , height , border , format , type , pixels ? emscriptenWebGLGetTexPixelData ( type , format , width , height , pixels , internalFormat ) : null ) } function _emscripten_glTexImage3D ( target , level , internalFormat , width , height , depth , border , format , type , pixels ) { if ( GLctx . currentPixelUnpackBufferBinding ) { GLctx [ " texImage3D " ] ( target , level , internalFormat , width , height , depth , border , format , type , pixels ) } else if ( pixels ! = 0 ) { GLctx [ " texImage3D " ] ( target , level , internalFormat , width , height , depth , border , format , type , __heapObjectForWebGLType ( type ) , pixels > > ( __heapAccessShiftForWebGLType [ type ] | 0 ) ) } else { GLctx [ " texImage3D " ] ( target , level , internalFormat , width , height , depth , border , format , type , null ) } } function _emscripten_glTexParameterf ( x0 , x1 , x2 ) { GLctx [ " texParameterf " ] ( x0 , x1 , x2 ) } function _emscripten_glTexParameterfv ( target , pname , params ) { var param = HEAPF32 [ params > > 2 ] ; GLctx . texParameterf ( target , pname , param ) } function _emscripten_glTexParameteri ( x0 , x1 , x2 ) { GLctx [ " texParameteri " ] ( x0 , x1 , x2 ) } function _emscripten_glTexParameteriv ( target , pname , params ) { var param = HEAP32 [ params > > 2 ] ; GLctx . texParameteri ( target , pname , param ) } function _emscripten_glTexStorage2D ( x0 , x1 , x2 , x3 , x4 ) { GLctx [ " texStorage2D " ] ( x0 , x1 , x2 , x3 , x4 ) } function _emscripten_glTexStorage3D ( x0 , x1 , x2 , x3 , x4 , x5 ) { GLctx [ " texStorage3D " ] ( x0 , x1 , x2 , x3 , x4 , x5 ) } function _emscripten_glTexSubImage2D ( target , level , xoffset , yoffset , width , height , format , type , pixels ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { if ( GLctx . currentPixelUnpackBufferBinding ) { GLctx . texSubImage2D ( target , level , xoffset , yoffset , width , height , format , type , pixels ) } else if ( pixels ! = 0 ) { GLctx . texSubImage2D ( target , level , xoffset , yoffset , width , height , format , type , __heapObjectForWebGLType ( type ) , pixels > > ( __heapAccessShiftForWebGLType [ type ] | 0 ) ) } else { GLctx . 
texSubImage2D ( target , level , xoffset , yoffset , width , height , format , type , null ) } return } var pixelData = null ; if ( pixels ) pixelData = emscriptenWebGLGetTexPixelData ( type , format , width , height , pixels , 0 ) ; GLctx . texSubImage2D ( target , level , xoffset , yoffset , width , height , format , type , pixelData ) } function _emscripten_glTexSubImage3D ( target , level , xoffset , yoffset , zoffset , width , height , depth , format , type , pixels ) { if ( GLctx . currentPixelUnpackBufferBinding ) { GLctx [ " texSubImage3D " ] ( target , level , xoffset , yoffset , zoffset , width , height , depth , format , type , pixels ) } else if ( pixels ! = 0 ) { GLctx [ " texSubImage3D " ] ( target , level , xoffset , yoffset , zoffset , width , height , depth , format , type , __heapObjectForWebGLType ( type ) , pixels > > ( __heapAccessShiftForWebGLType [ type ] | 0 ) ) } else { GLctx [ " texSubImage3D " ] ( target , level , xoffset , yoffset , zoffset , width , height , depth , format , type , null ) } } function _emscripten_glTransformFeedbackVaryings ( program , count , varyings , bufferMode ) { program = GL . programs [ program ] ; var vars = [ ] ; for ( var i = 0 ; i < count ; i + + ) vars . push ( UTF8ToString ( HEAP32 [ varyings + i * 4 > > 2 ] ) ) ; GLctx [ " transformFeedbackVaryings " ] ( program , vars , bufferMode ) } function _emscripten_glUniform1f ( location , v0 ) { GLctx . uniform1f ( GL . uniforms [ location ] , v0 ) } function _emscripten_glUniform1fv ( location , count , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniform1fv ( GL . uniforms [ location ] , HEAPF32 , value > > 2 , count ) ; return } if ( count < = GL . MINI_TEMP_BUFFER_SIZE ) { var view = GL . miniTempBufferViews [ count - 1 ] ; for ( var i = 0 ; i < count ; + + i ) { view [ i ] = HEAPF32 [ value + 4 * i > > 2 ] } } else { var view = HEAPF32 . subarray ( value > > 2 , value + count * 4 > > 2 ) } GLctx . uniform1fv ( GL . uniforms [ location ] , view ) } function _emscripten_glUniform1i ( location , v0 ) { GLctx . uniform1i ( GL . uniforms [ location ] , v0 ) } function _emscripten_glUniform1iv ( location , count , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniform1iv ( GL . uniforms [ location ] , HEAP32 , value > > 2 , count ) ; return } GLctx . uniform1iv ( GL . uniforms [ location ] , HEAP32 . subarray ( value > > 2 , value + count * 4 > > 2 ) ) } function _emscripten_glUniform1ui ( location , v0 ) { GLctx . uniform1ui ( GL . uniforms [ location ] , v0 ) } function _emscripten_glUniform1uiv ( location , count , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniform1uiv ( GL . uniforms [ location ] , HEAPU32 , value > > 2 , count ) } else { GLctx . uniform1uiv ( GL . uniforms [ location ] , HEAPU32 . subarray ( value > > 2 , value + count * 4 > > 2 ) ) } } function _emscripten_glUniform2f ( location , v0 , v1 ) { GLctx . uniform2f ( GL . uniforms [ location ] , v0 , v1 ) } function _emscripten_glUniform2fv ( location , count , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniform2fv ( GL . uniforms [ location ] , HEAPF32 , value > > 2 , count * 2 ) ; return } if ( 2 * count < = GL . MINI_TEMP_BUFFER_SIZE ) { var view = GL . miniTempBufferViews [ 2 * count - 1 ] ; for ( var i = 0 ; i < 2 * count ; i + = 2 ) { view [ i ] = HEAPF32 [ value + 4 * i > > 2 ] ; view [ i + 1 ] = HEAPF32 [ value + ( 4 * i + 4 ) > > 2 ] } } else { var view = HEAPF32 . 
subarray ( value > > 2 , value + count * 8 > > 2 ) } GLctx . uniform2fv ( GL . uniforms [ location ] , view ) } function _emscripten_glUniform2i ( location , v0 , v1 ) { GLctx . uniform2i ( GL . uniforms [ location ] , v0 , v1 ) } function _emscripten_glUniform2iv ( location , count , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniform2iv ( GL . uniforms [ location ] , HEAP32 , value > > 2 , count * 2 ) ; return } GLctx . uniform2iv ( GL . uniforms [ location ] , HEAP32 . subarray ( value > > 2 , value + count * 8 > > 2 ) ) } function _emscripten_glUniform2ui ( location , v0 , v1 ) { GLctx . uniform2ui ( GL . uniforms [ location ] , v0 , v1 ) } function _emscripten_glUniform2uiv ( location , count , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniform2uiv ( GL . uniforms [ location ] , HEAPU32 , value > > 2 , count * 2 ) } else { GLctx . uniform2uiv ( GL . uniforms [ location ] , HEAPU32 . subarray ( value > > 2 , value + count * 8 > > 2 ) ) } } function _emscripten_glUniform3f ( location , v0 , v1 , v2 ) { GLctx . uniform3f ( GL . uniforms [ location ] , v0 , v1 , v2 ) } function _emscripten_glUniform3fv ( location , count , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniform3fv ( GL . uniforms [ location ] , HEAPF32 , value > > 2 , count * 3 ) ; return } if ( 3 * count < = GL . MINI_TEMP_BUFFER_SIZE ) { var view = GL . miniTempBufferViews [ 3 * count - 1 ] ; for ( var i = 0 ; i < 3 * count ; i + = 3 ) { view [ i ] = HEAPF32 [ value + 4 * i > > 2 ] ; view [ i + 1 ] = HEAPF32 [ value + ( 4 * i + 4 ) > > 2 ] ; view [ i + 2 ] = HEAPF32 [ value + ( 4 * i + 8 ) > > 2 ] } } else { var view = HEAPF32 . subarray ( value > > 2 , value + count * 12 > > 2 ) } GLctx . uniform3fv ( GL . uniforms [ location ] , view ) } function _emscripten_glUniform3i ( location , v0 , v1 , v2 ) { GLctx . uniform3i ( GL . uniforms [ location ] , v0 , v1 , v2 ) } function _emscripten_glUniform3iv ( location , count , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniform3iv ( GL . uniforms [ location ] , HEAP32 , value > > 2 , count * 3 ) ; return } GLctx . uniform3iv ( GL . uniforms [ location ] , HEAP32 . subarray ( value > > 2 , value + count * 12 > > 2 ) ) } function _emscripten_glUniform3ui ( location , v0 , v1 , v2 ) { GLctx . uniform3ui ( GL . uniforms [ location ] , v0 , v1 , v2 ) } function _emscripten_glUniform3uiv ( location , count , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniform3uiv ( GL . uniforms [ location ] , HEAPU32 , value > > 2 , count * 3 ) } else { GLctx . uniform3uiv ( GL . uniforms [ location ] , HEAPU32 . subarray ( value > > 2 , value + count * 12 > > 2 ) ) } } function _emscripten_glUniform4f ( location , v0 , v1 , v2 , v3 ) { GLctx . uniform4f ( GL . uniforms [ location ] , v0 , v1 , v2 , v3 ) } function _emscripten_glUniform4fv ( location , count , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniform4fv ( GL . uniforms [ location ] , HEAPF32 , value > > 2 , count * 4 ) ; return } if ( 4 * count < = GL . MINI_TEMP_BUFFER_SIZE ) { var view = GL . miniTempBufferViews [ 4 * count - 1 ] ; for ( var i = 0 ; i < 4 * count ; i + = 4 ) { view [ i ] = HEAPF32 [ value + 4 * i > > 2 ] ; view [ i + 1 ] = HEAPF32 [ value + ( 4 * i + 4 ) > > 2 ] ; view [ i + 2 ] = HEAPF32 [ value + ( 4 * i + 8 ) > > 2 ] ; view [ i + 3 ] = HEAPF32 [ value + ( 4 * i + 12 ) > > 2 ] } } else { var view = HEAPF32 . 
subarray ( value > > 2 , value + count * 16 > > 2 ) } GLctx . uniform4fv ( GL . uniforms [ location ] , view ) } function _emscripten_glUniform4i ( location , v0 , v1 , v2 , v3 ) { GLctx . uniform4i ( GL . uniforms [ location ] , v0 , v1 , v2 , v3 ) } function _emscripten_glUniform4iv ( location , count , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniform4iv ( GL . uniforms [ location ] , HEAP32 , value > > 2 , count * 4 ) ; return } GLctx . uniform4iv ( GL . uniforms [ location ] , HEAP32 . subarray ( value > > 2 , value + count * 16 > > 2 ) ) } function _emscripten_glUniform4ui ( location , v0 , v1 , v2 , v3 ) { GLctx . uniform4ui ( GL . uniforms [ location ] , v0 , v1 , v2 , v3 ) } function _emscripten_glUniform4uiv ( location , count , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniform4uiv ( GL . uniforms [ location ] , HEAPU32 , value > > 2 , count * 4 ) } else { GLctx . uniform4uiv ( GL . uniforms [ location ] , HEAPU32 . subarray ( value > > 2 , value + count * 16 > > 2 ) ) } } function _emscripten_glUniformBlockBinding ( program , uniformBlockIndex , uniformBlockBinding ) { program = GL . programs [ program ] ; GLctx [ " uniformBlockBinding " ] ( program , uniformBlockIndex , uniformBlockBinding ) } function _emscripten_glUniformMatrix2fv ( location , count , transpose , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniformMatrix2fv ( GL . uniforms [ location ] , ! ! transpose , HEAPF32 , value > > 2 , count * 4 ) ; return } if ( 4 * count < = GL . MINI_TEMP_BUFFER_SIZE ) { var view = GL . miniTempBufferViews [ 4 * count - 1 ] ; for ( var i = 0 ; i < 4 * count ; i + = 4 ) { view [ i ] = HEAPF32 [ value + 4 * i > > 2 ] ; view [ i + 1 ] = HEAPF32 [ value + ( 4 * i + 4 ) > > 2 ] ; view [ i + 2 ] = HEAPF32 [ value + ( 4 * i + 8 ) > > 2 ] ; view [ i + 3 ] = HEAPF32 [ value + ( 4 * i + 12 ) > > 2 ] } } else { var view = HEAPF32 . subarray ( value > > 2 , value + count * 16 > > 2 ) } GLctx . uniformMatrix2fv ( GL . uniforms [ location ] , ! ! transpose , view ) } function _emscripten_glUniformMatrix2x3fv ( location , count , transpose , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniformMatrix2x3fv ( GL . uniforms [ location ] , ! ! transpose , HEAPF32 , value > > 2 , count * 6 ) } else { GLctx . uniformMatrix2x3fv ( GL . uniforms [ location ] , ! ! transpose , HEAPF32 . subarray ( value > > 2 , value + count * 24 > > 2 ) ) } } function _emscripten_glUniformMatrix2x4fv ( location , count , transpose , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniformMatrix2x4fv ( GL . uniforms [ location ] , ! ! transpose , HEAPF32 , value > > 2 , count * 8 ) } else { GLctx . uniformMatrix2x4fv ( GL . uniforms [ location ] , ! ! transpose , HEAPF32 . subarray ( value > > 2 , value + count * 32 > > 2 ) ) } } function _emscripten_glUniformMatrix3fv ( location , count , transpose , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniformMatrix3fv ( GL . uniforms [ location ] , ! ! transpose , HEAPF32 , value > > 2 , count * 9 ) ; return } if ( 9 * count < = GL . MINI_TEMP_BUFFER_SIZE ) { var view = GL . 
miniTempBufferViews [ 9 * count - 1 ] ; for ( var i = 0 ; i < 9 * count ; i + = 9 ) { view [ i ] = HEAPF32 [ value + 4 * i > > 2 ] ; view [ i + 1 ] = HEAPF32 [ value + ( 4 * i + 4 ) > > 2 ] ; view [ i + 2 ] = HEAPF32 [ value + ( 4 * i + 8 ) > > 2 ] ; view [ i + 3 ] = HEAPF32 [ value + ( 4 * i + 12 ) > > 2 ] ; view [ i + 4 ] = HEAPF32 [ value + ( 4 * i + 16 ) > > 2 ] ; view [ i + 5 ] = HEAPF32 [ value + ( 4 * i + 20 ) > > 2 ] ; view [ i + 6 ] = HEAPF32 [ value + ( 4 * i + 24 ) > > 2 ] ; view [ i + 7 ] = HEAPF32 [ value + ( 4 * i + 28 ) > > 2 ] ; view [ i + 8 ] = HEAPF32 [ value + ( 4 * i + 32 ) > > 2 ] } } else { var view = HEAPF32 . subarray ( value > > 2 , value + count * 36 > > 2 ) } GLctx . uniformMatrix3fv ( GL . uniforms [ location ] , ! ! transpose , view ) } function _emscripten_glUniformMatrix3x2fv ( location , count , transpose , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniformMatrix3x2fv ( GL . uniforms [ location ] , ! ! transpose , HEAPF32 , value > > 2 , count * 6 ) } else { GLctx . uniformMatrix3x2fv ( GL . uniforms [ location ] , ! ! transpose , HEAPF32 . subarray ( value > > 2 , value + count * 24 > > 2 ) ) } } function _emscripten_glUniformMatrix3x4fv ( location , count , transpose , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniformMatrix3x4fv ( GL . uniforms [ location ] , ! ! transpose , HEAPF32 , value > > 2 , count * 12 ) } else { GLctx . uniformMatrix3x4fv ( GL . uniforms [ location ] , ! ! transpose , HEAPF32 . subarray ( value > > 2 , value + count * 48 > > 2 ) ) } } function _emscripten_glUniformMatrix4fv ( location , count , transpose , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniformMatrix4fv ( GL . uniforms [ location ] , ! ! transpose , HEAPF32 , value > > 2 , count * 16 ) ; return } if ( 16 * count < = GL . MINI_TEMP_BUFFER_SIZE ) { var view = GL . miniTempBufferViews [ 16 * count - 1 ] ; for ( var i = 0 ; i < 16 * count ; i + = 16 ) { view [ i ] = HEAPF32 [ value + 4 * i > > 2 ] ; view [ i + 1 ] = HEAPF32 [ value + ( 4 * i + 4 ) > > 2 ] ; view [ i + 2 ] = HEAPF32 [ value + ( 4 * i + 8 ) > > 2 ] ; view [ i + 3 ] = HEAPF32 [ value + ( 4 * i + 12 ) > > 2 ] ; view [ i + 4 ] = HEAPF32 [ value + ( 4 * i + 16 ) > > 2 ] ; view [ i + 5 ] = HEAPF32 [ value + ( 4 * i + 20 ) > > 2 ] ; view [ i + 6 ] = HEAPF32 [ value + ( 4 * i + 24 ) > > 2 ] ; view [ i + 7 ] = HEAPF32 [ value + ( 4 * i + 28 ) > > 2 ] ; view [ i + 8 ] = HEAPF32 [ value + ( 4 * i + 32 ) > > 2 ] ; view [ i + 9 ] = HEAPF32 [ value + ( 4 * i + 36 ) > > 2 ] ; view [ i + 10 ] = HEAPF32 [ value + ( 4 * i + 40 ) > > 2 ] ; view [ i + 11 ] = HEAPF32 [ value + ( 4 * i + 44 ) > > 2 ] ; view [ i + 12 ] = HEAPF32 [ value + ( 4 * i + 48 ) > > 2 ] ; view [ i + 13 ] = HEAPF32 [ value + ( 4 * i + 52 ) > > 2 ] ; view [ i + 14 ] = HEAPF32 [ value + ( 4 * i + 56 ) > > 2 ] ; view [ i + 15 ] = HEAPF32 [ value + ( 4 * i + 60 ) > > 2 ] } } else { var view = HEAPF32 . subarray ( value > > 2 , value + count * 64 > > 2 ) } GLctx . uniformMatrix4fv ( GL . uniforms [ location ] , ! ! transpose , view ) } function _emscripten_glUniformMatrix4x2fv ( location , count , transpose , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniformMatrix4x2fv ( GL . uniforms [ location ] , ! ! transpose , HEAPF32 , value > > 2 , count * 8 ) } else { GLctx . uniformMatrix4x2fv ( GL . uniforms [ location ] , ! ! transpose , HEAPF32 . 
subarray ( value > > 2 , value + count * 32 > > 2 ) ) } } function _emscripten_glUniformMatrix4x3fv ( location , count , transpose , value ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . uniformMatrix4x3fv ( GL . uniforms [ location ] , ! ! transpose , HEAPF32 , value > > 2 , count * 12 ) } else { GLctx . uniformMatrix4x3fv ( GL . uniforms [ location ] , ! ! transpose , HEAPF32 . subarray ( value > > 2 , value + count * 48 > > 2 ) ) } } function _emscripten_glUnmapBuffer ( ) { err ( " missing function : emscripten_glUnmapBuffer " ) ; abort ( - 1 ) } function _emscripten_glUseProgram ( program ) { GLctx . useProgram ( GL . programs [ program ] ) } function _emscripten_glValidateProgram ( program ) { GLctx . validateProgram ( GL . programs [ program ] ) } function _emscripten_glVertexAttrib1f ( x0 , x1 ) { GLctx [ " vertexAttrib1f " ] ( x0 , x1 ) } function _emscripten_glVertexAttrib1fv ( index , v ) { GLctx . vertexAttrib1f ( index , HEAPF32 [ v > > 2 ] ) } function _emscripten_glVertexAttrib2f ( x0 , x1 , x2 ) { GLctx [ " vertexAttrib2f " ] ( x0 , x1 , x2 ) } function _emscripten_glVertexAttrib2fv ( index , v ) { GLctx . vertexAttrib2f ( index , HEAPF32 [ v > > 2 ] , HEAPF32 [ v + 4 > > 2 ] ) } function _emscripten_glVertexAttrib3f ( x0 , x1 , x2 , x3 ) { GLctx [ " vertexAttrib3f " ] ( x0 , x1 , x2 , x3 ) } function _emscripten_glVertexAttrib3fv ( index , v ) { GLctx . vertexAttrib3f ( index , HEAPF32 [ v > > 2 ] , HEAPF32 [ v + 4 > > 2 ] , HEAPF32 [ v + 8 > > 2 ] ) } function _emscripten_glVertexAttrib4f ( x0 , x1 , x2 , x3 , x4 ) { GLctx [ " vertexAttrib4f " ] ( x0 , x1 , x2 , x3 , x4 ) } function _emscripten_glVertexAttrib4fv ( index , v ) { GLctx . vertexAttrib4f ( index , HEAPF32 [ v > > 2 ] , HEAPF32 [ v + 4 > > 2 ] , HEAPF32 [ v + 8 > > 2 ] , HEAPF32 [ v + 12 > > 2 ] ) } function _emscripten_glVertexAttribDivisor ( index , divisor ) { GLctx [ " vertexAttribDivisor " ] ( index , divisor ) } function _emscripten_glVertexAttribDivisorANGLE ( index , divisor ) { GLctx [ " vertexAttribDivisor " ] ( index , divisor ) } function _emscripten_glVertexAttribDivisorARB ( index , divisor ) { GLctx [ " vertexAttribDivisor " ] ( index , divisor ) } function _emscripten_glVertexAttribDivisorEXT ( index , divisor ) { GLctx [ " vertexAttribDivisor " ] ( index , divisor ) } function _emscripten_glVertexAttribDivisorNV ( index , divisor ) { GLctx [ " vertexAttribDivisor " ] ( index , divisor ) } function _emscripten_glVertexAttribI4i ( x0 , x1 , x2 , x3 , x4 ) { GLctx [ " vertexAttribI4i " ] ( x0 , x1 , x2 , x3 , x4 ) } function _emscripten_glVertexAttribI4iv ( index , v ) { GLctx . vertexAttribI4i ( index , HEAP32 [ v > > 2 ] , HEAP32 [ v + 4 > > 2 ] , HEAP32 [ v + 8 > > 2 ] , HEAP32 [ v + 12 > > 2 ] ) } function _emscripten_glVertexAttribI4ui ( x0 , x1 , x2 , x3 , x4 ) { GLctx [ " vertexAttribI4ui " ] ( x0 , x1 , x2 , x3 , x4 ) } function _emscripten_glVertexAttribI4uiv ( index , v ) { GLctx . vertexAttribI4ui ( index , HEAPU32 [ v > > 2 ] , HEAPU32 [ v + 4 > > 2 ] , HEAPU32 [ v + 8 > > 2 ] , HEAPU32 [ v + 12 > > 2 ] ) } function _emscripten_glVertexAttribIPointer ( index , size , type , stride , ptr ) { GLctx [ " vertexAttribIPointer " ] ( index , size , type , stride , ptr ) } function _emscripten_glVertexAttribPointer ( index , size , type , normalized , stride , ptr ) { GLctx . vertexAttribPointer ( index , size , type , ! ! 
normalized , stride , ptr ) } function _emscripten_glViewport ( x0 , x1 , x2 , x3 ) { GLctx [ " viewport " ] ( x0 , x1 , x2 , x3 ) } function _emscripten_glWaitSync ( sync , flags , timeoutLo , timeoutHi ) { timeoutLo = timeoutLo > > > 0 ; timeoutHi = timeoutHi > > > 0 ; var timeout = timeoutLo = = 4294967295 & & timeoutHi = = 4294967295 ? - 1 : makeBigInt ( timeoutLo , timeoutHi , true ) ; GLctx . waitSync ( GL . syncs [ sync ] , flags , timeout ) } function abortOnCannotGrowMemory ( requestedSize ) { abort ( " OOM " ) } function emscripten_realloc_buffer ( size ) { var PAGE_MULTIPLE = 65536 ; size = alignUp ( size , PAGE_MULTIPLE ) ; var old = Module [ " buffer " ] ; var oldSize = old . byteLength ; try { var result = wasmMemory . grow ( ( size - oldSize ) / 65536 ) ; if ( result ! = = ( - 1 | 0 ) ) { return Module [ " buffer " ] = wasmMemory . buffer } else { return null } } catch ( e ) { return null } } function _emscripten_resize_heap ( requestedSize ) { var oldSize = _emscripten_get_heap_size ( ) ; var PAGE_MULTIPLE = 65536 ; var LIMIT = 2147483648 - PAGE_MULTIPLE ; if ( requestedSize > LIMIT ) { return false } var MIN_TOTAL_MEMORY = 16777216 ; var newSize = Math . max ( oldSize , MIN_TOTAL_MEMORY ) ; while ( newSize < requestedSize ) { if ( newSize < = 536870912 ) { newSize = alignUp ( 2 * newSize , PAGE_MULTIPLE ) } else { newSize = Math . min ( alignUp ( ( 3 * newSize + 2147483648 ) / 4 , PAGE_MULTIPLE ) , LIMIT ) } } var replacement = emscripten_realloc_buffer ( newSize ) ; if ( ! replacement | | replacement . byteLength ! = newSize ) { return false } updateGlobalBuffer ( replacement ) ; updateGlobalBufferViews ( ) ; TOTAL_MEMORY = newSize ; HEAPU32 [ DYNAMICTOP_PTR > > 2 ] = requestedSize ; return true } function _glActiveTexture ( x0 ) { GLctx [ " activeTexture " ] ( x0 ) } function _glAttachShader ( program , shader ) { GLctx . attachShader ( GL . programs [ program ] , GL . shaders [ shader ] ) } function _glBindBuffer ( target , buffer ) { if ( target = = 35051 ) { GLctx . currentPixelPackBufferBinding = buffer } else if ( target = = 35052 ) { GLctx . currentPixelUnpackBufferBinding = buffer } GLctx . bindBuffer ( target , GL . buffers [ buffer ] ) } function _glBindBufferRange ( target , index , buffer , offset , ptrsize ) { GLctx [ " bindBufferRange " ] ( target , index , GL . buffers [ buffer ] , offset , ptrsize ) } function _glBindFramebuffer ( target , framebuffer ) { GLctx . bindFramebuffer ( target , GL . framebuffers [ framebuffer ] ) } function _glBindRenderbuffer ( target , renderbuffer ) { GLctx . bindRenderbuffer ( target , GL . renderbuffers [ renderbuffer ] ) } function _glBindSampler ( unit , sampler ) { GLctx [ " bindSampler " ] ( unit , GL . samplers [ sampler ] ) } function _glBindTexture ( target , texture ) { GLctx . bindTexture ( target , GL . textures [ texture ] ) } function _glBindVertexArray ( vao ) { GLctx [ " bindVertexArray " ] ( GL . vaos [ vao ] ) } function _glBlendEquationSeparate ( x0 , x1 ) { GLctx [ " blendEquationSeparate " ] ( x0 , x1 ) } function _glBlendFuncSeparate ( x0 , x1 , x2 , x3 ) { GLctx [ " blendFuncSeparate " ] ( x0 , x1 , x2 , x3 ) } function _glBlitFramebuffer ( x0 , x1 , x2 , x3 , x4 , x5 , x6 , x7 , x8 , x9 ) { GLctx [ " blitFramebuffer " ] ( x0 , x1 , x2 , x3 , x4 , x5 , x6 , x7 , x8 , x9 ) } function _glBufferData ( target , size , data , usage ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { if ( data ) { GLctx . bufferData ( target , HEAPU8 , usage , data , size ) } else { GLctx . 
bufferData ( target , size , usage ) } } else { GLctx . bufferData ( target , data ? HEAPU8 . subarray ( data , data + size ) : size , usage ) } } function _glBufferSubData ( target , offset , size , data ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { GLctx . bufferSubData ( target , offset , HEAPU8 , data , size ) ; return } GLctx . bufferSubData ( target , offset , HEAPU8 . subarray ( data , data + size ) ) } function _glClear ( x0 ) { GLctx [ " clear " ] ( x0 ) } function _glClearColor ( x0 , x1 , x2 , x3 ) { GLctx [ " clearColor " ] ( x0 , x1 , x2 , x3 ) } function _glClearDepthf ( x0 ) { GLctx [ " clearDepth " ] ( x0 ) } function _glClearStencil ( x0 ) { GLctx [ " clearStencil " ] ( x0 ) } function _glColorMask ( red , green , blue , alpha ) { GLctx . colorMask ( ! ! red , ! ! green , ! ! blue , ! ! alpha ) } function _glCompileShader ( shader ) { GLctx . compileShader ( GL . shaders [ shader ] ) } function _glCompressedTexSubImage2D ( target , level , xoffset , yoffset , width , height , format , imageSize , data ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { if ( GLctx . currentPixelUnpackBufferBinding ) { GLctx [ " compressedTexSubImage2D " ] ( target , level , xoffset , yoffset , width , height , format , imageSize , data ) } else { GLctx [ " compressedTexSubImage2D " ] ( target , level , xoffset , yoffset , width , height , format , HEAPU8 , data , imageSize ) } return } GLctx [ " compressedTexSubImage2D " ] ( target , level , xoffset , yoffset , width , height , format , data ? HEAPU8 . subarray ( data , data + imageSize ) : null ) } function _glCompressedTexSubImage3D ( target , level , xoffset , yoffset , zoffset , width , height , depth , format , imageSize , data ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { if ( GLctx . currentPixelUnpackBufferBinding ) { GLctx [ " compressedTexSubImage3D " ] ( target , level , xoffset , yoffset , zoffset , width , height , depth , format , imageSize , data ) } else { GLctx [ " compressedTexSubImage3D " ] ( target , level , xoffset , yoffset , zoffset , width , height , depth , format , HEAPU8 , data , imageSize ) } } else { GLctx [ " compressedTexSubImage3D " ] ( target , level , xoffset , yoffset , zoffset , width , height , depth , format , data ? HEAPU8 . subarray ( data , data + imageSize ) : null ) } } function _glCreateProgram ( ) { var id = GL . getNewId ( GL . programs ) ; var program = GLctx . createProgram ( ) ; program . name = id ; GL . programs [ id ] = program ; return id } function _glCreateShader ( shaderType ) { var id = GL . getNewId ( GL . shaders ) ; GL . shaders [ id ] = GLctx . createShader ( shaderType ) ; return id } function _glCullFace ( x0 ) { GLctx [ " cullFace " ] ( x0 ) } function _glDeleteBuffers ( n , buffers ) { for ( var i = 0 ; i < n ; i + + ) { var id = HEAP32 [ buffers + i * 4 > > 2 ] ; var buffer = GL . buffers [ id ] ; if ( ! buffer ) continue ; GLctx . deleteBuffer ( buffer ) ; buffer . name = 0 ; GL . buffers [ id ] = null ; if ( id = = GL . currArrayBuffer ) GL . currArrayBuffer = 0 ; if ( id = = GL . currElementArrayBuffer ) GL . currElementArrayBuffer = 0 ; if ( id = = GLctx . currentPixelPackBufferBinding ) GLctx . currentPixelPackBufferBinding = 0 ; if ( id = = GLctx . currentPixelUnpackBufferBinding ) GLctx . currentPixelUnpackBufferBinding = 0 } } function _glDeleteFramebuffers ( n , framebuffers ) { for ( var i = 0 ; i < n ; + + i ) { var id = HEAP32 [ framebuffers + i * 4 > > 2 ] ; var framebuffer = GL . framebuffers [ id ] ; if ( ! 
framebuffer ) continue ; GLctx . deleteFramebuffer ( framebuffer ) ; framebuffer . name = 0 ; GL . framebuffers [ id ] = null } } function _glDeleteProgram ( id ) { if ( ! id ) return ; var program = GL . programs [ id ] ; if ( ! program ) { GL . recordError ( 1281 ) ; return } GLctx . deleteProgram ( program ) ; program . name = 0 ; GL . programs [ id ] = null ; GL . programInfos [ id ] = null } function _glDeleteRenderbuffers ( n , renderbuffers ) { for ( var i = 0 ; i < n ; i + + ) { var id = HEAP32 [ renderbuffers + i * 4 > > 2 ] ; var renderbuffer = GL . renderbuffers [ id ] ; if ( ! renderbuffer ) continue ; GLctx . deleteRenderbuffer ( renderbuffer ) ; renderbuffer . name = 0 ; GL . renderbuffers [ id ] = null } } function _glDeleteSamplers ( n , samplers ) { for ( var i = 0 ; i < n ; i + + ) { var id = HEAP32 [ samplers + i * 4 > > 2 ] ; var sampler = GL . samplers [ id ] ; if ( ! sampler ) continue ; GLctx [ " deleteSampler " ] ( sampler ) ; sampler . name = 0 ; GL . samplers [ id ] = null } } function _glDeleteShader ( id ) { if ( ! id ) return ; var shader = GL . shaders [ id ] ; if ( ! shader ) { GL . recordError ( 1281 ) ; return } GLctx . deleteShader ( shader ) ; GL . shaders [ id ] = null } function _glDeleteSync ( id ) { if ( ! id ) return ; var sync = GL . syncs [ id ] ; if ( ! sync ) { GL . recordError ( 1281 ) ; return } GLctx . deleteSync ( sync ) ; sync . name = 0 ; GL . syncs [ id ] = null } function _glDeleteTextures ( n , textures ) { for ( var i = 0 ; i < n ; i + + ) { var id = HEAP32 [ textures + i * 4 > > 2 ] ; var texture = GL . textures [ id ] ; if ( ! texture ) continue ; GLctx . deleteTexture ( texture ) ; texture . name = 0 ; GL . textures [ id ] = null } } function _glDeleteVertexArrays ( n , vaos ) { for ( var i = 0 ; i < n ; i + + ) { var id = HEAP32 [ vaos + i * 4 > > 2 ] ; GLctx [ " deleteVertexArray " ] ( GL . vaos [ id ] ) ; GL . vaos [ id ] = null } } function _glDepthFunc ( x0 ) { GLctx [ " depthFunc " ] ( x0 ) } function _glDepthMask ( flag ) { GLctx . depthMask ( ! ! flag ) } function _glDetachShader ( program , shader ) { GLctx . detachShader ( GL . programs [ program ] , GL . shaders [ shader ] ) } function _glDisable ( x0 ) { GLctx [ " disable " ] ( x0 ) } function _glDisableVertexAttribArray ( index ) { GLctx . disableVertexAttribArray ( index ) } function _glDrawArrays ( mode , first , count ) { GLctx . drawArrays ( mode , first , count ) } function _glDrawRangeElements ( mode , start , end , count , type , indices ) { _glDrawElements ( mode , count , type , indices ) } function _glEnable ( x0 ) { GLctx [ " enable " ] ( x0 ) } function _glEnableVertexAttribArray ( index ) { GLctx . enableVertexAttribArray ( index ) } function _glFenceSync ( condition , flags ) { var sync = GLctx . fenceSync ( condition , flags ) ; if ( sync ) { var id = GL . getNewId ( GL . syncs ) ; sync . name = id ; GL . syncs [ id ] = sync ; return id } else { return 0 } } function _glFlush ( ) { GLctx [ " flush " ] ( ) } function _glFramebufferRenderbuffer ( target , attachment , renderbuffertarget , renderbuffer ) { GLctx . framebufferRenderbuffer ( target , attachment , renderbuffertarget , GL . renderbuffers [ renderbuffer ] ) } function _glFramebufferTexture2D ( target , attachment , textarget , texture , level ) { GLctx . framebufferTexture2D ( target , attachment , textarget , GL . textures [ texture ] , level ) } function _glFramebufferTextureLayer ( target , attachment , texture , level , layer ) { GLctx . framebufferTextureLayer ( target , attachment , GL . 
textures [ texture ] , level , layer ) } function _glFrontFace ( x0 ) { GLctx [ " frontFace " ] ( x0 ) } function _glGenBuffers ( n , buffers ) { __glGenObject ( n , buffers , " createBuffer " , GL . buffers ) } function _glGenFramebuffers ( n , ids ) { __glGenObject ( n , ids , " createFramebuffer " , GL . framebuffers ) } function _glGenRenderbuffers ( n , renderbuffers ) { __glGenObject ( n , renderbuffers , " createRenderbuffer " , GL . renderbuffers ) } function _glGenSamplers ( n , samplers ) { __glGenObject ( n , samplers , " createSampler " , GL . samplers ) } function _glGenTextures ( n , textures ) { __glGenObject ( n , textures , " createTexture " , GL . textures ) } function _glGenVertexArrays ( n , arrays ) { __glGenObject ( n , arrays , " createVertexArray " , GL . vaos ) } function _glGenerateMipmap ( x0 ) { GLctx [ " generateMipmap " ] ( x0 ) } function _glGetError ( ) { if ( GL . lastError ) { var error = GL . lastError ; GL . lastError = 0 ; return error } else { return GLctx . getError ( ) } } function _glGetFloatv ( name_ , p ) { emscriptenWebGLGet ( name_ , p , " Float " ) } function _glGetIntegerv ( name_ , p ) { emscriptenWebGLGet ( name_ , p , " Integer " ) } function _glGetProgramInfoLog ( program , maxLength , length , infoLog ) { var log = GLctx . getProgramInfoLog ( GL . programs [ program ] ) ; if ( log = = = null ) log = " ( unknown error ) " ; if ( maxLength > 0 & & infoLog ) { var numBytesWrittenExclNull = stringToUTF8 ( log , infoLog , maxLength ) ; if ( length ) HEAP32 [ length > > 2 ] = numBytesWrittenExclNull } else { if ( length ) HEAP32 [ length > > 2 ] = 0 } } function _glGetProgramiv ( program , pname , p ) { if ( ! p ) { GL . recordError ( 1281 ) ; return } if ( program > = GL . counter ) { GL . recordError ( 1281 ) ; return } var ptable = GL . programInfos [ program ] ; if ( ! ptable ) { GL . recordError ( 1282 ) ; return } if ( pname = = 35716 ) { var log = GLctx . getProgramInfoLog ( GL . programs [ program ] ) ; if ( log = = = null ) log = " ( unknown error ) " ; HEAP32 [ p > > 2 ] = log . length + 1 } else if ( pname = = 35719 ) { HEAP32 [ p > > 2 ] = ptable . maxUniformLength } else if ( pname = = 35722 ) { if ( ptable . maxAttributeLength = = - 1 ) { program = GL . programs [ program ] ; var numAttribs = GLctx . getProgramParameter ( program , 35721 ) ; ptable . maxAttributeLength = 0 ; for ( var i = 0 ; i < numAttribs ; + + i ) { var activeAttrib = GLctx . getActiveAttrib ( program , i ) ; ptable . maxAttributeLength = Math . max ( ptable . maxAttributeLength , activeAttrib . name . length + 1 ) } } HEAP32 [ p > > 2 ] = ptable . maxAttributeLength } else if ( pname = = 35381 ) { if ( ptable . maxUniformBlockNameLength = = - 1 ) { program = GL . programs [ program ] ; var numBlocks = GLctx . getProgramParameter ( program , 35382 ) ; ptable . maxUniformBlockNameLength = 0 ; for ( var i = 0 ; i < numBlocks ; + + i ) { var activeBlockName = GLctx . getActiveUniformBlockName ( program , i ) ; ptable . maxUniformBlockNameLength = Math . max ( ptable . maxUniformBlockNameLength , activeBlockName . length + 1 ) } } HEAP32 [ p > > 2 ] = ptable . maxUniformBlockNameLength } else { HEAP32 [ p > > 2 ] = GLctx . getProgramParameter ( GL . programs [ program ] , pname ) } } function _glGetShaderInfoLog ( shader , maxLength , length , infoLog ) { var log = GLctx . getShaderInfoLog ( GL . 
shaders [ shader ] ) ; if ( log = = = null ) log = " ( unknown error ) " ; if ( maxLength > 0 & & infoLog ) { var numBytesWrittenExclNull = stringToUTF8 ( log , infoLog , maxLength ) ; if ( length ) HEAP32 [ length > > 2 ] = numBytesWrittenExclNull } else { if ( length ) HEAP32 [ length > > 2 ] = 0 } } function _glGetShaderiv ( shader , pname , p ) { if ( ! p ) { GL . recordError ( 1281 ) ; return } if ( pname = = 35716 ) { var log = GLctx . getShaderInfoLog ( GL . shaders [ shader ] ) ; if ( log = = = null ) log = " ( unknown error ) " ; HEAP32 [ p > > 2 ] = log . length + 1 } else if ( pname = = 35720 ) { var source = GLctx . getShaderSource ( GL . shaders [ shader ] ) ; var sourceLength = source = = = null | | source . length = = 0 ? 0 : source . length + 1 ; HEAP32 [ p > > 2 ] = sourceLength } else { HEAP32 [ p > > 2 ] = GLctx . getShaderParameter ( GL . shaders [ shader ] , pname ) } } function _glGetString ( name_ ) { if ( GL . stringCache [ name_ ] ) return GL . stringCache [ name_ ] ; var ret ; switch ( name_ ) { case 7939 : var exts = GLctx . getSupportedExtensions ( ) ; var gl_exts = [ ] ; for ( var i = 0 ; i < exts . length ; + + i ) { gl_exts . push ( exts [ i ] ) ; gl_exts . push ( " GL_ " + exts [ i ] ) } ret = stringToNewUTF8 ( gl_exts . join ( " " ) ) ; break ; case 7936 : case 7937 : case 37445 : case 37446 : var s = GLctx . getParameter ( name_ ) ; if ( ! s ) { GL . recordError ( 1280 ) } ret = stringToNewUTF8 ( s ) ; break ; case 7938 : var glVersion = GLctx . getParameter ( GLctx . VERSION ) ; if ( GL . currentContext . version > = 2 ) glVersion = " OpenGL ES 3 . 0 ( " + glVersion + " ) " ; else { glVersion = " OpenGL ES 2 . 0 ( " + glVersion + " ) " } ret = stringToNewUTF8 ( glVersion ) ; break ; case 35724 : var glslVersion = GLctx . getParameter ( GLctx . SHADING_LANGUAGE_VERSION ) ; var ver_re = / ^ WebGL GLSL ES ( [ 0 - 9 ] \ . [ 0 - 9 ] [ 0 - 9 ] ? ) ( ? : $ | . * ) / ; var ver_num = glslVersion . match ( ver_re ) ; if ( ver_num ! = = null ) { if ( ver_num [ 1 ] . length = = 3 ) ver_num [ 1 ] = ver_num [ 1 ] + " 0 " ; glslVersion = " OpenGL ES GLSL ES " + ver_num [ 1 ] + " ( " + glslVersion + " ) " } ret = stringToNewUTF8 ( glslVersion ) ; break ; default : GL . recordError ( 1280 ) ; return 0 } GL . stringCache [ name_ ] = ret ; return ret } function _glGetStringi ( name , index ) { if ( GL . currentContext . version < 2 ) { GL . recordError ( 1282 ) ; return 0 } var stringiCache = GL . stringiCache [ name ] ; if ( stringiCache ) { if ( index < 0 | | index > = stringiCache . length ) { GL . recordError ( 1281 ) ; return 0 } return stringiCache [ index ] } switch ( name ) { case 7939 : var exts = GLctx . getSupportedExtensions ( ) ; var gl_exts = [ ] ; for ( var i = 0 ; i < exts . length ; + + i ) { gl_exts . push ( stringToNewUTF8 ( exts [ i ] ) ) ; gl_exts . push ( stringToNewUTF8 ( " GL_ " + exts [ i ] ) ) } stringiCache = GL . stringiCache [ name ] = gl_exts ; if ( index < 0 | | index > = stringiCache . length ) { GL . recordError ( 1281 ) ; return 0 } return stringiCache [ index ] ; default : GL . recordError ( 1280 ) ; return 0 } } function _glGetUniformBlockIndex ( program , uniformBlockName ) { return GLctx [ " getUniformBlockIndex " ] ( GL . programs [ program ] , UTF8ToString ( uniformBlockName ) ) } function _glGetUniformLocation ( program , name ) { name = UTF8ToString ( name ) ; var arrayIndex = 0 ; if ( name [ name . length - 1 ] = = " ] " ) { var leftBrace = name . lastIndexOf ( " [ " ) ; arrayIndex = name [ leftBrace + 1 ] ! = " ] " ? 
parseInt ( name . slice ( leftBrace + 1 ) ) : 0 ; name = name . slice ( 0 , leftBrace ) } var uniformInfo = GL . programInfos [ program ] & & GL . programInfos [ program ] . uniforms [ name ] ; if ( uniformInfo & & arrayIndex > = 0 & & arrayIndex < uniformInfo [ 0 ] ) { return uniformInfo [ 1 ] + arrayIndex } else { return - 1 } } function _glGetVertexAttribiv ( index , pname , params ) { emscriptenWebGLGetVertexAttrib ( index , pname , params , " FloatToInteger " ) } function _glHint ( x0 , x1 ) { GLctx [ " hint " ] ( x0 , x1 ) } function _glInvalidateFramebuffer ( target , numAttachments , attachments ) { var list = __tempFixedLengthArray [ numAttachments ] ; for ( var i = 0 ; i < numAttachments ; i + + ) { list [ i ] = HEAP32 [ attachments + i * 4 > > 2 ] } GLctx [ " invalidateFramebuffer " ] ( target , list ) } function _glInvalidateSubFramebuffer ( target , numAttachments , attachments , x , y , width , height ) { var list = __tempFixedLengthArray [ numAttachments ] ; for ( var i = 0 ; i < numAttachments ; i + + ) { list [ i ] = HEAP32 [ attachments + i * 4 > > 2 ] } GLctx [ " invalidateSubFramebuffer " ] ( target , list , x , y , width , height ) } function _glIsEnabled ( x0 ) { return GLctx [ " isEnabled " ] ( x0 ) } function _glLinkProgram ( program ) { GLctx . linkProgram ( GL . programs [ program ] ) ; GL . populateUniformTable ( program ) } function _glPixelStorei ( pname , param ) { if ( pname = = 3317 ) { GL . unpackAlignment = param } GLctx . pixelStorei ( pname , param ) } function _glPolygonOffset ( x0 , x1 ) { GLctx [ " polygonOffset " ] ( x0 , x1 ) } function _glReadPixels ( x , y , width , height , format , type , pixels ) { if ( GL . currentContext . supportsWebGL2EntryPoints ) { if ( GLctx . currentPixelPackBufferBinding ) { GLctx . readPixels ( x , y , width , height , format , type , pixels ) } else { GLctx . readPixels ( x , y , width , height , format , type , __heapObjectForWebGLType ( type ) , pixels > > ( __heapAccessShiftForWebGLType [ type ] | 0 ) ) } return } var pixelData = emscriptenWebGLGetTexPixelData ( type , format , width , height , pixels , format ) ; if ( ! pixelData ) { GL . recordError ( 1280 ) ; return } GLctx . readPixels ( x , y , width , height , format , type , pixelData ) } function _glRenderbufferStorage ( x0 , x1 , x2 , x3 ) { GLctx [ " renderbufferStorage " ] ( x0 , x1 , x2 , x3 ) } function _glRenderbufferStorageMultisample ( x0 , x1 , x2 , x3 , x4 ) { GLctx [ " renderbufferStorageMultisample " ] ( x0 , x1 , x2 , x3 , x4 ) } function _glSamplerParameteri ( sampler , pname , param ) { GLctx [ " samplerParameteri " ] ( GL . samplers [ sampler ] , pname , param ) } function _glScissor ( x0 , x1 , x2 , x3 ) { GLctx [ " scissor " ] ( x0 , x1 , x2 , x3 ) } function _glShaderSource ( shader , count , string , length ) { var source = GL . getSource ( shader , count , string , length ) ; GLctx . shaderSource ( GL . shaders [ shader ] , source ) } function _glTexParameteri ( x0 , x1 , x2 ) { GLctx [ " texParameteri " ] ( x0 , x1 , x2 ) } function _glTexStorage2D ( x0 , x1 , x2 , x3 , x4 ) { GLctx [ " texStorage2D " ] ( x0 , x1 , x2 , x3 , x4 ) } function _glTexStorage2DMultisample ( ) { err ( " missing function : glTexStorage2DMultisample " ) ; abort ( - 1 ) } function _glTexStorage3D ( x0 , x1 , x2 , x3 , x4 , x5 ) { GLctx [ " texStorage3D " ] ( x0 , x1 , x2 , x3 , x4 , x5 ) } function _glTexSubImage2D ( target , level , xoffset , yoffset , width , height , format , type , pixels ) { if ( GL . currentContext . 
supportsWebGL2EntryPoints ) { if ( GLctx . currentPixelUnpackBufferBinding ) { GLctx . texSubImage2D ( target , level , xoffset , yoffset , width , height , format , type , pixels ) } else if ( pixels ! = 0 ) { GLctx . texSubImage2D ( target , level , xoffset , yoffset , width , height , format , type , __heapObjectForWebGLType ( type ) , pixels > > ( __heapAccessShiftForWebGLType [ type ] | 0 ) ) } else { GLctx . texSubImage2D ( target , level , xoffset , yoffset , width , height , format , type , null ) } return } var pixelData = null ; if ( pixels ) pixelData = emscriptenWebGLGetTexPixelData ( type , format , width , height , pixels , 0 ) ; GLctx . texSubImage2D ( target , level , xoffset , yoffset , width , height , format , type , pixelData ) } function _glTexSubImage3D ( target , level , xoffset , yoffset , zoffset , width , height , depth , format , type , pixels ) { if ( GLctx . currentPixelUnpackBufferBinding ) { GLctx [ " texSubImage3D " ] ( target , level , xoffset , yoffset , zoffset , width , height , depth , format , type , pixels ) } else if ( pixels ! = 0 ) { GLctx [ " texSubImage3D " ] ( target , level , xoffset , yoffset , zoffset , width , height , depth , format , type , __heapObjectForWebGLType ( type ) , pixels > > ( __heapAccessShiftForWebGLType [ type ] | 0 ) ) } else { GLctx [ " texSubImage3D " ] ( target , level , xoffset , yoffset , zoffset , width , height , depth , format , type , null ) } } function _glUniform1f ( location , v0 ) { GLctx . uniform1f ( GL . uniforms [ location ] , v0 ) } function _glUniform1i ( location , v0 ) { GLctx . uniform1i ( GL . uniforms [ location ] , v0 ) } function _glUniform4f ( location , v0 , v1 , v2 , v3 ) { GLctx . uniform4f ( GL . uniforms [ location ] , v0 , v1 , v2 , v3 ) } function _glUniformBlockBinding ( program , uniformBlockIndex , uniformBlockBinding ) { program = GL . programs [ program ] ; GLctx [ " uniformBlockBinding " ] ( program , uniformBlockIndex , uniformBlockBinding ) } function _glUseProgram ( program ) { GLctx . useProgram ( GL . programs [ program ] ) } function _glVertexAttribIPointer ( index , size , type , stride , ptr ) { GLctx [ " vertexAttribIPointer " ] ( index , size , type , stride , ptr ) } function _glVertexAttribPointer ( index , size , type , normalized , stride , ptr ) { GLctx . vertexAttribPointer ( index , size , type , ! ! normalized , stride , ptr ) } function _glViewport ( x0 , x1 , x2 , x3 ) { GLctx [ " viewport " ] ( x0 , x1 , x2 , x3 ) } function _glWaitSync ( sync , flags , timeoutLo , timeoutHi ) { timeoutLo = timeoutLo > > > 0 ; timeoutHi = timeoutHi > > > 0 ; var timeout = timeoutLo = = 4294967295 & & timeoutHi = = 4294967295 ? - 1 : makeBigInt ( timeoutLo , timeoutHi , true ) ; GLctx . waitSync ( GL . syncs [ sync ] , flags , timeout ) } function _llvm_exp2_f32 ( x ) { return Math . pow ( 2 , x ) } function _llvm_exp2_f64 ( a0 ) { return _llvm_exp2_f32 ( a0 ) } function _llvm_log2_f32 ( x ) { return Math . log ( x ) / Math . LN2 } function _llvm_trap ( ) { abort ( " trap ! " ) } function _emscripten_memcpy_big ( dest , src , num ) { HEAPU8 . set ( HEAPU8 . 
subarray ( src , src + num ) , dest ) } function _pthread_cond_destroy ( ) { return 0 } function _pthread_cond_signal ( ) { return 0 } function _pthread_cond_timedwait ( ) { return 0 } function _pthread_cond_wait ( ) { return 0 } function _pthread_create ( ) { return 11 } function _pthread_join ( ) { } function _sysconf ( name ) { switch ( name ) { case 30 : return PAGE_SIZE ; case 85 : var maxHeapSize = 2 * 1024 * 1024 * 1024 - 65536 ; return maxHeapSize / PAGE_SIZE ; case 132 : case 133 : case 12 : case 137 : case 138 : case 15 : case 235 : case 16 : case 17 : case 18 : case 19 : case 20 : case 149 : case 13 : case 10 : case 236 : case 153 : case 9 : case 21 : case 22 : case 159 : case 154 : case 14 : case 77 : case 78 : case 139 : case 80 : case 81 : case 82 : case 68 : case 67 : case 164 : case 11 : case 29 : case 47 : case 48 : case 95 : case 52 : case 51 : case 46 : return 200809 ; case 79 : return 0 ; case 27 : case 246 : case 127 : case 128 : case 23 : case 24 : case 160 : case 161 : case 181 : case 182 : case 242 : case 183 : case 184 : case 243 : case 244 : case 245 : case 165 : case 178 : case 179 : case 49 : case 50 : case 168 : case 169 : case 175 : case 170 : case 171 : case 172 : case 97 : case 76 : case 32 : case 173 : case 35 : return - 1 ; case 176 : case 177 : case 7 : case 155 : case 8 : case 157 : case 125 : case 126 : case 92 : case 93 : case 129 : case 130 : case 131 : case 94 : case 91 : return 1 ; case 74 : case 60 : case 69 : case 70 : case 4 : return 1024 ; case 31 : case 42 : case 72 : return 32 ; case 87 : case 26 : case 33 : return 2147483647 ; case 34 : case 1 : return 47839 ; case 38 : case 36 : return 99 ; case 43 : case 37 : return 2048 ; case 0 : return 2097152 ; case 3 : return 65536 ; case 28 : return 32768 ; case 44 : return 32767 ; case 75 : return 16384 ; case 39 : return 1e3 ; case 89 : return 700 ; case 71 : return 256 ; case 40 : return 255 ; case 2 : return 100 ; case 180 : return 64 ; case 25 : return 20 ; case 5 : return 16 ; case 6 : return 6 ; case 73 : return 4 ; case 84 : { if ( typeof navigator = = = " object " ) return navigator [ " hardwareConcurrency " ] | | 1 ; return 1 } } ___setErrNo ( 22 ) ; return - 1 } FS . staticInit ( ) ; if ( ENVIRONMENT_IS_NODE ) { var fs = require ( " fs " ) ; var NODEJS_PATH = require ( " path " ) ; NODEFS . staticInit ( ) } InternalError = Module [ " InternalError " ] = extendError ( Error , " InternalError " ) ; embind_init_charCodes ( ) ; BindingError = Module [ " BindingError " ] = extendError ( Error , " BindingError " ) ; init_ClassHandle ( ) ; init_RegisteredPointer ( ) ; init_embind ( ) ; UnboundTypeError = Module [ " UnboundTypeError " ] = extendError ( Error , " UnboundTypeError " ) ; init_emval ( ) ; if ( ENVIRONMENT_IS_NODE ) { _emscripten_get_now = function _emscripten_get_now_actual ( ) { var t = process [ " hrtime " ] ( ) ; return t [ 0 ] * 1e3 + t [ 1 ] / 1e6 } } else if ( typeof dateNow ! = = " undefined " ) { _emscripten_get_now = dateNow } else if ( typeof self = = = " object " & & self [ " performance " ] & & typeof self [ " performance " ] [ " now " ] = = = " function " ) { _emscripten_get_now = function ( ) { return self [ " performance " ] [ " now " ] ( ) } } else if ( typeof performance = = = " object " & & typeof performance [ " now " ] = = = " function " ) { _emscripten_get_now = function ( ) { return performance [ " now " ] ( ) } } else { _emscripten_get_now = Date . now } var GLctx ; GL . init ( ) ; for ( var i = 0 ; i < 32 ; i + + ) __tempFixedLengthArray . 
push ( new Array ( i ) ) ; var ASSERTIONS = false ; function intArrayFromString ( stringy , dontAddNull , length ) { var len = length > 0 ? length : lengthBytesUTF8 ( stringy ) + 1 ; var u8array = new Array ( len ) ; var numBytesWritten = stringToUTF8Array ( stringy , u8array , 0 , u8array . length ) ; if ( dontAddNull ) u8array . length = numBytesWritten ; return u8array } var asmGlobalArg = { } ; var asmLibraryArg = { " abort " : abort , " setTempRet0 " : setTempRet0 , " getTempRet0 " : getTempRet0 , " ClassHandle " : ClassHandle , " ClassHandle_clone " : ClassHandle_clone , " ClassHandle_delete " : ClassHandle_delete , " ClassHandle_deleteLater " : ClassHandle_deleteLater , " ClassHandle_isAliasOf " : ClassHandle_isAliasOf , " ClassHandle_isDeleted " : ClassHandle_isDeleted , " RegisteredClass " : RegisteredClass , " RegisteredPointer " : RegisteredPointer , " RegisteredPointer_deleteObject " : RegisteredPointer_deleteObject , " RegisteredPointer_destructor " : RegisteredPointer_destructor , " RegisteredPointer_fromWireType " : RegisteredPointer_fromWireType , " RegisteredPointer_getPointee " : RegisteredPointer_getPointee , " __ZSt18uncaught_exceptionv " : __ZSt18uncaught_exceptionv , " ___atomic_compare_exchange_8 " : ___atomic_compare_exchange_8 , " ___atomic_fetch_sub_8 " : ___atomic_fetch_sub_8 , " ___cxa_begin_catch " : ___cxa_begin_catch , " ___cxa_find_matching_catch " : ___cxa_find_matching_catch , " ___cxa_free_exception " : ___cxa_free_exception , " ___cxa_pure_virtual " : ___cxa_pure_virtual , " ___gxx_personality_v0 " : ___gxx_personality_v0 , " ___resumeException " : ___resumeException , " ___setErrNo " : ___setErrNo , " ___syscall140 " : ___syscall140 , " ___syscall146 " : ___syscall146 , " ___syscall221 " : ___syscall221 , " ___syscall3 " : ___syscall3 , " ___syscall5 " : ___syscall5 , " ___syscall54 " : ___syscall54 , " ___syscall6 " : ___syscall6 , " __computeUnpackAlignedImageSize " : __computeUnpackAlignedImageSize , " __embind_finalize_value_array " : __embind_finalize_value_array , " __embind_finalize_value_object " : __embind_finalize_value_object , " __embind_register_bool " : __embind_register_bool , " __embind_register_class " : __embind_register_class , " __embind_register_class_class_function " : __embind_register_class_class_function , " __embind_register_class_constructor " : __embind_register_class_constructor , " __embind_register_class_function " : __embind_register_class_function , " __embind_register_class_property " : __embind_register_class_property , " __embind_register_emval " : __embind_register_emval , " __embind_register_enum " : __embind_register_enum , " __embind_register_enum_value " : __embind_register_enum_value , " __embind_register_float " : __embind_register_float , " __embind_register_function " : __embind_register_function , " __embind_register_integer " : __embind_register_integer , " __embind_register_memory_view " : __embind_register_memory_view , " __embind_register_std_string " : __embind_register_std_string , " __embind_register_std_wstring " : __embind_register_std_wstring , " __embind_register_value_array " : __embind_register_value_array , " __embind_register_value_array_element " : __embind_register_value_array_element , " __embind_register_value_object " : __embind_register_value_object , " __embind_register_value_object_field " : __embind_register_value_object_field , " __embind_register_void " : __embind_register_void , " __emval_as " : __emval_as , " __emval_decref " : __emval_decref , " __emval_get_property " : 
__emval_get_property , " __emval_incref " : __emval_incref , " __emval_new_cstring " : __emval_new_cstring , " __emval_register " : __emval_register , " __emval_run_destructors " : __emval_run_destructors , " __emval_take_value " : __emval_take_value , " __glGenObject " : __glGenObject , " __heapObjectForWebGLType " : __heapObjectForWebGLType , " _abort " : _abort , " _clock_gettime " : _clock_gettime , " _embind_repr " : _embind_repr , " _emscripten_get_heap_size " : _emscripten_get_heap_size , " _emscripten_get_now " : _emscripten_get_now , " _emscripten_get_now_is_monotonic " : _emscripten_get_now_is_monotonic , " _emscripten_glActiveTexture " : _emscripten_glActiveTexture , " _emscripten_glAttachShader " : _emscripten_glAttachShader , " _emscripten_glBeginQuery " : _emscripten_glBeginQuery , " _emscripten_glBeginQueryEXT " : _emscripten_glBeginQueryEXT , " _emscripten_glBeginTransformFeedback " : _emscripten_glBeginTransformFeedback , " _emscripten_glBindAttribLocation " : _emscripten_glBindAttribLocation , " _emscripten_glBindBuffer " : _emscripten_glBindBuffer , " _emscripten_glBindBufferBase " : _emscripten_glBindBufferBase , " _emscripten_glBindBufferRange " : _emscripten_glBindBufferRange , " _emscripten_glBindFramebuffer " : _emscripten_glBindFramebuffer , " _emscripten_glBindRenderbuffer " : _emscripten_glBindRenderbuffer , " _emscripten_glBindSampler " : _emscripten_glBindSampler , " _emscripten_glBindTexture " : _emscripten_glBindTexture , " _emscripten_glBindTransformFeedback " : _emscripten_glBindTransformFeedback , " _emscripten_glBindVertexArray " : _emscripten_glBindVertexArray , " _emscripten_glBindVertexArrayOES " : _emscripten_glBindVertexArrayOES , " _emscripten_glBlendColor " : _emscripten_glBlendColor , " _emscripten_glBlendEquation " : _emscripten_glBlendEquation , " _emscripten_glBlendEquationSeparate " : _emscripten_glBlendEquationSeparate , " _emscripten_glBlendFunc " : _emscripten_glBlendFunc , " _emscripten_glBlendFuncSeparate " : _emscripten_glBlendFuncSeparate , " _emscripten_glBlitFramebuffer " : _emscripten_glBlitFramebuffer , " _emscripten_glBufferData " : _emscripten_glBufferData , " _emscripten_glBufferSubData " : _emscripten_glBufferSubData , " _emscripten_glCheckFramebufferStatus " : _emscripten_glCheckFramebufferStatus , " _emscripten_glClear " : _emscripten_glClear , " _emscripten_glClearBufferfi " : _emscripten_glClearBufferfi , " _emscripten_glClearBufferfv " : _emscripten_glClearBufferfv , " _emscripten_glClearBufferiv " : _emscripten_glClearBufferiv , " _emscripten_glClearBufferuiv " : _emscripten_glClearBufferuiv , " _emscripten_glClearColor " : _emscripten_glClearColor , " _emscripten_glClearDepthf " : _emscripten_glClearDepthf , " _emscripten_glClearStencil " : _emscripten_glClearStencil , " _emscripten_glClientWaitSync " : _emscripten_glClientWaitSync , " _emscripten_glColorMask " : _emscripten_glColorMask , " _emscripten_glCompileShader " : _emscripten_glCompileShader , " _emscripten_glCompressedTexImage2D " : _emscripten_glCompressedTexImage2D , " _emscripten_glCompressedTexImage3D " : _emscripten_glCompressedTexImage3D , " _emscripten_glCompressedTexSubImage2D " : _emscripten_glCompressedTexSubImage2D , " _emscripten_glCompressedTexSubImage3D " : _emscripten_glCompressedTexSubImage3D , " _emscripten_glCopyBufferSubData " : _emscripten_glCopyBufferSubData , " _emscripten_glCopyTexImage2D " : _emscripten_glCopyTexImage2D , " _emscripten_glCopyTexSubImage2D " : _emscripten_glCopyTexSubImage2D , " _emscripten_glCopyTexSubImage3D " : 
_emscripten_glCopyTexSubImage3D , " _emscripten_glCreateProgram " : _emscripten_glCreateProgram , " _emscripten_glCreateShader " : _emscripten_glCreateShader , " _emscripten_glCullFace " : _emscripten_glCullFace , " _emscripten_glDeleteBuffers " : _emscripten_glDeleteBuffers , " _emscripten_glDeleteFramebuffers " : _emscripten_glDeleteFramebuffers , " _emscripten_glDeleteProgram " : _emscripten_glDeleteProgram , " _emscripten_glDeleteQueries " : _emscripten_glDeleteQueries , " _emscripten_glDeleteQueriesEXT " : _emscripten_glDeleteQueriesEXT , " _emscripten_glDeleteRenderbuffers " : _emscripten_glDeleteRenderbuffers , " _emscripten_glDeleteSamplers " : _emscripten_glDeleteSamplers , " _emscripten_glDeleteShader " : _emscripten_glDeleteShader , " _emscripten_glDeleteSync " : _emscripten_glDeleteSync , " _emscripten_glDeleteTextures " : _emscripten_glDeleteTextures , " _emscripten_glDeleteTransformFeedbacks " : _emscripten_glDeleteTransformFeedbacks , " _emscripten_glDeleteVertexArrays " : _emscripten_glDeleteVertexArrays , " _emscripten_glDeleteVertexArraysOES " : _emscripten_glDeleteVertexArraysOES , " _emscripten_glDepthFunc " : _emscripten_glDepthFunc , " _emscripten_glDepthMask " : _emscripten_glDepthMask , " _emscripten_glDepthRangef " : _emscripten_glDepthRangef , " _emscripten_glDetachShader " : _emscripten_glDetachShader , " _emscripten_glDisable " : _emscripten_glDisable , " _emscripten_glDisableVertexAttribArray " : _emscripten_glDisableVertexAttribArray , " _emscripten_glDrawArrays " : _emscripten_glDrawArrays , " _emscripten_glDrawArraysInstanced " : _emscripten_glDrawArraysInstanced , " _emscripten_glDrawArraysInstancedANGLE " : _emscripten_glDrawArraysInstancedANGLE , " _emscripten_glDrawArraysInstancedARB " : _emscripten_glDrawArraysInstancedARB , " _emscripten_glDrawArraysInstancedEXT " : _emscripten_glDrawArraysInstancedEXT , " _emscripten_glDrawArraysInstancedNV " : _emscripten_glDrawArraysInstancedNV , " _emscripten_glDrawBuffers " : _emscripten_glDrawBuffers , " _emscripten_glDrawBuffersEXT " : _emscripten_glDrawBuffersEXT , " _emscripten_glDrawBuffersWEBGL " : _emscripten_glDrawBuffersWEBGL , " _emscripten_glDrawElements " : _emscripten_glDrawElements , " _emscripten_glDrawElementsInstanced " : _emscripten_glDrawElementsInstanced , " _emscripten_glDrawElementsInstancedANGLE " : _emscripten_glDrawElementsInstancedANGLE , " _emscripten_glDrawElementsInstancedARB " : _emscripten_glDrawElementsInstancedARB , " _emscripten_glDrawElementsInstancedEXT " : _emscripten_glDrawElementsInstancedEXT , " _emscripten_glDrawElementsInstancedNV " : _emscripten_glDrawElementsInstancedNV , " _emscripten_glDrawRangeElements " : _emscripten_glDrawRangeElements , " _emscripten_glEnable " : _emscripten_glEnable , " _emscripten_glEnableVertexAttribArray " : _emscripten_glEnableVertexAttribArray , " _emscripten_glEndQuery " : _emscripten_glEndQuery , " _emscripten_glEndQueryEXT " : _emscripten_glEndQueryEXT , " _emscripten_glEndTransformFeedback " : _emscripten_glEndTransformFeedback , " _emscripten_glFenceSync " : _emscripten_glFenceSync , " _emscripten_glFinish " : _emscripten_glFinish , " _emscripten_glFlush " : _emscripten_glFlush , " _emscripten_glFlushMappedBufferRange " : _emscripten_glFlushMappedBufferRange , " _emscripten_glFramebufferRenderbuffer " : _emscripten_glFramebufferRenderbuffer , " _emscripten_glFramebufferTexture2D " : _emscripten_glFramebufferTexture2D , " _emscripten_glFramebufferTextureLayer " : _emscripten_glFramebufferTextureLayer , " _emscripten_glFrontFace " : 
_emscripten_glFrontFace , " _emscripten_glGenBuffers " : _emscripten_glGenBuffers , " _emscripten_glGenFramebuffers " : _emscripten_glGenFramebuffers , " _emscripten_glGenQueries " : _emscripten_glGenQueries , " _emscripten_glGenQueriesEXT " : _emscripten_glGenQueriesEXT , " _emscripten_glGenRenderbuffers " : _emscripten_glGenRenderbuffers , " _emscripten_glGenSamplers " : _emscripten_glGenSamplers , " _emscripten_glGenTextures " : _emscripten_glGenTextures , " _emscripten_glGenTransformFeedbacks " : _emscripten_glGenTransformFeedbacks , " _emscripten_glGenVertexArrays " : _emscripten_glGenVertexArrays , " _emscripten_glGenVertexArraysOES " : _emscripten_glGenVertexArraysOES , " _emscripten_glGenerateMipmap " : _emscripten_glGenerateMipmap , " _emscripten_glGetActiveAttrib " : _emscripten_glGetActiveAttrib , " _emscripten_glGetActiveUniform " : _emscripten_glGetActiveUniform , " _emscripten_glGetActiveUniformBlockName " : _emscripten_glGetActiveUniformBlockName , " _emscripten_glGetActiveUniformBlockiv " : _emscripten_glGetActiveUniformBlockiv , " _emscripten_glGetActiveUniformsiv " : _emscripten_glGetActiveUniformsiv , " _emscripten_glGetAttachedShaders " : _emscripten_glGetAttachedShaders , " _emscripten_glGetAttribLocation " : _emscripten_glGetAttribLocation , " _emscripten_glGetBooleanv " : _emscripten_glGetBooleanv , " _emscripten_glGetBufferParameteri64v " : _emscripten_glGetBufferParameteri64v , " _emscripten_glGetBufferParameteriv " : _emscripten_glGetBufferParameteriv , " _emscripten_glGetBufferPointerv " : _emscripten_glGetBufferPointerv , " _emscripten_glGetError " : _emscripten_glGetError , " _emscripten_glGetFloatv " : _emscripten_glGetFloatv , " _emscripten_glGetFragDataLocation " : _emscripten_glGetFragDataLocation , " _emscripten_glGetFramebufferAttachmentParameteriv " : _emscripten_glGetFramebufferAttachmentParameteriv , " _emscripten_glGetInteger64i_v " : _emscripten_glGetInteger64i_v , " _emscripten_glGetInteger64v " : _emscripten_glGetInteger64v , " _emscripten_glGetIntegeri_v " : _emscripten_glGetIntegeri_v , " _emscripten_glGetIntegerv " : _emscripten_glGetIntegerv , " _emscripten_glGetInternalformativ " : _emscripten_glGetInternalformativ , " _emscripten_glGetProgramBinary " : _emscripten_glGetProgramBinary , " _emscripten_glGetProgramInfoLog " : _emscripten_glGetProgramInfoLog , " _emscripten_glGetProgramiv " : _emscripten_glGetProgramiv , " _emscripten_glGetQueryObjecti64vEXT " : _emscripten_glGetQueryObjecti64vEXT , " _emscripten_glGetQueryObjectivEXT " : _emscripten_glGetQueryObjectivEXT , " _emscripten_glGetQueryObjectui64vEXT " : _emscripten_glGetQueryObjectui64vEXT , " _emscripten_glGetQueryObjectuiv " : _emscripten_glGetQueryObjectuiv , " _emscripten_glGetQueryObjectuivEXT " : _emscripten_glGetQueryObjectuivEXT , " _emscripten_glGetQueryiv " : _emscripten_glGetQueryiv , " _emscripten_glGetQueryivEXT " : _emscripten_glGetQueryivEXT , " _emscripten_glGetRenderbufferParameteriv " : _emscripten_glGetRenderbufferParameteriv , " _emscripten_glGetSamplerParameterfv " : _emscripten_glGetSamplerParameterfv , " _emscripten_glGetSamplerParameteriv " : _emscripten_glGetSamplerParameteriv , " _emscripten_glGetShaderInfoLog " : _emscripten_glGetShaderInfoLog , " _emscripten_glGetShaderPrecisionFormat " : _emscripten_glGetShaderPrecisionFormat , " _emscripten_glGetShaderSource " : _emscripten_glGetShaderSource , " _emscripten_glGetShaderiv " : _emscripten_glGetShaderiv , " _emscripten_glGetString " : _emscripten_glGetString , " _emscripten_glGetStringi " : 
_emscripten_glGetStringi , " _emscripten_glGetSynciv " : _emscripten_glGetSynciv , " _emscripten_glGetTexParameterfv " : _emscripten_glGetTexParameterfv , " _emscripten_glGetTexParameteriv " : _emscripten_glGetTexParameteriv , " _emscripten_glGetTransformFeedbackVarying " : _emscripten_glGetTransformFeedbackVarying , " _emscripten_glGetUniformBlockIndex " : _emscripten_glGetUniformBlockIndex , " _emscripten_glGetUniformIndices " : _emscripten_glGetUniformIndices , " _emscripten_glGetUniformLocation " : _emscripten_glGetUniformLocation , " _emscripten_glGetUniformfv " : _emscripten_glGetUniformfv , " _emscripten_glGetUniformiv " : _emscripten_glGetUniformiv , " _emscripten_glGetUniformuiv " : _emscripten_glGetUniformuiv , " _emscripten_glGetVertexAttribIiv " : _emscripten_glGetVertexAttribIiv , " _emscripten_glGetVertexAttribIuiv " : _emscripten_glGetVertexAttribIuiv , " _emscripten_glGetVertexAttribPointerv " : _emscripten_glGetVertexAttribPointerv , " _emscripten_glGetVertexAttribfv " : _emscripten_glGetVertexAttribfv , " _emscripten_glGetVertexAttribiv " : _emscripten_glGetVertexAttribiv , " _emscripten_glHint " : _emscripten_glHint , " _emscripten_glInvalidateFramebuffer " : _emscripten_glInvalidateFramebuffer , " _emscripten_glInvalidateSubFramebuffer " : _emscripten_glInvalidateSubFramebuffer , " _emscripten_glIsBuffer " : _emscripten_glIsBuffer , " _emscripten_glIsEnabled " : _emscripten_glIsEnabled , " _emscripten_glIsFramebuffer " : _emscripten_glIsFramebuffer , " _emscripten_glIsProgram " : _emscripten_glIsProgram , " _emscripten_glIsQuery " : _emscripten_glIsQuery , " _emscripten_glIsQueryEXT " : _emscripten_glIsQueryEXT , " _emscripten_glIsRenderbuffer " : _emscripten_glIsRenderbuffer , " _emscripten_glIsSampler " : _emscripten_glIsSampler , " _emscripten_glIsShader " : _emscripten_glIsShader , " _emscripten_glIsSync " : _emscripten_glIsSync , " _emscripten_glIsTexture " : _emscripten_glIsTexture , " _emscripten_glIsTransformFeedback " : _emscripten_glIsTransformFeedback , " _emscripten_glIsVertexArray " : _emscripten_glIsVertexArray , " _emscripten_glIsVertexArrayOES " : _emscripten_glIsVertexArrayOES , " _emscripten_glLineWidth " : _emscripten_glLineWidth , " _emscripten_glLinkProgram " : _emscripten_glLinkProgram , " _emscripten_glMapBufferRange " : _emscripten_glMapBufferRange , " _emscripten_glPauseTransformFeedback " : _emscripten_glPauseTransformFeedback , " _emscripten_glPixelStorei " : _emscripten_glPixelStorei , " _emscripten_glPolygonOffset " : _emscripten_glPolygonOffset , " _emscripten_glProgramBinary " : _emscripten_glProgramBinary , " _emscripten_glProgramParameteri " : _emscripten_glProgramParameteri , " _emscripten_glQueryCounterEXT " : _emscripten_glQueryCounterEXT , " _emscripten_glReadBuffer " : _emscripten_glReadBuffer , " _emscripten_glReadPixels " : _emscripten_glReadPixels , " _emscripten_glReleaseShaderCompiler " : _emscripten_glReleaseShaderCompiler , " _emscripten_glRenderbufferStorage " : _emscripten_glRenderbufferStorage , " _emscripten_glRenderbufferStorageMultisample " : _emscripten_glRenderbufferStorageMultisample , " _emscripten_glResumeTransformFeedback " : _emscripten_glResumeTransformFeedback , " _emscripten_glSampleCoverage " : _emscripten_glSampleCoverage , " _emscripten_glSamplerParameterf " : _emscripten_glSamplerParameterf , " _emscripten_glSamplerParameterfv " : _emscripten_glSamplerParameterfv , " _emscripten_glSamplerParameteri " : _emscripten_glSamplerParameteri , " _emscripten_glSamplerParameteriv " : _emscripten_glSamplerParameteriv 
, " _emscripten_glScissor " : _emscripten_glScissor , " _emscripten_glShaderBinary " : _emscripten_glShaderBinary , " _emscripten_glShaderSource " : _emscripten_glShaderSource , " _emscripten_glStencilFunc " : _emscripten_glStencilFunc , " _emscripten_glStencilFuncSeparate " : _emscripten_glStencilFuncSeparate , " _emscripten_glStencilMask " : _emscripten_glStencilMask , " _emscripten_glStencilMaskSeparate " : _emscripten_glStencilMaskSeparate , " _emscripten_glStencilOp " : _emscripten_glStencilOp , " _emscripten_glStencilOpSeparate " : _emscripten_glStencilOpSeparate , " _emscripten_glTexImage2D " : _emscripten_glTexImage2D , " _emscripten_glTexImage3D " : _emscripten_glTexImage3D , " _emscripten_glTexParameterf " : _emscripten_glTexParameterf , " _emscripten_glTexParameterfv " : _emscripten_glTexParameterfv , " _emscripten_glTexParameteri " : _emscripten_glTexParameteri , " _emscripten_glTexParameteriv " : _emscripten_glTexParameteriv , " _emscripten_glTexStorage2D " : _emscripten_glTexStorage2D , " _emscripten_glTexStorage3D " : _emscripten_glTexStorage3D , " _emscripten_glTexSubImage2D " : _emscripten_glTexSubImage2D , " _emscripten_glTexSubImage3D " : _emscripten_glTexSubImage3D , " _emscripten_glTransformFeedbackVaryings " : _emscripten_glTransformFeedbackVaryings , " _emscripten_glUniform1f " : _emscripten_glUniform1f , " _emscripten_glUniform1fv " : _emscripten_glUniform1fv , " _emscripten_glUniform1i " : _emscripten_glUniform1i , " _emscripten_glUniform1iv " : _emscripten_glUniform1iv , " _emscripten_glUniform1ui " : _emscripten_glUniform1ui , " _emscripten_glUniform1uiv " : _emscripten_glUniform1uiv , " _emscripten_glUniform2f " : _emscripten_glUniform2f , " _emscripten_glUniform2fv " : _emscripten_glUniform2fv , " _emscripten_glUniform2i " : _emscripten_glUniform2i , " _emscripten_glUniform2iv " : _emscripten_glUniform2iv , " _emscripten_glUniform2ui " : _emscripten_glUniform2ui , " _emscripten_glUniform2uiv " : _emscripten_glUniform2uiv , " _emscripten_glUniform3f " : _emscripten_glUniform3f , " _emscripten_glUniform3fv " : _emscripten_glUniform3fv , " _emscripten_glUniform3i " : _emscripten_glUniform3i , " _emscripten_glUniform3iv " : _emscripten_glUniform3iv , " _emscripten_glUniform3ui " : _emscripten_glUniform3ui , " _emscripten_glUniform3uiv " : _emscripten_glUniform3uiv , " _emscripten_glUniform4f " : _emscripten_glUniform4f , " _emscripten_glUniform4fv " : _emscripten_glUniform4fv , " _emscripten_glUniform4i " : _emscripten_glUniform4i , " _emscripten_glUniform4iv " : _emscripten_glUniform4iv , " _emscripten_glUniform4ui " : _emscripten_glUniform4ui , " _emscripten_glUniform4uiv " : _emscripten_glUniform4uiv , " _emscripten_glUniformBlockBinding " : _emscripten_glUniformBlockBinding , " _emscripten_glUniformMatrix2fv " : _emscripten_glUniformMatrix2fv , " _emscripten_glUniformMatrix2x3fv " : _emscripten_glUniformMatrix2x3fv , " _emscripten_glUniformMatrix2x4fv " : _emscripten_glUniformMatrix2x4fv , " _emscripten_glUniformMatrix3fv " : _emscripten_glUniformMatrix3fv , " _emscripten_glUniformMatrix3x2fv " : _emscripten_glUniformMatrix3x2fv , " _emscripten_glUniformMatrix3x4fv " : _emscripten_glUniformMatrix3x4fv , " _emscripten_glUniformMatrix4fv " : _emscripten_glUniformMatrix4fv , " _emscripten_glUniformMatrix4x2fv " : _emscripten_glUniformMatrix4x2fv , " _emscripten_glUniformMatrix4x3fv " : _emscripten_glUniformMatrix4x3fv , " _emscripten_glUnmapBuffer " : _emscripten_glUnmapBuffer , " _emscripten_glUseProgram " : _emscripten_glUseProgram , " 
_emscripten_glValidateProgram " : _emscripten_glValidateProgram , " _emscripten_glVertexAttrib1f " : _emscripten_glVertexAttrib1f , " _emscripten_glVertexAttrib1fv " : _emscripten_glVertexAttrib1fv , " _emscripten_glVertexAttrib2f " : _emscripten_glVertexAttrib2f , " _emscripten_glVertexAttrib2fv " : _emscripten_glVertexAttrib2fv , " _emscripten_glVertexAttrib3f " : _emscripten_glVertexAttrib3f , " _emscripten_glVertexAttrib3fv " : _emscripten_glVertexAttrib3fv , " _emscripten_glVertexAttrib4f " : _emscripten_glVertexAttrib4f , " _emscripten_glVertexAttrib4fv " : _emscripten_glVertexAttrib4fv , " _emscripten_glVertexAttribDivisor " : _emscripten_glVertexAttribDivisor , " _emscripten_glVertexAttribDivisorANGLE " : _emscripten_glVertexAttribDivisorANGLE , " _emscripten_glVertexAttribDivisorARB " : _emscripten_glVertexAttribDivisorARB , " _emscripten_glVertexAttribDivisorEXT " : _emscripten_glVertexAttribDivisorEXT , " _emscripten_glVertexAttribDivisorNV " : _emscripten_glVertexAttribDivisorNV , " _emscripten_glVertexAttribI4i " : _emscripten_glVertexAttribI4i , " _emscripten_glVertexAttribI4iv " : _emscripten_glVertexAttribI4iv , " _emscripten_glVertexAttribI4ui " : _emscripten_glVertexAttribI4ui , " _emscripten_glVertexAttribI4uiv " : _emscripten_glVertexAttribI4uiv , " _emscripten_glVertexAttribIPointer " : _emscripten_glVertexAttribIPointer , " _emscripten_glVertexAttribPointer " : _emscripten_glVertexAttribPointer , " _emscripten_glViewport " : _emscripten_glViewport , " _emscripten_glWaitSync " : _emscripten_glWaitSync , " _emscripten_memcpy_big " : _emscripten_memcpy_big , " _emscripten_resize_heap " : _emscripten_resize_heap , " _glActiveTexture " : _glActiveTexture , " _glAttachShader " : _glAttachShader , " _glBindBuffer " : _glBindBuffer , " _glBindBufferRange " : _glBindBufferRange , " _glBindFramebuffer " : _glBindFramebuffer , " _glBindRenderbuffer " : _glBindRenderbuffer , " _glBindSampler " : _glBindSampler , " _glBindTexture " : _glBindTexture , " _glBindVertexArray " : _glBindVertexArray , " _glBlendEquationSeparate " : _glBlendEquationSeparate , " _glBlendFuncSeparate " : _glBlendFuncSeparate , " _glBlitFramebuffer " : _glBlitFramebuffer , " _glBufferData " : _glBufferData , " _glBufferSubData " : _glBufferSubData , " _glClear " : _glClear , " _glClearColor " : _glClearColor , " _glClearDepthf " : _glClearDepthf , " _glClearStencil " : _glClearStencil , " _glColorMask " : _glColorMask , " _glCompileShader " : _glCompileShader , " _glCompressedTexSubImage2D " : _glCompressedTexSubImage2D , " _glCompressedTexSubImage3D " : _glCompressedTexSubImage3D , " _glCreateProgram " : _glCreateProgram , " _glCreateShader " : _glCreateShader , " _glCullFace " : _glCullFace , " _glDeleteBuffers " : _glDeleteBuffers , " _glDeleteFramebuffers " : _glDeleteFramebuffers , " _glDeleteProgram " : _glDeleteProgram , " _glDeleteRenderbuffers " : _glDeleteRenderbuffers , " _glDeleteSamplers " : _glDeleteSamplers , " _glDeleteShader " : _glDeleteShader , " _glDeleteSync " : _glDeleteSync , " _glDeleteTextures " : _glDeleteTextures , " _glDeleteVertexArrays " : _glDeleteVertexArrays , " _glDepthFunc " : _glDepthFunc , " _glDepthMask " : _glDepthMask , " _glDetachShader " : _glDetachShader , " _glDisable " : _glDisable , " _glDisableVertexAttribArray " : _glDisableVertexAttribArray , " _glDrawArrays " : _glDrawArrays , " _glDrawElements " : _glDrawElements , " _glDrawRangeElements " : _glDrawRangeElements , " _glEnable " : _glEnable , " _glEnableVertexAttribArray " : _glEnableVertexAttribArray , " 
_glFenceSync " : _glFenceSync , " _glFlush " : _glFlush , " _glFramebufferRenderbuffer " : _glFramebufferRenderbuffer , " _glFramebufferTexture2D " : _glFramebufferTexture2D , " _glFramebufferTextureLayer " : _glFramebufferTextureLayer , " _glFrontFace " : _glFrontFace , " _glGenBuffers " : _glGenBuffers , " _glGenFramebuffers " : _glGenFramebuffers , " _glGenRenderbuffers " : _glGenRenderbuffers , " _glGenSamplers " : _glGenSamplers , " _glGenTextures " : _glGenTextures , " _glGenVertexArrays " : _glGenVertexArrays , " _glGenerateMipmap " : _glGenerateMipmap , " _glGetError " : _glGetError , " _glGetFloatv " : _glGetFloatv , " _glGetIntegerv " : _glGetIntegerv , " _glGetProgramInfoLog " : _glGetProgramInfoLog , " _glGetProgramiv " : _glGetProgramiv , " _glGetShaderInfoLog " : _glGetShaderInfoLog , " _glGetShaderiv " : _glGetShaderiv , " _glGetString " : _glGetString , " _glGetStringi " : _glGetStringi , " _glGetUniformBlockIndex " : _glGetUniformBlockIndex , " _glGetUniformLocation " : _glGetUniformLocation , " _glGetVertexAttribiv " : _glGetVertexAttribiv , " _glHint " : _glHint , " _glInvalidateFramebuffer " : _glInvalidateFramebuffer , " _glInvalidateSubFramebuffer " : _glInvalidateSubFramebuffer , " _glIsEnabled " : _glIsEnabled , " _glLinkProgram " : _glLinkProgram , " _glPixelStorei " : _glPixelStorei , " _glPolygonOffset " : _glPolygonOffset , " _glReadPixels " : _glReadPixels , " _glRenderbufferStorage " : _glRenderbufferStorage , " _glRenderbufferStorageMultisample " : _glRenderbufferStorageMultisample , " _glSamplerParameteri " : _glSamplerParameteri , " _glScissor " : _glScissor , " _glShaderSource " : _glShaderSource , " _glTexParameteri " : _glTexParameteri , " _glTexStorage2D " : _glTexStorage2D , " _glTexStorage2DMultisample " : _glTexStorage2DMultisample , " _glTexStorage3D " : _glTexStorage3D , " _glTexSubImage2D " : _glTexSubImage2D , " _glTexSubImage3D " : _glTexSubImage3D , " _glUniform1f " : _glUniform1f , " _glUniform1i " : _glUniform1i , " _glUniform4f " : _glUniform4f , " _glUniformBlockBinding " : _glUniformBlockBinding , " _glUseProgram " : _glUseProgram , " _glVertexAttribIPointer " : _glVertexAttribIPointer , " _glVertexAttribPointer " : _glVertexAttribPointer , " _glViewport " : _glViewport , " _glWaitSync " : _glWaitSync , " _llvm_exp2_f32 " : _llvm_exp2_f32 , " _llvm_exp2_f64 " : _llvm_exp2_f64 , " _llvm_log2_f32 " : _llvm_log2_f32 , " _llvm_trap " : _llvm_trap , " _pthread_cond_destroy " : _pthread_cond_destroy , " _pthread_cond_signal " : _pthread_cond_signal , " _pthread_cond_timedwait " : _pthread_cond_timedwait , " _pthread_cond_wait " : _pthread_cond_wait , " _pthread_create " : _pthread_create , " _pthread_join " : _pthread_join , " _sysconf " : _sysconf , " abortOnCannotGrowMemory " : abortOnCannotGrowMemory , " constNoSmartPtrRawPointerToWireType " : constNoSmartPtrRawPointerToWireType , " count_emval_handles " : count_emval_handles , " craftInvokerFunction " : craftInvokerFunction , " createNamedFunction " : createNamedFunction , " downcastPointer " : downcastPointer , " embind__requireFunction " : embind__requireFunction , " embind_init_charCodes " : embind_init_charCodes , " emscriptenWebGLGet " : emscriptenWebGLGet , " emscriptenWebGLGetIndexed " : emscriptenWebGLGetIndexed , " emscriptenWebGLGetTexPixelData " : emscriptenWebGLGetTexPixelData , " emscriptenWebGLGetUniform " : emscriptenWebGLGetUniform , " emscriptenWebGLGetVertexAttrib " : emscriptenWebGLGetVertexAttrib , " emscripten_realloc_buffer " : emscripten_realloc_buffer , " 
ensureOverloadTable " : ensureOverloadTable , " enumReadValueFromPointer " : enumReadValueFromPointer , " exposePublicSymbol " : exposePublicSymbol , " extendError " : extendError , " floatReadValueFromPointer " : floatReadValueFromPointer , " flushPendingDeletes " : flushPendingDeletes , " genericPointerToWireType " : genericPointerToWireType , " getBasestPointer " : getBasestPointer , " getInheritedInstance " : getInheritedInstance , " getInheritedInstanceCount " : getInheritedInstanceCount , " getLiveInheritedInstances " : getLiveInheritedInstances , " getShiftFromSize " : getShiftFromSize , " getStringOrSymbol " : getStringOrSymbol , " getTypeName " : getTypeName , " get_first_emval " : get_first_emval , " heap32VectorToArray " : heap32VectorToArray , " init_ClassHandle " : init_ClassHandle , " init_RegisteredPointer " : init_RegisteredPointer , " init_embind " : init_embind , " init_emval " : init_emval , " integerReadValueFromPointer " : integerReadValueFromPointer , " makeClassHandle " : makeClassHandle , " makeLegalFunctionName " : makeLegalFunctionName , " new_ " : new_ , " nonConstNoSmartPtrRawPointerToWireType " : nonConstNoSmartPtrRawPointerToWireType , " readLatin1String " : readLatin1String , " registerType " : registerType , " replacePublicSymbol " : replacePublicSymbol , " requireHandle " : requireHandle , " requireRegisteredType " : requireRegisteredType , " runDestructor " : runDestructor , " runDestructors " : runDestructors , " setDelayFunction " : setDelayFunction , " shallowCopyInternalPointer " : shallowCopyInternalPointer , " simpleReadValueFromPointer " : simpleReadValueFromPointer , " stringToNewUTF8 " : stringToNewUTF8 , " throwBindingError " : throwBindingError , " throwInstanceAlreadyDeleted " : throwInstanceAlreadyDeleted , " throwInternalError " : throwInternalError , " throwUnboundTypeError " : throwUnboundTypeError , " upcastPointer " : upcastPointer , " validateThis " : validateThis , " whenDependentTypesAreResolved " : whenDependentTypesAreResolved , " tempDoublePtr " : tempDoublePtr , " DYNAMICTOP_PTR " : DYNAMICTOP_PTR } ; var asm = Module [ " asm " ] ( asmGlobalArg , asmLibraryArg , buffer ) ; Module [ " asm " ] = asm ; var ___errno_location = Module [ " ___errno_location " ] = function ( ) { return Module [ " asm " ] [ " ___errno_location " ] . apply ( null , arguments ) } ; var ___getTypeName = Module [ " ___getTypeName " ] = function ( ) { return Module [ " asm " ] [ " ___getTypeName " ] . apply ( null , arguments ) } ; var _emscripten_GetProcAddress = Module [ " _emscripten_GetProcAddress " ] = function ( ) { return Module [ " asm " ] [ " _emscripten_GetProcAddress " ] . apply ( null , arguments ) } ; var _emscripten_replace_memory = Module [ " _emscripten_replace_memory " ] = function ( ) { return Module [ " asm " ] [ " _emscripten_replace_memory " ] . apply ( null , arguments ) } ; var _free = Module [ " _free " ] = function ( ) { return Module [ " asm " ] [ " _free " ] . apply ( null , arguments ) } ; var _i64Subtract = Module [ " _i64Subtract " ] = function ( ) { return Module [ " asm " ] [ " _i64Subtract " ] . apply ( null , arguments ) } ; var _llvm_round_f32 = Module [ " _llvm_round_f32 " ] = function ( ) { return Module [ " asm " ] [ " _llvm_round_f32 " ] . apply ( null , arguments ) } ; var _malloc = Module [ " _malloc " ] = function ( ) { return Module [ " asm " ] [ " _malloc " ] . apply ( null , arguments ) } ; var _memcpy = Module [ " _memcpy " ] = function ( ) { return Module [ " asm " ] [ " _memcpy " ] . 
apply ( null , arguments ) } ; var _memmove = Module [ " _memmove " ] = function ( ) { return Module [ " asm " ] [ " _memmove " ] . apply ( null , arguments ) } ; var _memset = Module [ " _memset " ] = function ( ) { return Module [ " asm " ] [ " _memset " ] . apply ( null , arguments ) } ; var _pthread_cond_broadcast = Module [ " _pthread_cond_broadcast " ] = function ( ) { return Module [ " asm " ] [ " _pthread_cond_broadcast " ] . apply ( null , arguments ) } ; var _sbrk = Module [ " _sbrk " ] = function ( ) { return Module [ " asm " ] [ " _sbrk " ] . apply ( null , arguments ) } ; var _strstr = Module [ " _strstr " ] = function ( ) { return Module [ " asm " ] [ " _strstr " ] . apply ( null , arguments ) } ; var establishStackSpace = Module [ " establishStackSpace " ] = function ( ) { return Module [ " asm " ] [ " establishStackSpace " ] . apply ( null , arguments ) } ; var globalCtors = Module [ " globalCtors " ] = function ( ) { return Module [ " asm " ] [ " globalCtors " ] . apply ( null , arguments ) } ; var stackAlloc = Module [ " stackAlloc " ] = function ( ) { return Module [ " asm " ] [ " stackAlloc " ] . apply ( null , arguments ) } ; var stackRestore = Module [ " stackRestore " ] = function ( ) { return Module [ " asm " ] [ " stackRestore " ] . apply ( null , arguments ) } ; var stackSave = Module [ " stackSave " ] = function ( ) { return Module [ " asm " ] [ " stackSave " ] . apply ( null , arguments ) } ; var dynCall_dii = Module [ " dynCall_dii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_dii " ] . apply ( null , arguments ) } ; var dynCall_fi = Module [ " dynCall_fi " ] = function ( ) { return Module [ " asm " ] [ " dynCall_fi " ] . apply ( null , arguments ) } ; var dynCall_fii = Module [ " dynCall_fii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_fii " ] . apply ( null , arguments ) } ; var dynCall_fiii = Module [ " dynCall_fiii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_fiii " ] . apply ( null , arguments ) } ; var dynCall_i = Module [ " dynCall_i " ] = function ( ) { return Module [ " asm " ] [ " dynCall_i " ] . apply ( null , arguments ) } ; var dynCall_ii = Module [ " dynCall_ii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_ii " ] . apply ( null , arguments ) } ; var dynCall_iif = Module [ " dynCall_iif " ] = function ( ) { return Module [ " asm " ] [ " dynCall_iif " ] . apply ( null , arguments ) } ; var dynCall_iiff = Module [ " dynCall_iiff " ] = function ( ) { return Module [ " asm " ] [ " dynCall_iiff " ] . apply ( null , arguments ) } ; var dynCall_iii = Module [ " dynCall_iii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_iii " ] . apply ( null , arguments ) } ; var dynCall_iiif = Module [ " dynCall_iiif " ] = function ( ) { return Module [ " asm " ] [ " dynCall_iiif " ] . apply ( null , arguments ) } ; var dynCall_iiiff = Module [ " dynCall_iiiff " ] = function ( ) { return Module [ " asm " ] [ " dynCall_iiiff " ] . apply ( null , arguments ) } ; var dynCall_iiii = Module [ " dynCall_iiii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_iiii " ] . apply ( null , arguments ) } ; var dynCall_iiiii = Module [ " dynCall_iiiii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_iiiii " ] . apply ( null , arguments ) } ; var dynCall_iiiiii = Module [ " dynCall_iiiiii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_iiiiii " ] . 
apply ( null , arguments ) } ; var dynCall_iiiiiii = Module [ " dynCall_iiiiiii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_iiiiiii " ] . apply ( null , arguments ) } ; var dynCall_iiiiiiii = Module [ " dynCall_iiiiiiii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_iiiiiiii " ] . apply ( null , arguments ) } ; var dynCall_iiij = Module [ " dynCall_iiij " ] = function ( ) { return Module [ " asm " ] [ " dynCall_iiij " ] . apply ( null , arguments ) } ; var dynCall_jii = Module [ " dynCall_jii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_jii " ] . apply ( null , arguments ) } ; var dynCall_v = Module [ " dynCall_v " ] = function ( ) { return Module [ " asm " ] [ " dynCall_v " ] . apply ( null , arguments ) } ; var dynCall_vf = Module [ " dynCall_vf " ] = function ( ) { return Module [ " asm " ] [ " dynCall_vf " ] . apply ( null , arguments ) } ; var dynCall_vff = Module [ " dynCall_vff " ] = function ( ) { return Module [ " asm " ] [ " dynCall_vff " ] . apply ( null , arguments ) } ; var dynCall_vffff = Module [ " dynCall_vffff " ] = function ( ) { return Module [ " asm " ] [ " dynCall_vffff " ] . apply ( null , arguments ) } ; var dynCall_vfi = Module [ " dynCall_vfi " ] = function ( ) { return Module [ " asm " ] [ " dynCall_vfi " ] . apply ( null , arguments ) } ; var dynCall_vi = Module [ " dynCall_vi " ] = function ( ) { return Module [ " asm " ] [ " dynCall_vi " ] . apply ( null , arguments ) } ; var dynCall_viddd = Module [ " dynCall_viddd " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viddd " ] . apply ( null , arguments ) } ; var dynCall_viddddi = Module [ " dynCall_viddddi " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viddddi " ] . apply ( null , arguments ) } ; var dynCall_vif = Module [ " dynCall_vif " ] = function ( ) { return Module [ " asm " ] [ " dynCall_vif " ] . apply ( null , arguments ) } ; var dynCall_viff = Module [ " dynCall_viff " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viff " ] . apply ( null , arguments ) } ; var dynCall_vifff = Module [ " dynCall_vifff " ] = function ( ) { return Module [ " asm " ] [ " dynCall_vifff " ] . apply ( null , arguments ) } ; var dynCall_viffff = Module [ " dynCall_viffff " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viffff " ] . apply ( null , arguments ) } ; var dynCall_vii = Module [ " dynCall_vii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_vii " ] . apply ( null , arguments ) } ; var dynCall_viid = Module [ " dynCall_viid " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viid " ] . apply ( null , arguments ) } ; var dynCall_viidd = Module [ " dynCall_viidd " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viidd " ] . apply ( null , arguments ) } ; var dynCall_viiddd = Module [ " dynCall_viiddd " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viiddd " ] . apply ( null , arguments ) } ; var dynCall_viidddddd = Module [ " dynCall_viidddddd " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viidddddd " ] . apply ( null , arguments ) } ; var dynCall_viiddddi = Module [ " dynCall_viiddddi " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viiddddi " ] . apply ( null , arguments ) } ; var dynCall_viif = Module [ " dynCall_viif " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viif " ] . apply ( null , arguments ) } ; var dynCall_viiff = Module [ " dynCall_viiff " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viiff " ] . 
apply ( null , arguments ) } ; var dynCall_viifff = Module [ " dynCall_viifff " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viifff " ] . apply ( null , arguments ) } ; var dynCall_viifi = Module [ " dynCall_viifi " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viifi " ] . apply ( null , arguments ) } ; var dynCall_viii = Module [ " dynCall_viii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viii " ] . apply ( null , arguments ) } ; var dynCall_viiidd = Module [ " dynCall_viiidd " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viiidd " ] . apply ( null , arguments ) } ; var dynCall_viiidddddd = Module [ " dynCall_viiidddddd " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viiidddddd " ] . apply ( null , arguments ) } ; var dynCall_viiif = Module [ " dynCall_viiif " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viiif " ] . apply ( null , arguments ) } ; var dynCall_viiii = Module [ " dynCall_viiii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viiii " ] . apply ( null , arguments ) } ; var dynCall_viiiii = Module [ " dynCall_viiiii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viiiii " ] . apply ( null , arguments ) } ; var dynCall_viiiiii = Module [ " dynCall_viiiiii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viiiiii " ] . apply ( null , arguments ) } ; var dynCall_viiiiiii = Module [ " dynCall_viiiiiii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viiiiiii " ] . apply ( null , arguments ) } ; var dynCall_viiiiiiii = Module [ " dynCall_viiiiiiii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viiiiiiii " ] . apply ( null , arguments ) } ; var dynCall_viiiiiiiii = Module [ " dynCall_viiiiiiiii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viiiiiiiii " ] . apply ( null , arguments ) } ; var dynCall_viiiiiiiiii = Module [ " dynCall_viiiiiiiiii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viiiiiiiiii " ] . apply ( null , arguments ) } ; var dynCall_viiiiiiiiiii = Module [ " dynCall_viiiiiiiiiii " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viiiiiiiiiii " ] . apply ( null , arguments ) } ; var dynCall_viij = Module [ " dynCall_viij " ] = function ( ) { return Module [ " asm " ] [ " dynCall_viij " ] . apply ( null , arguments ) } ; var dynCall_vij = Module [ " dynCall_vij " ] = function ( ) { return Module [ " asm " ] [ " dynCall_vij " ] . apply ( null , arguments ) } ; Module [ " asm " ] = asm ; function ExitStatus ( status ) { this . name = " ExitStatus " ; this . message = " Program terminated with exit ( " + status + " ) " ; this . status = status } ExitStatus . prototype = new Error ; ExitStatus . prototype . constructor = ExitStatus ; dependenciesFulfilled = function runCaller ( ) { if ( ! Module [ " calledRun " ] ) run ( ) ; if ( ! Module [ " calledRun " ] ) dependenciesFulfilled = runCaller } ; function run ( args ) { args = args | | Module [ " arguments " ] ; if ( runDependencies > 0 ) { return } preRun ( ) ; if ( runDependencies > 0 ) return ; if ( Module [ " calledRun " ] ) return ; function doRun ( ) { if ( Module [ " calledRun " ] ) return ; Module [ " calledRun " ] = true ; if ( ABORT ) return ; ensureInitRuntime ( ) ; preMain ( ) ; if ( Module [ " onRuntimeInitialized " ] ) Module [ " onRuntimeInitialized " ] ( ) ; postRun ( ) } if ( Module [ " setStatus " ] ) { Module [ " setStatus " ] ( " Running . . . 
" ) ; setTimeout ( function ( ) { setTimeout ( function ( ) { Module [ " setStatus " ] ( " " ) } , 1 ) ; doRun ( ) } , 1 ) } else { doRun ( ) } } Module [ " run " ] = run ; function abort ( what ) { if ( Module [ " onAbort " ] ) { Module [ " onAbort " ] ( what ) } if ( what ! = = undefined ) { out ( what ) ; err ( what ) ; what = JSON . stringify ( what ) } else { what = " " } ABORT = true ; EXITSTATUS = 1 ; throw " abort ( " + what + " ) . Build with - s ASSERTIONS = 1 for more info . " } Module [ " abort " ] = abort ; if ( Module [ " preInit " ] ) { if ( typeof Module [ " preInit " ] = = " function " ) Module [ " preInit " ] = [ Module [ " preInit " ] ] ; while ( Module [ " preInit " ] . length > 0 ) { Module [ " preInit " ] . pop ( ) ( ) } } Module [ " noExitRuntime " ] = true ; run ( ) ; Filament . remainingInitializationTasks = 1 ; Filament . init = function ( assets , onready ) { Filament . onReady = onready ; Filament . remainingInitializationTasks + = assets . length ; Filament . assets = { } ; if ( typeof glMatrix ! = = " undefined " ) { Filament . loadMathExtensions ( ) } Filament . fetch ( assets , null , function ( name ) { if ( - - Filament . remainingInitializationTasks = = 0 & & Filament . onReady ) { Filament . onReady ( ) } } ) } ; Filament . postRun = function ( ) { Filament . loadClassExtensions ( ) ; if ( - - Filament . remainingInitializationTasks = = 0 & & Filament . onReady ) { Filament . onReady ( ) } } ; Filament . fetch = function ( assets , onDone , onFetched ) { var remainingAssets = assets . length ; assets . forEach ( function ( name ) { const lower = name . toLowerCase ( ) ; if ( lower . endsWith ( " . jpeg " ) | | lower . endsWith ( " . jpg " ) ) { var img = new Image ; img . src = name ; img . decoding = " async " ; img . onload = function ( ) { Filament . assets [ name ] = img ; if ( onFetched ) { onFetched ( name ) } if ( - - remainingAssets = = = 0 & & onDone ) { onDone ( ) } } ; return } fetch ( name ) . then ( function ( response ) { if ( ! response . ok ) { throw new Error ( name ) } return response . arrayBuffer ( ) } ) . then ( function ( arrayBuffer ) { Filament . assets [ name ] = new Uint8Array ( arrayBuffer ) ; if ( onFetched ) { onFetched ( name ) } if ( - - remainingAssets = = = 0 & & onDone ) { onDone ( ) } } ) } ) } ; function getBufferDescriptor ( buffer ) { if ( " string " = = typeof buffer | | buffer instanceof String ) { buffer = Filament . assets [ buffer ] } if ( buffer . buffer instanceof ArrayBuffer ) { buffer = Filament . Buffer ( buffer ) } return buffer } Filament . loadClassExtensions = function ( ) { Filament . Engine . create = function ( canvas , options ) { const defaults = { majorVersion : 2 , minorVersion : 0 , antialias : false , depth : false , alpha : false } ; options = Object . assign ( defaults , options ) ; const ctx = canvas . getContext ( " webgl2 " , options ) ; const handle = GL . registerContext ( ctx , options ) ; GL . makeContextCurrent ( handle ) ; ctx . getExtension ( " WEBGL_compressed_texture_s3tc " ) ; ctx . getExtension ( " WEBGL_compressed_texture_astc " ) ; ctx . getExtension ( " WEBGL_compressed_texture_etc " ) ; return Filament . Engine . _create ( ) } ; Filament . Engine . prototype . createMaterial = function ( buffer ) { buffer = getBufferDescriptor ( buffer ) ; const result = this . _createMaterial ( buffer ) ; buffer . delete ( ) ; return result } ; Filament . Engine . prototype . 
createTextureFromKtx = function ( buffer , options ) { buffer = getBufferDescriptor ( buffer ) ; const result = Filament . _createTextureFromKtx ( buffer , this , options ) ; buffer . delete ( ) ; return result } ; Filament . Engine . prototype . createIblFromKtx = function ( buffer , options ) { buffer = getBufferDescriptor ( buffer ) ; const result = Filament . _createIblFromKtx ( buffer , this , options ) ; buffer . delete ( ) ; return result } ; Filament . Engine . prototype . createSkyFromKtx = function ( buffer , options ) { const skytex = this . createTextureFromKtx ( buffer , options ) ; return Filament . Skybox . Builder ( ) . environment ( skytex ) . build ( this ) } ; Filament . Engine . prototype . createTextureFromPng = function ( buffer , options ) { buffer = getBufferDescriptor ( buffer ) ; const result = Filament . _createTextureFromPng ( buffer , this , options ) ; buffer . delete ( ) ; return result } ; Filament . Engine . prototype . createTextureFromJpeg = function ( image , options ) { options = options | | { } ; if ( " string " = = typeof image | | image instanceof String ) { image = Filament . assets [ image ] } return Filament . _createTextureFromJpeg ( image , this , options ) } ; Filament . Engine . prototype . loadFilamesh = function ( buffer , definstance , matinstances ) { buffer = getBufferDescriptor ( buffer ) ; const result = Filament . _loadFilamesh ( this , buffer , definstance , matinstances ) ; buffer . delete ( ) ; return result } ; Filament . Engine . prototype . createAssetLoader = function ( ) { const materials = new Filament . gltfio $ UbershaderLoader ( this ) ; return new Filament . gltfio $ AssetLoader ( this , materials ) } ; Filament . VertexBuffer . prototype . setBufferAt = function ( engine , bufferIndex , buffer ) { buffer = getBufferDescriptor ( buffer ) ; this . _setBufferAt ( engine , bufferIndex , buffer ) ; buffer . delete ( ) } ; Filament . IndexBuffer . prototype . setBuffer = function ( engine , buffer ) { buffer = getBufferDescriptor ( buffer ) ; this . _setBuffer ( engine , buffer ) ; buffer . delete ( ) } ; Filament . RenderableManager $ Builder . prototype . build = Filament . LightManager $ Builder . prototype . build = function ( engine , entity ) { const result = this . _build ( engine , entity ) ; this . delete ( ) ; return result } ; Filament . RenderTarget $ Builder . prototype . build = Filament . VertexBuffer $ Builder . prototype . build = Filament . IndexBuffer $ Builder . prototype . build = Filament . Texture $ Builder . prototype . build = Filament . IndirectLight $ Builder . prototype . build = Filament . Skybox $ Builder . prototype . build = function ( engine ) { const result = this . _build ( engine ) ; this . delete ( ) ; return result } ; Filament . KtxBundle . prototype . getBlob = function ( index ) { const blob = this . _getBlob ( index ) ; const result = blob . getBytes ( ) ; blob . delete ( ) ; return result } ; Filament . KtxBundle . prototype . getCubeBlob = function ( miplevel ) { const blob = this . _getCubeBlob ( miplevel ) ; const result = blob . getBytes ( ) ; blob . delete ( ) ; return result } ; Filament . Texture . prototype . setImage = function ( engine , level , pbd ) { this . _setImage ( engine , level , pbd ) ; pbd . delete ( ) } ; Filament . Texture . prototype . setImageCube = function ( engine , level , pbd ) { this . _setImageCube ( engine , level , pbd ) ; pbd . delete ( ) } ; Filament . SurfaceOrientation $ Builder . prototype . build = function ( ) { const result = this . 
_build ( ) ; this . delete ( ) ; return result } ; Filament . gltfio $ AssetLoader . prototype . createAssetFromJson = function ( buffer ) { if ( " string " = = typeof buffer & & buffer . endsWith ( " . glb " ) ) { console . error ( " Please use createAssetFromBinary for glb files . " ) } buffer = getBufferDescriptor ( buffer ) ; const result = this . _createAssetFromJson ( buffer ) ; buffer . delete ( ) ; return result } ; Filament . gltfio $ AssetLoader . prototype . createAssetFromBinary = function ( buffer ) { if ( " string " = = typeof buffer & & buffer . endsWith ( " . gltf " ) ) { console . error ( " Please use createAssetFromJson for gltf files . " ) } buffer = getBufferDescriptor ( buffer ) ; const result = this . _createAssetFromBinary ( buffer ) ; buffer . delete ( ) ; return result } ; Filament . gltfio $ FilamentAsset . prototype . loadResources = function ( onDone , onFetched ) { const asset = this ; const engine = this . getEngine ( ) ; const urlkeys = this . getResourceUrls ( ) ; const urlset = new Set ; for ( var i = 0 ; i < urlkeys . size ( ) ; i + + ) { const url = urlkeys . get ( i ) ; if ( url ) { urlset . add ( url ) } } const resourceLoader = new Filament . gltfio $ ResourceLoader ( engine ) ; const onComplete = function ( ) { const finalize = function ( ) { resourceLoader . loadResources ( asset ) ; window . requestAnimationFrame ( function ( ) { window . requestAnimationFrame ( function ( ) { resourceLoader . delete ( ) } ) } ) } ; if ( onDone ) { onDone ( finalize ) } else { finalize ( ) } } ; if ( urlset . size = = 0 ) { onComplete ( ) ; return } Filament . fetch ( Array . from ( urlset ) , onComplete , function ( name ) { var buffer = getBufferDescriptor ( name ) ; resourceLoader . addResourceData ( name , buffer ) ; buffer . delete ( ) ; if ( onFetched ) { onFetched ( name ) } } ) } } ; Filament . Buffer = function ( typedarray ) { console . assert ( typedarray . buffer instanceof ArrayBuffer ) ; console . assert ( typedarray . byteLength > 0 ) ; if ( Filament . HEAPU32 . buffer = = typedarray . buffer ) { typedarray = new Uint8Array ( typedarray ) } const ta = typedarray ; const bd = new Filament . driver $ BufferDescriptor ( ta ) ; const uint8array = new Uint8Array ( ta . buffer , ta . byteOffset , ta . byteLength ) ; bd . getBytes ( ) . set ( uint8array ) ; return bd } ; Filament . PixelBuffer = function ( typedarray , format , datatype ) { console . assert ( typedarray . buffer instanceof ArrayBuffer ) ; console . assert ( typedarray . byteLength > 0 ) ; if ( Filament . HEAPU32 . buffer = = typedarray . buffer ) { typedarray = new Uint8Array ( typedarray ) } const ta = typedarray ; const bd = new Filament . driver $ PixelBufferDescriptor ( ta , format , datatype ) ; const uint8array = new Uint8Array ( ta . buffer , ta . byteOffset , ta . byteLength ) ; bd . getBytes ( ) . set ( uint8array ) ; return bd } ; Filament . CompressedPixelBuffer = function ( typedarray , cdatatype , faceSize ) { console . assert ( typedarray . buffer instanceof ArrayBuffer ) ; console . assert ( typedarray . byteLength > 0 ) ; faceSize = faceSize | | typedarray . byteLength ; if ( Filament . HEAPU32 . buffer = = typedarray . buffer ) { typedarray = new Uint8Array ( typedarray ) } const ta = typedarray ; const bd = new Filament . driver $ PixelBufferDescriptor ( ta , cdatatype , faceSize , true ) ; const uint8array = new Uint8Array ( ta . buffer , ta . byteOffset , ta . byteLength ) ; bd . getBytes ( ) . set ( uint8array ) ; return bd } ; Filament . 
_loadFilamesh = function ( engine , buffer , definstance , matinstances ) { matinstances = matinstances | | { } ; const registry = new Filament . MeshReader $ MaterialRegistry ; for ( var key in matinstances ) { registry . set ( key , matinstances [ key ] ) } if ( definstance ) { registry . set ( " DefaultMaterial " , definstance ) } const mesh = Filament . MeshReader . loadMeshFromBuffer ( engine , buffer , registry ) ; const keys = registry . keys ( ) ; for ( var i = 0 ; i < keys . size ( ) ; i + + ) { const key = keys . get ( i ) ; const minstance = registry . get ( key ) ; matinstances [ key ] = minstance } return { " renderable " : mesh . renderable ( ) , " vertexBuffer " : mesh . vertexBuffer ( ) , " indexBuffer " : mesh . indexBuffer ( ) } } ; Filament . IcoSphere = function ( nsubdivs ) { const X = . 5257311121191336 ; const Z = . 8506508083520399 ; const N = 0 ; this . vertices = new Float32Array ( [ - X , + N , + Z , + X , + N , + Z , - X , + N , - Z , + X , + N , - Z , + N , + Z , + X , + N , + Z , - X , + N , - Z , + X , + N , - Z , - X , + Z , + X , + N , - Z , + X , + N , + Z , - X , + N , - Z , - X , + N ] ) ; this . triangles = new Uint16Array ( [ 1 , 4 , 0 , 4 , 9 , 0 , 4 , 5 , 9 , 8 , 5 , 4 , 1 , 8 , 4 , 1 , 10 , 8 , 10 , 3 , 8 , 8 , 3 , 5 , 3 , 2 , 5 , 3 , 7 , 2 , 3 , 10 , 7 , 10 , 6 , 7 , 6 , 11 , 7 , 6 , 0 , 11 , 6 , 1 , 0 , 10 , 1 , 6 , 11 , 0 , 9 , 2 , 11 , 9 , 5 , 2 , 9 , 11 , 2 , 7 ] ) ; if ( nsubdivs ) { while ( nsubdivs - - > 0 ) { this . subdivide ( ) } } const nverts = this . vertices . length / 3 ; const normals = Filament . _malloc ( this . vertices . length * this . vertices . BYTES_PER_ELEMENT ) ; Module . HEAPU8 . set ( new Uint8Array ( this . vertices . buffer ) , normals ) ; const sob = new Filament . SurfaceOrientation $ Builder ; sob . vertexCount ( nverts ) ; sob . normals ( normals , 0 ) ; const orientation = sob . build ( ) ; Filament . _free ( normals ) ; const quatsBufferSize = 8 * nverts ; const quatsBuffer = Filament . _malloc ( quatsBufferSize ) ; orientation . getQuats ( quatsBuffer , nverts , Filament . VertexBuffer $ AttributeType . SHORT4 ) ; const tangentsMemory = Module . HEAPU8 . subarray ( quatsBuffer , quatsBuffer + quatsBufferSize ) . slice ( ) . buffer ; Filament . _free ( quatsBuffer ) ; this . tangents = new Int16Array ( tangentsMemory ) ; orientation . delete ( ) } ; Filament . IcoSphere . prototype . subdivide = function ( ) { const srctris = this . triangles ; const srcverts = this . vertices ; const nsrctris = srctris . length / 3 ; const ndsttris = nsrctris * 4 ; const nsrcverts = srcverts . length / 3 ; const ndstverts = nsrcverts + nsrctris * 3 ; const dsttris = new Uint16Array ( ndsttris * 3 ) ; const dstverts = new Float32Array ( ndstverts * 3 ) ; dstverts . set ( srcverts ) ; var srcind = 0 , dstind = 0 , i3 = nsrcverts * 3 , i4 = i3 + 3 , i5 = i4 + 3 ; for ( var tri = 0 ; tri < nsrctris ; tri + + , i3 + = 9 , i4 + = 9 , i5 + = 9 ) { const i0 = srctris [ srcind + + ] * 3 ; const i1 = srctris [ srcind + + ] * 3 ; const i2 = srctris [ srcind + + ] * 3 ; const v0 = srcverts . subarray ( i0 , i0 + 3 ) ; const v1 = srcverts . subarray ( i1 , i1 + 3 ) ; const v2 = srcverts . subarray ( i2 , i2 + 3 ) ; const v3 = dstverts . subarray ( i3 , i3 + 3 ) ; const v4 = dstverts . subarray ( i4 , i4 + 3 ) ; const v5 = dstverts . subarray ( i5 , i5 + 3 ) ; vec3 . normalize ( v3 , vec3 . add ( v3 , v0 , v1 ) ) ; vec3 . normalize ( v4 , vec3 . add ( v4 , v1 , v2 ) ) ; vec3 . normalize ( v5 , vec3 . 
add ( v5 , v2 , v0 ) ) ; dsttris [ dstind + + ] = i0 / 3 ; dsttris [ dstind + + ] = i3 / 3 ; dsttris [ dstind + + ] = i5 / 3 ; dsttris [ dstind + + ] = i3 / 3 ; dsttris [ dstind + + ] = i1 / 3 ; dsttris [ dstind + + ] = i4 / 3 ; dsttris [ dstind + + ] = i5 / 3 ; dsttris [ dstind + + ] = i3 / 3 ; dsttris [ dstind + + ] = i4 / 3 ; dsttris [ dstind + + ] = i2 / 3 ; dsttris [ dstind + + ] = i5 / 3 ; dsttris [ dstind + + ] = i4 / 3 } this . triangles = dsttris ; this . vertices = dstverts } ; function clamp ( v , least , most ) { return Math . max ( Math . min ( most , v ) , least ) } Filament . packSnorm16 = function ( value ) { return Math . round ( clamp ( value , - 1 , 1 ) * 32767 ) } ; Filament . loadMathExtensions = function ( ) { vec4 . packSnorm16 = function ( out , src ) { out [ 0 ] = Filament . packSnorm16 ( src [ 0 ] ) ; out [ 1 ] = Filament . packSnorm16 ( src [ 1 ] ) ; out [ 2 ] = Filament . packSnorm16 ( src [ 2 ] ) ; out [ 3 ] = Filament . packSnorm16 ( src [ 3 ] ) ; return out } ; const fromRotationZ = mat3 . fromRotation ; mat3 . fromRotation = function ( out , radians , axis ) { if ( axis ) { return mat3 . fromMat4 ( out , mat4 . fromRotation ( mat4 . create ( ) , radians , axis ) ) } return fromRotationZ ( out , radians ) } } ; Filament . _createTextureFromKtx = function ( ktxdata , engine , options ) { options = options | | { } ; const ktx = options [ " ktx " ] | | new Filament . KtxBundle ( ktxdata ) ; const srgb = ! ! options [ " srgb " ] ; return Filament . KtxUtility $ createTexture ( engine , ktx , srgb ) } ; Filament . _createIblFromKtx = function ( ktxdata , engine , options ) { options = options | | { } ; const iblktx = options [ " ktx " ] = new Filament . KtxBundle ( ktxdata ) ; const format = iblktx . info ( ) . glInternalFormat ; if ( format ! = this . ctx . R11F_G11F_B10F & & format ! = this . ctx . RGB16F & & format ! = this . ctx . RGB32F ) { console . warn ( " IBL texture format is 0x " + format . toString ( 16 ) + " which is not an expected floating - point format . Please use cmgen to generate IBL . " ) } const ibltex = Filament . _createTextureFromKtx ( ktxdata , engine , options ) ; const shstring = iblktx . getMetadata ( " sh " ) ; const shfloats = shstring . split ( / \ s / , 9 * 3 ) . map ( parseFloat ) ; return Filament . IndirectLight . Builder ( ) . reflections ( ibltex ) . irradianceSh ( 3 , shfloats ) . build ( engine ) } ; Filament . _createTextureFromPng = function ( pngdata , engine , options ) { const Sampler = Filament . Texture $ Sampler ; const TextureFormat = Filament . Texture $ InternalFormat ; const PixelDataFormat = Filament . PixelDataFormat ; options = options | | { } ; const srgb = ! ! options [ " srgb " ] ; const noalpha = ! ! options [ " noalpha " ] ; const nomips = ! ! options [ " nomips " ] ; const decodedpng = Filament . decodePng ( pngdata , noalpha ? 3 : 4 ) ; var texformat , pbformat , pbtype ; if ( noalpha ) { texformat = srgb ? TextureFormat . SRGB8 : TextureFormat . RGB8 ; pbformat = PixelDataFormat . RGB ; pbtype = Filament . PixelDataType . UBYTE } else { texformat = srgb ? TextureFormat . SRGB8_A8 : TextureFormat . RGBA8 ; pbformat = PixelDataFormat . RGBA ; pbtype = Filament . PixelDataType . UBYTE } const tex = Filament . Texture . Builder ( ) . width ( decodedpng . width ) . height ( decodedpng . height ) . levels ( nomips ? 1 : 255 ) . sampler ( Sampler . SAMPLER_2D ) . format ( texformat ) . build ( engine ) ; const pixelbuffer = Filament . PixelBuffer ( decodedpng . data . 
getBytes ( ) , pbformat , pbtype ) ; tex . setImage ( engine , 0 , pixelbuffer ) ; if ( ! nomips ) { tex . generateMipmaps ( engine ) } return tex } ; Filament . _createTextureFromJpeg = function ( image , engine , options ) { options = options | | { } ; const srgb = ! ! options [ " srgb " ] ; const nomips = ! ! options [ " nomips " ] ; var context2d = document . createElement ( " canvas " ) . getContext ( " 2d " ) ; context2d . canvas . width = image . width ; context2d . canvas . height = image . height ; context2d . width = image . width ; context2d . height = image . height ; context2d . globalCompositeOperation = " copy " ; context2d . drawImage ( image , 0 , 0 ) ; var imgdata = context2d . getImageData ( 0 , 0 , image . width , image . height ) . data . buffer ; var decodedjpeg = new Uint8Array ( imgdata ) ; const TF = Filament . Texture $ InternalFormat ; const texformat = srgb ? TF . SRGB8_A8 : TF . RGBA8 ; const pbformat = Filament . PixelDataFormat . RGBA ; const pbtype = Filament . PixelDataType . UBYTE ; const tex = Filament . Texture . Builder ( ) . width ( image . width ) . height ( image . height ) . levels ( nomips ? 1 : 255 ) . sampler ( Filament . Texture $ Sampler . SAMPLER_2D ) . format ( texformat ) . build ( engine ) ; const pixelbuffer = Filament . PixelBuffer ( decodedjpeg , pbformat , pbtype ) ; tex . setImage ( engine , 0 , pixelbuffer ) ; if ( ! nomips ) { tex . generateMipmaps ( engine ) } return tex } ; Filament . getSupportedFormats = function ( ) { if ( Filament . supportedFormats ) { return Filament . supportedFormats } const options = { majorVersion : 2 , minorVersion : 0 } ; var ctx = document . createElement ( " canvas " ) . getContext ( " webgl2 " , options ) ; const result = { s3tc : false , astc : false , etc : false } ; var exts = ctx . getSupportedExtensions ( ) , nexts = exts . length , i ; for ( i = 0 ; i < nexts ; i + + ) { var ext = exts [ i ] ; if ( ext = = " WEBGL_compressed_texture_s3tc " ) { result . s3tc = true } else if ( ext = = " WEBGL_compressed_texture_astc " ) { result . astc = true } else if ( ext = = " WEBGL_compressed_texture_etc " ) { result . etc = true } } return Filament . supportedFormats = result } ; Filament . getSupportedFormatSuffix = function ( desiredFormats ) { desiredFormats = desiredFormats . split ( " " ) ; var exts = Filament . getSupportedFormats ( ) ; for ( var key in exts ) { if ( exts [ key ] & & desiredFormats . includes ( key ) ) { return " _ " + key } } return " " } ; <nl> <nl> <nl> return Filament <nl> Binary files a / docs / webgl / filament . wasm and b / docs / webgl / filament . wasm differ <nl> Binary files a / docs / webgl / helmet . filamat and b / docs / webgl / helmet . filamat differ <nl> mmm a / docs / webgl / helmet . html <nl> ppp b / docs / webgl / helmet . html <nl> <nl> < script src = " https : / / unpkg . com / gltumble " > < / script > <nl> < script > <nl> <nl> - const ibl_suffix = Filament . getSupportedFormatSuffix ( ' etc s3tc ' ) ; <nl> const env = ' syferfontein_18d_clear_2k ' ; <nl> - const ibl_url = ` $ { env } / $ { env } _ibl $ { ibl_suffix } . ktx ` ; <nl> + const ibl_url = ` $ { env } / $ { env } _ibl . ktx ` ; <nl> const sky_url = ` $ { env } / $ { env } _skybox . ktx ` ; <nl> const mesh_url = ' helmet . gltf ' ; <nl> <nl> <nl> . color ( [ 0 . 98 , 0 . 92 , 0 . 89 ] ) <nl> . intensity ( 100000 . 0 ) <nl> . direction ( [ 0 . 6 , - 1 . 0 , - 0 . 8 ] ) <nl> - . castShadows ( false ) / / TODO : re - enable shadows after Chrome 74 <nl> . sunAngularRadius ( 1 . 9 ) <nl> . 
sunHaloSize ( 10 . 0 ) <nl> . sunHaloFalloff ( 80 . 0 ) <nl> Binary files a / docs / webgl / nonlit . filamat and b / docs / webgl / nonlit . filamat differ <nl> Binary files a / docs / webgl / parquet . filamat and b / docs / webgl / parquet . filamat differ <nl> mmm a / docs / webgl / parquet . html <nl> ppp b / docs / webgl / parquet . html <nl> <nl> const iblfile = ' venetian_crossroads_2k / venetian_crossroads_2k_ibl . ktx ' ; <nl> const skyfile = ' venetian_crossroads_2k / venetian_crossroads_2k_skybox . ktx ' ; <nl> <nl> + let ready = true ; <nl> + <nl> Filament . init ( [ <nl> ' parquet . filamat ' , <nl> ' shader_ball . filamesh ' , <nl> Binary files a / docs / webgl / pillars_2k / pillars_2k_ibl . ktx and b / docs / webgl / pillars_2k / pillars_2k_ibl . ktx differ <nl> Binary files a / docs / webgl / pillars_2k / pillars_2k_ibl_etc . ktx and b / docs / webgl / pillars_2k / pillars_2k_ibl_etc . ktx differ <nl> Binary files a / docs / webgl / pillars_2k / pillars_2k_ibl_s3tc . ktx and b / docs / webgl / pillars_2k / pillars_2k_ibl_s3tc . ktx differ <nl> Binary files a / docs / webgl / pillars_2k / pillars_2k_skybox . ktx and b / docs / webgl / pillars_2k / pillars_2k_skybox . ktx differ <nl> Binary files a / docs / webgl / pillars_2k / pillars_2k_skybox_tiny . ktx and b / docs / webgl / pillars_2k / pillars_2k_skybox_tiny . ktx differ <nl> Binary files a / docs / webgl / plastic . filamat and b / docs / webgl / plastic . filamat differ <nl> mmm a / docs / webgl / reference . html <nl> ppp b / docs / webgl / reference . html <nl> < h3 > < a id = " enums " href = " # enums " > Enumerations < / a > < / h3 > <nl> < td align = " left " > < / td > <nl> < / tr > <nl> < tr > <nl> + < td align = " left " > < a href = " # RenderTarget % 24AttachmentPoint " > RenderTarget $ AttachmentPoint < / a > < / td > <nl> + < td align = " left " > < / td > <nl> + < / tr > <nl> + < tr > <nl> < td align = " left " > < a href = " # RenderableManager % 24PrimitiveType " > RenderableManager $ PrimitiveType < / a > < / td > <nl> < td align = " left " > < / td > <nl> < / tr > <nl> < h3 > < a id = " enums " href = " # enums " > Enumerations < / a > < / h3 > <nl> < td align = " left " > < / td > <nl> < / tr > <nl> < tr > <nl> + < td align = " left " > < a href = " # Texture % 24CubemapFace " > Texture $ CubemapFace < / a > < / td > <nl> + < td align = " left " > < / td > <nl> + < / tr > <nl> + < tr > <nl> < td align = " left " > < a href = " # Texture % 24InternalFormat " > Texture $ InternalFormat < / a > < / td > <nl> < td align = " left " > < / td > <nl> < / tr > <nl> < h2 > class < a id = ' Engine ' href = ' # Engine ' > Engine < / a > < / h2 > <nl> < ul > <nl> < li > Utility that creates an [ IndirectLight ] from a KTX file . < / li > <nl> < li > < em > buffer < / em > asset string , or Uint8Array , or < a href = " # Buffer " > Buffer < / a > with KTX file contents < / li > <nl> - < li > < em > options < / em > Options dictionary . For now , the < code > rgbm < / code > boolean is the only option . < / li > <nl> + < li > < em > options < / em > Options dictionary . < / li > <nl> < li > < em > returns < / em > < a href = " # IndirectLight " > IndirectLight < / a > < / li > <nl> < / ul > <nl> < / li > <nl> < h2 > class < a id = ' Engine ' href = ' # Engine ' > Engine < / a > < / h2 > <nl> < ul > <nl> < li > Utility function that creates a [ Skybox ] from a KTX file . 
< / li > <nl> < li > < em > buffer < / em > asset string , or Uint8Array , or < a href = " # Buffer " > Buffer < / a > with KTX file contents < / li > <nl> - < li > < em > options < / em > Options dictionary . For now , the < code > rgbm < / code > boolean is the only option . < / li > <nl> + < li > < em > options < / em > Options dictionary . < / li > <nl> < li > < em > returns < / em > < a href = " # Skybox " > Skybox < / a > < / li > <nl> < / ul > <nl> < / li > <nl> < h2 > class < a id = ' Engine ' href = ' # Engine ' > Engine < / a > < / h2 > <nl> < ul > <nl> < li > Utility function that creates a [ Texture ] from a KTX file . < / li > <nl> < li > < em > buffer < / em > asset string , or Uint8Array , or < a href = " # Buffer " > Buffer < / a > with KTX file contents < / li > <nl> - < li > < em > options < / em > Options dictionary . For now , the < code > rgbm < / code > boolean is the only option . < / li > <nl> + < li > < em > options < / em > Options dictionary . < / li > <nl> < li > < em > returns < / em > < a href = " # Texture " > Texture < / a > < / li > <nl> < / ul > <nl> < / li > <nl> < h2 > class < a id = ' Engine ' href = ' # Engine ' > Engine < / a > < / h2 > <nl> < ul > <nl> < li > Creates a 2D [ Texture ] from the raw contents of a PNG file . < / li > <nl> < li > < em > buffer < / em > asset string , or Uint8Array , or < a href = " # Buffer " > Buffer < / a > with PNG file contents < / li > <nl> - < li > < em > options < / em > object with optional < code > srgb < / code > , < code > rgbm < / code > , < code > noalpha < / code > , and < code > nomips < / code > keys . < / li > <nl> + < li > < em > options < / em > object with optional < code > srgb < / code > , < code > noalpha < / code > , and < code > nomips < / code > keys . < / li > <nl> < li > < em > returns < / em > < a href = " # Texture " > Texture < / a > < / li > <nl> < / ul > <nl> < / li > <nl> < h2 > class < a id = ' Engine ' href = ' # Engine ' > Engine < / a > < / h2 > <nl> < li > < em > instance < / em > the < a href = " # MaterialInstance " > MaterialInstance < / a > to destroy < / li > <nl> < / ul > <nl> < / li > <nl> + < li > < strong > engine . destroyRenderTarget ( rt ) < / strong > <nl> + < ul > <nl> + < li > < em > rt < / em > the < a href = " # RenderTarget " > RenderTarget < / a > to destroy < / li > <nl> + < / ul > <nl> + < / li > <nl> < li > < strong > engine . destroyRenderer ( renderer ) < / strong > <nl> < ul > <nl> < li > < em > renderer < / em > an instance of < a href = " # Renderer " > Renderer < / a > < / li > <nl> < h2 > class < a id = ' KtxBundle ' href = ' # KtxBundle ' > KtxBundle < / a > < / h2 > <nl> < li > < em > returns < / em > string < / li > <nl> < / ul > <nl> < / li > <nl> - < li > < strong > ktxBundle . getPixelDataFormat ( rgbm ) < / strong > <nl> + < li > < strong > ktxBundle . getPixelDataFormat ( ) < / strong > <nl> < ul > <nl> - < li > < em > rgbm < / em > boolean that configures the alpha channel into an HDR scale . 
< / li > <nl> < li > < em > returns < / em > < a href = " # PixelDataFormat " > PixelDataFormat < / a > < / li > <nl> < / ul > <nl> < / li > <nl> < h2 > enum < a id = ' PixelDataFormat ' href = ' # PixelDataFormat ' > PixelDataFormat < / a > < / h2 <nl> < li > RGB_INTEGER < / li > <nl> < li > RGBA < / li > <nl> < li > RGBA_INTEGER < / li > <nl> - < li > RGBM < / li > <nl> < li > DEPTH_COMPONENT < / li > <nl> < li > DEPTH_STENCIL < / li > <nl> < li > ALPHA < / li > <nl> < h2 > enum < a id = ' PixelDataType ' href = ' # PixelDataType ' > PixelDataType < / a > < / h2 > <nl> < / ul > <nl> < / div > <nl> < div class = ' enumdoc ' > <nl> + < h2 > enum < a id = ' RenderTarget $ AttachmentPoint ' href = ' # RenderTarget $ AttachmentPoint ' > RenderTarget $ AttachmentPoint < / a > < / h2 > <nl> + < ul > <nl> + < li > COLOR < / li > <nl> + < li > DEPTH < / li > <nl> + < / ul > <nl> + < / div > <nl> + < div class = ' enumdoc ' > <nl> < h2 > enum < a id = ' RenderableManager $ PrimitiveType ' href = ' # RenderableManager $ PrimitiveType ' > RenderableManager $ PrimitiveType < / a > < / h2 > <nl> < ul > <nl> < li > POINTS < / li > <nl> < h2 > enum < a id = ' RgbaType ' href = ' # RgbaType ' > RgbaType < / a > < / h2 > <nl> < / ul > <nl> < / div > <nl> < div class = ' enumdoc ' > <nl> + < h2 > enum < a id = ' Texture $ CubemapFace ' href = ' # Texture $ CubemapFace ' > Texture $ CubemapFace < / a > < / h2 > <nl> + < ul > <nl> + < li > POSITIVE_X < / li > <nl> + < li > NEGATIVE_X < / li > <nl> + < li > POSITIVE_Y < / li > <nl> + < li > NEGATIVE_Y < / li > <nl> + < li > POSITIVE_Z < / li > <nl> + < li > NEGATIVE_Z < / li > <nl> + < / ul > <nl> + < / div > <nl> + < div class = ' enumdoc ' > <nl> < h2 > enum < a id = ' Texture $ InternalFormat ' href = ' # Texture $ InternalFormat ' > Texture $ InternalFormat < / a > < / h2 > <nl> < ul > <nl> < li > R8 < / li > <nl> < h2 > enum < a id = ' Texture $ Usage ' href = ' # Texture $ Usage ' > Texture $ Usage < / a > < / h2 > <nl> < li > DEFAULT < / li > <nl> < li > COLOR_ATTACHMENT < / li > <nl> < li > DEPTH_ATTACHMENT < / li > <nl> + < li > STENCIL_ATTACHMENT < / li > <nl> + < li > UPLOADABLE < / li > <nl> + < li > SAMPLEABLE < / li > <nl> < / ul > <nl> < / div > <nl> < div class = ' enumdoc ' > <nl> < h2 > enum < a id = ' VertexAttribute ' href = ' # VertexAttribute ' > VertexAttribute < / a > < / h2 <nl> < li > UV1 < / li > <nl> < li > BONE_INDICES < / li > <nl> < li > BONE_WEIGHTS < / li > <nl> + < li > CUSTOM0 < / li > <nl> + < li > CUSTOM1 < / li > <nl> + < li > CUSTOM2 < / li > <nl> + < li > CUSTOM3 < / li > <nl> + < li > CUSTOM4 < / li > <nl> + < li > CUSTOM5 < / li > <nl> + < li > CUSTOM6 < / li > <nl> + < li > CUSTOM7 < / li > <nl> < / ul > <nl> < / div > <nl> < div class = ' enumdoc ' > <nl> Binary files a / docs / webgl / sandbox . filamat and b / docs / webgl / sandbox . filamat differ <nl> Binary files a / docs / webgl / shader_ball . filamesh and b / docs / webgl / shader_ball . filamesh differ <nl> new file mode 100644 <nl> index 000000000 . . bd8b4ed92 <nl> mmm / dev / null <nl> ppp b / docs / webgl / skinning . html <nl> <nl> + < ! DOCTYPE html > <nl> + < html lang = " en " > <nl> + < head > <nl> + < title > Skinning < / title > <nl> + < meta charset = " utf - 8 " > <nl> + < meta name = " viewport " content = " width = device - width , user - scalable = no , initial - scale = 1 " > <nl> + < link href = " https : / / google . github . io / filament / favicon . 
png " rel = " icon " type = " image / x - icon " / > <nl> + < style > <nl> + body { margin : 0 ; overflow : hidden ; } <nl> + canvas { touch - action : none ; width : 100 % ; height : 100 % ; } <nl> + < / style > <nl> + < / head > <nl> + < body > <nl> + < canvas > < / canvas > <nl> + < script src = " filament . js " > < / script > <nl> + < script src = " gl - matrix - min . js " > < / script > <nl> + < script > <nl> + <nl> + let buffer0 = " AAABAAMAAAADAAIAAgADAAUAAgAFAAQABAAFAAcABAAHAAYABgAHAAkABgAJAAgAAAAAAAAAAAAAAAAAAACAPwAAAAAAAAAAAAAAAAAAAD8AAAAAAACAPwAAAD8AAAAAAAAAAAAAgD8AAAAAAACAPwAAgD8AAAAAAAAAAAAAwD8AAAAAAACAPwAAwD8AAAAAAAAAAAAAAEAAAAAAAACAPwAAAEAAAAAA " ; <nl> + let buffer1 = " AAABAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAgD8AAAAAAAAAAAAAAAAAAIA / AAAAAAAAAAAAAAAAAABAPwAAgD4AAAAAAAAAAAAAQD8AAIA + AAAAAAAAAAAAAAA / AAAAPwAAAAAAAAAAAAAAPwAAAD8AAAAAAAAAAAAAgD4AAEA / AAAAAAAAAAAAAIA + AABAPwAAAAAAAAAAAAAAAAAAgD8AAAAAAAAAAAAAAAAAAIA / AAAAAAAAAAA = " ; <nl> + let buffer2 = " AACAPwAAAAAAAAAAAAAAAAAAAAAAAIA / AAAAAAAAAAAAAAAAAAAAAAAAgD8AAAAAAAAAvwAAgL8AAAAAAACAPwAAgD8AAAAAAAAAAAAAAAAAAAAAAACAPwAAAAAAAAAAAAAAAAAAAAAAAIA / AAAAAAAAAL8AAIC / AAAAAAAAgD8 = " ; <nl> + let buffer3 = " AAAAAAAAAD8AAIA / AADAPwAAAEAAACBAAABAQAAAYEAAAIBAAACQQAAAoEAAALBAAAAAAAAAAAAAAAAAAACAPwAAAAAAAAAAkxjEPkSLbD8AAAAAAAAAAPT9ND / 0 / TQ / AAAAAAAAAAD0 / TQ / 9P00PwAAAAAAAAAAkxjEPkSLbD8AAAAAAAAAAAAAAAAAAIA / AAAAAAAAAAAAAAAAAACAPwAAAAAAAAAAkxjEvkSLbD8AAAAAAAAAAPT9NL / 0 / TQ / AAAAAAAAAAD0 / TS / 9P00PwAAAAAAAAAAkxjEvkSLbD8AAAAAAAAAAAAAAAAAAIA / " ; <nl> + <nl> + buffer0 = Uint8Array . from ( atob ( buffer0 ) , c = > c . charCodeAt ( 0 ) ) . buffer ; <nl> + buffer1 = Uint8Array . from ( atob ( buffer1 ) , c = > c . charCodeAt ( 0 ) ) . buffer ; <nl> + buffer2 = Uint8Array . from ( atob ( buffer2 ) , c = > c . charCodeAt ( 0 ) ) . buffer ; <nl> + buffer3 = Uint8Array . from ( atob ( buffer3 ) , c = > c . charCodeAt ( 0 ) ) . buffer ; <nl> + <nl> + const bufview0 = new Uint16Array ( buffer0 , 0 , 24 ) ; / / ushort indices <nl> + const bufview1 = new Float32Array ( buffer0 , 48 ) ; / / vec3 positions <nl> + const bufview2 = new Uint8Array ( buffer1 , 0 ) ; / / bone indices and weights <nl> + const bufview3 = new Float32Array ( buffer2 ) ; / / two bone matrices ( inverseBindMatrices ) <nl> + const bufview4 = new Float32Array ( buffer3 , 0 , 12 ) ; / / 12 floats ( time in seconds ) <nl> + const bufview5 = new Float32Array ( buffer3 , 48 ) ; / / 12 rotation quaternions <nl> + <nl> + / * <nl> + <nl> + This demo is heavily inspired by gltfTutorial_019_SimpleSkin : <nl> + <nl> + " nodes " : [ <nl> + { " skin " : 0 , " mesh " : 0 , " children " : [ 1 ] } , <nl> + { " children " : [ 2 ] , " translation " : [ 0 . 0 , 1 . 0 , 0 . 0 ] } , <nl> + { " rotation " : [ 0 . 0 , 0 . 0 , 0 . 0 , 1 . 0 ] } <nl> + ] , <nl> + " skins " : [ { <nl> + " inverseBindMatrices " : 4 , / / points to an accessor with two matrices <nl> + " joints " : [ 1 , 2 ] / / the 2nd and 3rd nodes ( which have no geometry ) are the joints <nl> + } ] , <nl> + " animations " : [ { <nl> + " channels " : [ { <nl> + " sampler " : 0 , <nl> + " target " : { " node " : 2 , " path " : " rotation " } / / the animation only applies to the 3rd node <nl> + } ] , <nl> + " samplers " : [ { " input " : 5 , " interpolation " : " LINEAR " , " output " : 6 } ] <nl> + } ] , <nl> + . . . 
<nl> + <nl> + * / <nl> + <nl> + Filament . init ( [ ' nonlit . filamat ' ] , ( ) = > { <nl> + window . AttributeType = Filament . VertexBuffer $ AttributeType ; <nl> + window . Fov = Filament . Camera $ Fov ; <nl> + window . Projection = Filament . Camera $ Projection ; <nl> + window . VertexAttribute = Filament . VertexAttribute ; <nl> + window . app = new App ( document . getElementsByTagName ( ' canvas ' ) [ 0 ] ) ; <nl> + } ) ; <nl> + <nl> + class App { <nl> + constructor ( canvas ) { <nl> + this . canvas = canvas ; <nl> + const engine = this . engine = Filament . Engine . create ( this . canvas ) ; <nl> + this . scene = engine . createScene ( ) ; <nl> + this . mesh = Filament . EntityManager . get ( ) . create ( ) ; <nl> + this . scene . addEntity ( this . mesh ) ; <nl> + <nl> + this . ib = Filament . IndexBuffer . Builder ( ) <nl> + . indexCount ( 24 ) <nl> + . bufferType ( Filament . IndexBuffer $ IndexType . USHORT ) <nl> + . build ( engine ) ; <nl> + <nl> + this . ib . setBuffer ( engine , bufview0 ) ; <nl> + <nl> + this . vb = Filament . VertexBuffer . Builder ( ) <nl> + . vertexCount ( 10 ) <nl> + . bufferCount ( 3 ) <nl> + . attribute ( VertexAttribute . POSITION , 0 , AttributeType . FLOAT3 , 0 , 12 ) <nl> + . attribute ( VertexAttribute . BONE_INDICES , 1 , AttributeType . USHORT4 , 0 , 16 ) <nl> + . attribute ( VertexAttribute . BONE_WEIGHTS , 2 , AttributeType . FLOAT4 , 0 , 16 ) <nl> + . build ( engine ) ; <nl> + <nl> + this . vb . setBufferAt ( engine , 0 , bufview1 ) ; <nl> + this . vb . setBufferAt ( engine , 1 , bufview2 . subarray ( 0 , 160 ) ) ; <nl> + this . vb . setBufferAt ( engine , 2 , bufview2 . subarray ( 160 , 320 ) ) ; <nl> + <nl> + const mat = engine . createMaterial ( ' nonlit . filamat ' ) ; <nl> + const matinst = mat . getDefaultInstance ( ) ; <nl> + Filament . RenderableManager . Builder ( 1 ) <nl> + . boundingBox ( { center : [ - 1 , - 1 , - 1 ] , halfExtent : [ 1 , 1 , 1 ] } ) <nl> + . material ( 0 , matinst ) <nl> + . geometry ( 0 , Filament . RenderableManager $ PrimitiveType . TRIANGLES , this . vb , this . ib ) <nl> + . skinning ( 2 ) <nl> + . build ( engine , this . mesh ) ; <nl> + <nl> + this . swapChain = engine . createSwapChain ( ) ; <nl> + this . renderer = engine . createRenderer ( ) ; <nl> + <nl> + this . camera = engine . createCamera ( ) ; <nl> + const eye = [ 0 , 0 , 4 ] , center = [ 0 , 0 , 0 ] , up = [ 0 , 1 , 0 ] ; <nl> + this . camera . lookAt ( eye , center , up ) ; <nl> + <nl> + this . view = engine . createView ( ) ; <nl> + this . view . setCamera ( this . camera ) ; <nl> + this . view . setScene ( this . scene ) ; <nl> + this . view . setClearColor ( [ 0 . 0 , 0 . 1 , 0 . 2 , 1 . 0 ] ) ; <nl> + <nl> + this . resize ( ) ; <nl> + this . render = this . render . bind ( this ) ; <nl> + this . resize = this . resize . bind ( this ) ; <nl> + window . addEventListener ( ' resize ' , this . resize ) ; <nl> + window . requestAnimationFrame ( this . render ) ; <nl> + } <nl> + <nl> + render ( ) { <nl> + <nl> + const endTime = 5 . 5 ; <nl> + const timepoints = bufview4 ; <nl> + const quats = bufview5 ; <nl> + const inverseBindMatrices = bufview3 ; <nl> + <nl> + const nframes = timepoints . length ; <nl> + const seconds = ( Date . now ( ) / 1000 ) % endTime ; <nl> + let t = - 1 ; <nl> + let q = [ 0 , 0 , 0 , 1 ] ; <nl> + for ( let i = 0 ; i < nframes ; i + + ) { <nl> + const j = ( i + 1 ) % nframes ; <nl> + const next = ( i = = nframes - 1 ) ? 
( endTime + timepoints [ 0 ] ) : timepoints [ i + 1 ] ; <nl> + const curr = timepoints [ i ] ; <nl> + if ( seconds > = curr & & seconds < next ) { <nl> + t = ( seconds - curr ) / ( next - curr ) ; <nl> + const q0 = quats . subarray ( 4 * i , 4 * ( i + 1 ) ) ; <nl> + const q1 = quats . subarray ( 4 * j , 4 * ( j + 1 ) ) ; <nl> + quat . slerp ( q , q0 , q1 , t ) ; <nl> + break ; <nl> + } <nl> + } <nl> + <nl> + const transforms = [ mat4 . create ( ) , mat4 . create ( ) ] ; <nl> + <nl> + mat4 . multiply ( transforms [ 0 ] , transforms [ 0 ] , inverseBindMatrices . subarray ( 0 , 16 ) ) ; <nl> + mat4 . multiply ( transforms [ 1 ] , transforms [ 1 ] , inverseBindMatrices . subarray ( 16 , 32 ) ) ; <nl> + <nl> + const m = mat4 . fromQuat ( mat4 . create ( ) , q ) ; <nl> + mat4 . multiply ( transforms [ 1 ] , m , transforms [ 1 ] ) ; <nl> + <nl> + const rm = this . engine . getRenderableManager ( ) ; <nl> + const renderable = rm . getInstance ( this . mesh ) ; <nl> + rm . setBonesFromMatrices ( renderable , transforms , 0 ) ; <nl> + renderable . delete ( ) ; <nl> + <nl> + this . renderer . render ( this . swapChain , this . view ) ; <nl> + window . requestAnimationFrame ( this . render ) ; <nl> + } <nl> + <nl> + resize ( ) { <nl> + const dpr = window . devicePixelRatio ; <nl> + const width = this . canvas . width = window . innerWidth * dpr ; <nl> + const height = this . canvas . height = window . innerHeight * dpr ; <nl> + this . view . setViewport ( [ 0 , 0 , width , height ] ) ; <nl> + <nl> + const aspect = width / height ; <nl> + const fov = aspect < 1 ? Fov . HORIZONTAL : Fov . VERTICAL ; <nl> + this . camera . setProjectionFov ( 45 , aspect , 1 . 0 , 10 . 0 , fov ) ; <nl> + } <nl> + } <nl> + < / script > <nl> + < / body > <nl> + < / html > <nl> Binary files a / docs / webgl / suzanne . filamesh and b / docs / webgl / suzanne . filamesh differ <nl> mmm a / docs / webgl / suzanne . html <nl> ppp b / docs / webgl / suzanne . html <nl> <nl> < script src = " gl - matrix - min . js " > < / script > <nl> < script > <nl> <nl> - const ibl_suffix = Filament . getSupportedFormatSuffix ( ' etc s3tc ' ) ; <nl> const albedo_suffix = Filament . getSupportedFormatSuffix ( ' astc s3tc ' ) ; <nl> const texture_suffix = Filament . getSupportedFormatSuffix ( ' etc ' ) ; <nl> <nl> const env = ' syferfontein_18d_clear_2k ' <nl> - const ibl_url = ` $ { env } / $ { env } _ibl $ { ibl_suffix } . ktx ` ; <nl> + const ibl_url = ` $ { env } / $ { env } _ibl . ktx ` ; <nl> const sky_url = ` $ { env } / $ { env } _skybox . ktx ` ; <nl> const albedo_url = ` albedo $ { albedo_suffix } . ktx ` ; <nl> const ao_url = ` ao $ { texture_suffix } . ktx ` ; <nl> Binary files a / docs / webgl / syferfontein_18d_clear_2k / syferfontein_18d_clear_2k_ibl . ktx and b / docs / webgl / syferfontein_18d_clear_2k / syferfontein_18d_clear_2k_ibl . ktx differ <nl> Binary files a / docs / webgl / syferfontein_18d_clear_2k / syferfontein_18d_clear_2k_ibl_etc . ktx and b / docs / webgl / syferfontein_18d_clear_2k / syferfontein_18d_clear_2k_ibl_etc . ktx differ <nl> Binary files a / docs / webgl / syferfontein_18d_clear_2k / syferfontein_18d_clear_2k_ibl_s3tc . ktx and b / docs / webgl / syferfontein_18d_clear_2k / syferfontein_18d_clear_2k_ibl_s3tc . ktx differ <nl> Binary files a / docs / webgl / syferfontein_18d_clear_2k / syferfontein_18d_clear_2k_skybox . ktx and b / docs / webgl / syferfontein_18d_clear_2k / syferfontein_18d_clear_2k_skybox . 
ktx differ <nl> Binary files a / docs / webgl / syferfontein_18d_clear_2k / syferfontein_18d_clear_2k_skybox_tiny . ktx and b / docs / webgl / syferfontein_18d_clear_2k / syferfontein_18d_clear_2k_skybox_tiny . ktx differ <nl> Binary files a / docs / webgl / textured . filamat and b / docs / webgl / textured . filamat differ <nl> Binary files a / docs / webgl / triangle . filamat and b / docs / webgl / triangle . filamat differ <nl> mmm a / docs / webgl / tutorial_redball . html <nl> ppp b / docs / webgl / tutorial_redball . html <nl> < h2 > Add lighting < / h2 > <nl> parameters because Filament will automatically draw a disk into the skybox . < / p > <nl> < p > Next we need to create an < code > IndirectLight < / code > object from the KTX IBL . One way of doing this is the <nl> following ( don ' t type this out , there ' s an easier way ) . < / p > <nl> - < div class = " highlight " style = " background : # f8f8f8 " > < pre style = " line - height : 125 % " > < span > < / span > < span style = " color : # 008000 ; font - weight : bold " > const < / span > format < span style = " color : # 666666 " > = < / span > Filament . PixelDataFormat . RGBM ; <nl> - < span style = " color : # 008000 ; font - weight : bold " > const < / span > datatype < span style = " color : # 666666 " > = < / span > Filament . PixelDataType . UBYTE ; <nl> + < div class = " highlight " style = " background : # f8f8f8 " > < pre style = " line - height : 125 % " > < span > < / span > < span style = " color : # 008000 ; font - weight : bold " > const < / span > format < span style = " color : # 666666 " > = < / span > Filament . PixelDataFormat . RGB ; <nl> + < span style = " color : # 008000 ; font - weight : bold " > const < / span > datatype < span style = " color : # 666666 " > = < / span > Filament . PixelDataType . UINT_10F_11F_11F_REV ; <nl> <nl> < span style = " color : # 408080 ; font - style : italic " > / / Create a Texture object for the mipmapped cubemap . < / span > <nl> < span style = " color : # 008000 ; font - weight : bold " > const < / span > ibl_package < span style = " color : # 666666 " > = < / span > Filament . Buffer ( Filament . assets [ ibl_url ] ) ; <nl> < h2 > Add lighting < / h2 > <nl> . levels ( iblktx . getNumMipLevels ( ) ) <nl> . sampler ( Filament . Texture $ Sampler . SAMPLER_CUBEMAP ) <nl> . format ( Filament . Texture $ InternalFormat . RGBA8 ) <nl> - . rgbm ( < span style = " color : # 008000 ; font - weight : bold " > true < / span > ) <nl> . build ( engine ) ; <nl> <nl> < span style = " color : # 008000 ; font - weight : bold " > for < / span > ( < span style = " color : # 008000 ; font - weight : bold " > let < / span > level < span style = " color : # 666666 " > = < / span > < span style = " color : # 666666 " > 0 < / span > ; level < span style = " color : # 666666 " > & lt ; < / span > iblktx . getNumMipLevels ( ) ; < span style = " color : # 666666 " > + + < / span > level ) { <nl> < h2 > Add background < / h2 > <nl> . levels ( < span style = " color : # 666666 " > 1 < / span > ) <nl> . sampler ( Filament . Texture $ Sampler . SAMPLER_CUBEMAP ) <nl> . format ( Filament . Texture $ InternalFormat . RGBA8 ) <nl> - . rgbm ( < span style = " color : # 008000 ; font - weight : bold " > true < / span > ) <nl> . build ( engine ) ; <nl> <nl> < span style = " color : # 008000 ; font - weight : bold " > const < / span > uint8array < span style = " color : # 666666 " > = < / span > skyktx . getCubeBlob ( < span style = " color : # 666666 " > 0 < / span > ) . 
getBytes ( ) ; <nl> mmm a / docs / webgl / tutorial_redball . js <nl> ppp b / docs / webgl / tutorial_redball . js <nl> class App { <nl> const material = engine . createMaterial ( filamat_url ) ; <nl> const matinstance = material . createInstance ( ) ; <nl> const red = [ 0 . 8 , 0 . 0 , 0 . 0 ] ; <nl> - matinstance . setColorParameter ( ' baseColor ' , Filament . RgbType . sRGB , red ) ; <nl> + matinstance . setColor3Parameter ( ' baseColor ' , Filament . RgbType . sRGB , red ) ; <nl> matinstance . setFloatParameter ( ' roughness ' , 0 . 5 ) ; <nl> matinstance . setFloatParameter ( ' clearCoat ' , 1 . 0 ) ; <nl> matinstance . setFloatParameter ( ' clearCoatRoughness ' , 0 . 3 ) ; <nl> mmm a / web / filament - js / utilities . js <nl> ppp b / web / filament - js / utilities . js <nl> Filament . _createIblFromKtx = function ( ktxdata , engine , options ) { <nl> <nl> const format = iblktx . info ( ) . glInternalFormat ; <nl> if ( format ! = this . ctx . R11F_G11F_B10F & & format ! = this . ctx . RGB16F & & format ! = this . ctx . RGB32F ) { <nl> - console . warning ( ' IBL texture format is 0x ' + internalFormat . toString ( 16 ) + <nl> + console . warn ( ' IBL texture format is 0x ' + format . toString ( 16 ) + <nl> ' which is not an expected floating - point format . Please use cmgen to generate IBL . ' ) ; <nl> } <nl> <nl> | Update WebGL demos and docs on site . | google/filament | f139589423cc37f08846ecda270b6347317a5c46 | 2019-07-11T19:04:43Z |
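Editorial note on the google/filament record above: the new docs/webgl/skinning.html demo it adds drives its second bone by scanning a small keyframe table every frame and slerping between the two surrounding rotation quaternions before handing the result to RenderableManager.setBonesFromMatrices. Below is a minimal sketch of that sampling step, written against the gl-matrix global quat that the demo already loads, with keyframe arrays assumed to be laid out like the demo's bufview4 (times) and bufview5 (x,y,z,w quaternions); it is an illustration of the technique, not code taken from the commit.

// Sketch only: sample a looping quaternion track at a given time in seconds.
// timepoints: Float32Array of keyframe times; quats: Float32Array, 4 floats (x,y,z,w) per keyframe.
function sampleRotation(timepoints, quats, seconds, endTime) {
    const nframes = timepoints.length;
    const out = quat.create();                    // gl-matrix quaternion, identity by default
    for (let i = 0; i < nframes; i++) {
        const j = (i + 1) % nframes;              // next keyframe index, wrapping for the loop
        const next = (i === nframes - 1) ? (endTime + timepoints[0]) : timepoints[i + 1];
        const curr = timepoints[i];
        if (seconds >= curr && seconds < next) {
            const t = (seconds - curr) / (next - curr);    // normalized position between keyframes
            const q0 = quats.subarray(4 * i, 4 * (i + 1));
            const q1 = quats.subarray(4 * j, 4 * (j + 1));
            quat.slerp(out, q0, q1, t);           // spherical interpolation between the two rotations
            break;
        }
    }
    return out;
}

In the demo itself, the sampled quaternion is then turned into a matrix with mat4.fromQuat, combined with the joint's inverse bind matrix, and both bone transforms are uploaded through setBonesFromMatrices, mirroring the glTF SimpleSkin tutorial cited in its comment block.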
mmm a / cocos / audio / android / jni / cddandroidAndroidJavaEngine . cpp <nl> ppp b / cocos / audio / android / jni / cddandroidAndroidJavaEngine . cpp <nl> THE SOFTWARE . <nl> # include " cddandroidAndroidJavaEngine . h " <nl> # include < stdlib . h > <nl> # include < android / log . h > <nl> - # include < jni . h > <nl> # include < sys / system_properties . h > <nl> - # include " platform / android / jni / JniHelper . h " <nl> # include " ccdandroidUtils . h " <nl> # include " audio / include / AudioEngine . h " <nl> + # include " platform / android / jni / JniHelper . h " <nl> <nl> / / logging <nl> # define LOG_TAG " cocosdenshion : : android : : AndroidJavaEngine " <nl> # define LOGD ( . . . ) __android_log_print ( ANDROID_LOG_DEBUG , LOG_TAG , __VA_ARGS__ ) <nl> <nl> / / Java class <nl> - # define CLASS_NAME " org / cocos2dx / lib / Cocos2dxHelper " <nl> + static const std : : string helperClassName = " org / cocos2dx / lib / Cocos2dxHelper " ; <nl> <nl> + using namespace cocos2d ; <nl> using namespace cocos2d : : experimental ; <nl> using namespace CocosDenshion : : android ; <nl> <nl> - static inline bool getJNIStaticMethodInfo ( cocos2d : : JniMethodInfo & methodinfo , <nl> - const char * methodName , <nl> - const char * paramCode ) { <nl> - return cocos2d : : JniHelper : : getStaticMethodInfo ( methodinfo , <nl> - CLASS_NAME , <nl> - methodName , <nl> - paramCode ) ; <nl> - } <nl> - <nl> AndroidJavaEngine : : AndroidJavaEngine ( ) <nl> : _implementBaseOnAudioEngine ( false ) <nl> , _effectVolume ( 1 . f ) <nl> AndroidJavaEngine : : ~ AndroidJavaEngine ( ) <nl> { <nl> stopAllEffects ( ) ; <nl> } <nl> - cocos2d : : JniMethodInfo methodInfo ; <nl> <nl> - if ( ! getJNIStaticMethodInfo ( methodInfo , " end " , " ( ) V " ) ) { <nl> - return ; <nl> - } <nl> - <nl> - methodInfo . env - > CallStaticVoidMethod ( methodInfo . classID , methodInfo . methodID ) ; <nl> - methodInfo . env - > DeleteLocalRef ( methodInfo . classID ) ; <nl> + JniHelper : : callStaticVoidMethod ( helperClassName , " end " ) ; <nl> } <nl> <nl> void AndroidJavaEngine : : preloadBackgroundMusic ( const char * filePath ) { <nl> std : : string fullPath = CocosDenshion : : android : : getFullPathWithoutAssetsPrefix ( filePath ) ; <nl> - <nl> - / / void playBackgroundMusic ( String , boolean ) <nl> - cocos2d : : JniMethodInfo methodInfo ; <nl> - <nl> - if ( ! getJNIStaticMethodInfo ( methodInfo , " preloadBackgroundMusic " , " ( Ljava / lang / String ; ) V " ) ) { <nl> - return ; <nl> - } <nl> - <nl> - jstring stringArg = methodInfo . env - > NewStringUTF ( fullPath . c_str ( ) ) ; <nl> - methodInfo . env - > CallStaticVoidMethod ( methodInfo . classID , methodInfo . methodID , stringArg ) ; <nl> - methodInfo . env - > DeleteLocalRef ( stringArg ) ; <nl> - methodInfo . env - > DeleteLocalRef ( methodInfo . classID ) ; <nl> + JniHelper : : callStaticVoidMethod ( helperClassName , " preloadBackgroundMusic " , filePath ) ; <nl> } <nl> <nl> void AndroidJavaEngine : : playBackgroundMusic ( const char * filePath , bool loop ) { <nl> std : : string fullPath = CocosDenshion : : android : : getFullPathWithoutAssetsPrefix ( filePath ) ; <nl> - <nl> - cocos2d : : JniMethodInfo methodInfo ; <nl> - <nl> - if ( ! getJNIStaticMethodInfo ( methodInfo , " playBackgroundMusic " , " ( Ljava / lang / String ; Z ) V " ) ) { <nl> - return ; <nl> - } <nl> - <nl> - jstring stringArg = methodInfo . env - > NewStringUTF ( fullPath . c_str ( ) ) ; <nl> - methodInfo . env - > CallStaticVoidMethod ( methodInfo . classID , methodInfo . 
methodID , stringArg , loop ) ; <nl> - methodInfo . env - > DeleteLocalRef ( stringArg ) ; <nl> - methodInfo . env - > DeleteLocalRef ( methodInfo . classID ) ; <nl> + JniHelper : : callStaticVoidMethod ( helperClassName , " playBackgroundMusic " , filePath , loop ) ; <nl> } <nl> <nl> void AndroidJavaEngine : : stopBackgroundMusic ( bool releaseData ) { <nl> - cocos2d : : JniMethodInfo methodInfo ; <nl> - <nl> - if ( ! getJNIStaticMethodInfo ( methodInfo , " stopBackgroundMusic " , " ( ) V " ) ) { <nl> - return ; <nl> - } <nl> - <nl> - methodInfo . env - > CallStaticVoidMethod ( methodInfo . classID , methodInfo . methodID ) ; <nl> - methodInfo . env - > DeleteLocalRef ( methodInfo . classID ) ; <nl> + JniHelper : : callStaticVoidMethod ( helperClassName , " stopBackgroundMusic " ) ; <nl> } <nl> <nl> void AndroidJavaEngine : : pauseBackgroundMusic ( ) { <nl> - cocos2d : : JniMethodInfo methodInfo ; <nl> - <nl> - if ( ! getJNIStaticMethodInfo ( methodInfo , " pauseBackgroundMusic " , " ( ) V " ) ) { <nl> - return ; <nl> - } <nl> + JniHelper : : callStaticVoidMethod ( helperClassName , " pauseBackgroundMusic " ) ; <nl> <nl> - methodInfo . env - > CallStaticVoidMethod ( methodInfo . classID , methodInfo . methodID ) ; <nl> - methodInfo . env - > DeleteLocalRef ( methodInfo . classID ) ; <nl> } <nl> <nl> void AndroidJavaEngine : : resumeBackgroundMusic ( ) { <nl> - cocos2d : : JniMethodInfo methodInfo ; <nl> - <nl> - if ( ! getJNIStaticMethodInfo ( methodInfo , " resumeBackgroundMusic " , " ( ) V " ) ) { <nl> - return ; <nl> - } <nl> - <nl> - methodInfo . env - > CallStaticVoidMethod ( methodInfo . classID , methodInfo . methodID ) ; <nl> - methodInfo . env - > DeleteLocalRef ( methodInfo . classID ) ; <nl> + JniHelper : : callStaticVoidMethod ( helperClassName , " resumeBackgroundMusic " ) ; <nl> } <nl> <nl> void AndroidJavaEngine : : rewindBackgroundMusic ( ) { <nl> - cocos2d : : JniMethodInfo methodInfo ; <nl> - <nl> - if ( ! getJNIStaticMethodInfo ( methodInfo , " rewindBackgroundMusic " , " ( ) V " ) ) { <nl> - return ; <nl> - } <nl> - <nl> - methodInfo . env - > CallStaticVoidMethod ( methodInfo . classID , methodInfo . methodID ) ; <nl> - methodInfo . env - > DeleteLocalRef ( methodInfo . classID ) ; <nl> + JniHelper : : callStaticVoidMethod ( helperClassName , " rewindBackgroundMusic " ) ; <nl> } <nl> <nl> bool AndroidJavaEngine : : willPlayBackgroundMusic ( ) { <nl> bool AndroidJavaEngine : : willPlayBackgroundMusic ( ) { <nl> } <nl> <nl> bool AndroidJavaEngine : : isBackgroundMusicPlaying ( ) { <nl> - cocos2d : : JniMethodInfo methodInfo ; <nl> - jboolean ret = false ; <nl> - <nl> - if ( ! getJNIStaticMethodInfo ( methodInfo , " isBackgroundMusicPlaying " , " ( ) Z " ) ) { <nl> - return ret ; <nl> - } <nl> - <nl> - ret = methodInfo . env - > CallStaticBooleanMethod ( methodInfo . classID , methodInfo . methodID ) ; <nl> - methodInfo . env - > DeleteLocalRef ( methodInfo . classID ) ; <nl> - <nl> - return ret ; <nl> + return JniHelper : : callStaticBooleanMethod ( helperClassName , " isBackgroundMusicPlaying " ) ; <nl> } <nl> <nl> float AndroidJavaEngine : : getBackgroundMusicVolume ( ) { <nl> - cocos2d : : JniMethodInfo methodInfo ; <nl> - jfloat ret = - 1 . 0 ; <nl> - <nl> - if ( ! getJNIStaticMethodInfo ( methodInfo , " getBackgroundMusicVolume " , " ( ) F " ) ) { <nl> - return ret ; <nl> - } <nl> - <nl> - ret = methodInfo . env - > CallStaticFloatMethod ( methodInfo . classID , methodInfo . methodID ) ; <nl> - methodInfo . env - > DeleteLocalRef ( methodInfo . 
classID ) ; <nl> - <nl> - return ret ; <nl> + return JniHelper : : callStaticFloatMethod ( helperClassName , " getBackgroundMusicVolume " ) ; <nl> } <nl> <nl> void AndroidJavaEngine : : setBackgroundMusicVolume ( float volume ) { <nl> - cocos2d : : JniMethodInfo methodInfo ; <nl> - <nl> - if ( ! getJNIStaticMethodInfo ( methodInfo , " setBackgroundMusicVolume " , " ( F ) V " ) ) { <nl> - return ; <nl> - } <nl> - <nl> - methodInfo . env - > CallStaticVoidMethod ( methodInfo . classID , methodInfo . methodID , volume ) ; <nl> - methodInfo . env - > DeleteLocalRef ( methodInfo . classID ) ; <nl> - } <nl> - <nl> - <nl> - static float _jni_getEffectsVolume ( ) { <nl> - cocos2d : : JniMethodInfo methodInfo ; <nl> - jfloat ret = - 1 . 0 ; <nl> - <nl> - if ( ! getJNIStaticMethodInfo ( methodInfo , " getEffectsVolume " , " ( ) F " ) ) { <nl> - return ret ; <nl> - } <nl> - <nl> - ret = methodInfo . env - > CallStaticFloatMethod ( methodInfo . classID , methodInfo . methodID ) ; <nl> - methodInfo . env - > DeleteLocalRef ( methodInfo . classID ) ; <nl> - <nl> - return ret ; <nl> + JniHelper : : callStaticVoidMethod ( helperClassName , " setBackgroundMusicVolume " , volume ) ; <nl> } <nl> <nl> - static void _jni_setEffectsVolume ( float volume ) { <nl> - cocos2d : : JniMethodInfo methodInfo ; <nl> - <nl> - if ( ! getJNIStaticMethodInfo ( methodInfo , " setEffectsVolume " , " ( F ) V " ) ) { <nl> - return ; <nl> - } <nl> - <nl> - methodInfo . env - > CallStaticVoidMethod ( methodInfo . classID , methodInfo . methodID , volume ) ; <nl> - methodInfo . env - > DeleteLocalRef ( methodInfo . classID ) ; <nl> - } <nl> - <nl> - static unsigned int _jni_playEffect ( const char * filePath , bool loop , float pitch , float pan , float gain ) <nl> - { <nl> - cocos2d : : JniMethodInfo methodInfo ; <nl> - int ret = 0 ; <nl> - std : : string fullPath = CocosDenshion : : android : : getFullPathWithoutAssetsPrefix ( filePath ) ; <nl> - <nl> - if ( ! getJNIStaticMethodInfo ( methodInfo , " playEffect " , " ( Ljava / lang / String ; ZFFF ) I " ) ) { <nl> - return ret ; <nl> - } <nl> - <nl> - jstring stringArg = methodInfo . env - > NewStringUTF ( fullPath . c_str ( ) ) ; <nl> - ret = methodInfo . env - > CallStaticIntMethod ( methodInfo . classID , <nl> - methodInfo . methodID , <nl> - stringArg , <nl> - loop , <nl> - pitch , pan , gain ) ; <nl> - methodInfo . env - > DeleteLocalRef ( stringArg ) ; <nl> - methodInfo . env - > DeleteLocalRef ( methodInfo . classID ) ; <nl> - <nl> - return ( unsigned int ) ret ; <nl> - } <nl> - <nl> - static void _jni_pauseEffect ( unsigned int soundId ) { <nl> - cocos2d : : JniMethodInfo methodInfo ; <nl> - <nl> - if ( ! getJNIStaticMethodInfo ( methodInfo , " pauseEffect " , " ( I ) V " ) ) { <nl> - return ; <nl> - } <nl> - <nl> - methodInfo . env - > CallStaticVoidMethod ( methodInfo . classID , methodInfo . methodID , ( int ) soundId ) ; <nl> - methodInfo . env - > DeleteLocalRef ( methodInfo . classID ) ; <nl> - } <nl> - <nl> - static void _jni_pauseAllEffects ( ) { <nl> - cocos2d : : JniMethodInfo methodInfo ; <nl> - <nl> - if ( ! getJNIStaticMethodInfo ( methodInfo , " pauseAllEffects " , " ( ) V " ) ) { <nl> - return ; <nl> - } <nl> - <nl> - methodInfo . env - > CallStaticVoidMethod ( methodInfo . classID , methodInfo . methodID ) ; <nl> - methodInfo . env - > DeleteLocalRef ( methodInfo . classID ) ; <nl> - } <nl> - <nl> - static void _jni_resumeEffect ( unsigned int soundId ) { <nl> - cocos2d : : JniMethodInfo methodInfo ; <nl> - <nl> - if ( ! 
getJNIStaticMethodInfo ( methodInfo , " resumeEffect " , " ( I ) V " ) ) { <nl> - return ; <nl> - } <nl> - <nl> - methodInfo . env - > CallStaticVoidMethod ( methodInfo . classID , methodInfo . methodID , ( int ) soundId ) ; <nl> - methodInfo . env - > DeleteLocalRef ( methodInfo . classID ) ; <nl> - } <nl> - <nl> - static void _jni_resumeAllEffects ( ) { <nl> - cocos2d : : JniMethodInfo methodInfo ; <nl> - <nl> - if ( ! getJNIStaticMethodInfo ( methodInfo , " resumeAllEffects " , " ( ) V " ) ) { <nl> - return ; <nl> - } <nl> - <nl> - methodInfo . env - > CallStaticVoidMethod ( methodInfo . classID , methodInfo . methodID ) ; <nl> - methodInfo . env - > DeleteLocalRef ( methodInfo . classID ) ; <nl> - } <nl> - <nl> - static void _jni_stopEffect ( unsigned int soundId ) { <nl> - cocos2d : : JniMethodInfo methodInfo ; <nl> - <nl> - if ( ! getJNIStaticMethodInfo ( methodInfo , " stopEffect " , " ( I ) V " ) ) { <nl> - return ; <nl> - } <nl> - <nl> - methodInfo . env - > CallStaticVoidMethod ( methodInfo . classID , methodInfo . methodID , ( int ) soundId ) ; <nl> - methodInfo . env - > DeleteLocalRef ( methodInfo . classID ) ; <nl> - } <nl> - <nl> - static void _jni_stopAllEffects ( ) { <nl> - cocos2d : : JniMethodInfo methodInfo ; <nl> - <nl> - if ( ! getJNIStaticMethodInfo ( methodInfo , " stopAllEffects " , " ( ) V " ) ) { <nl> - return ; <nl> - } <nl> - <nl> - methodInfo . env - > CallStaticVoidMethod ( methodInfo . classID , methodInfo . methodID ) ; <nl> - methodInfo . env - > DeleteLocalRef ( methodInfo . classID ) ; <nl> - } <nl> - <nl> - static void loadEffect ( const char * filePath , char * loadEffectName ) { <nl> - cocos2d : : JniMethodInfo methodInfo ; <nl> - std : : string fullPath = CocosDenshion : : android : : getFullPathWithoutAssetsPrefix ( filePath ) ; <nl> - <nl> - if ( ! cocos2d : : JniHelper : : getStaticMethodInfo ( methodInfo , CLASS_NAME , loadEffectName , " ( Ljava / lang / String ; ) V " ) ) { <nl> - return ; <nl> - } <nl> - <nl> - jstring stringArg = methodInfo . env - > NewStringUTF ( fullPath . c_str ( ) ) ; <nl> - methodInfo . env - > CallStaticVoidMethod ( methodInfo . classID , methodInfo . methodID , stringArg ) ; <nl> - methodInfo . env - > DeleteLocalRef ( stringArg ) ; <nl> - methodInfo . env - > DeleteLocalRef ( methodInfo . 
classID ) ; <nl> - } <nl> - <nl> - static void _jni_preloadEffect ( const char * filePath ) { <nl> - loadEffect ( filePath , " preloadEffect " ) ; <nl> - } <nl> - <nl> - static void _jni_unloadEffect ( const char * filePath ) { <nl> - loadEffect ( filePath , " unloadEffect " ) ; <nl> - } <nl> - <nl> - <nl> float AndroidJavaEngine : : getEffectsVolume ( ) <nl> { <nl> if ( _implementBaseOnAudioEngine ) <nl> float AndroidJavaEngine : : getEffectsVolume ( ) <nl> } <nl> else <nl> { <nl> - return _jni_getEffectsVolume ( ) ; <nl> + return JniHelper : : callStaticFloatMethod ( helperClassName , " getEffectsVolume " ) ; <nl> } <nl> } <nl> <nl> void AndroidJavaEngine : : setEffectsVolume ( float volume ) <nl> } <nl> else <nl> { <nl> - _jni_setEffectsVolume ( volume ) ; <nl> + JniHelper : : callStaticVoidMethod ( helperClassName , " setEffectsVolume " , volume ) ; <nl> } <nl> } <nl> <nl> unsigned int AndroidJavaEngine : : playEffect ( const char * filePath , bool loop , <nl> } <nl> else <nl> { <nl> - return _jni_playEffect ( filePath , loop , pitch , pan , gain ) ; <nl> + std : : string fullPath = CocosDenshion : : android : : getFullPathWithoutAssetsPrefix ( filePath ) ; <nl> + int ret = JniHelper : : callStaticIntMethod ( helperClassName , " playEffect " , fullPath , loop , pitch , pan , gain ) ; <nl> + return ( unsigned int ) ret ; <nl> } <nl> } <nl> <nl> void AndroidJavaEngine : : pauseEffect ( unsigned int soundID ) <nl> } <nl> else <nl> { <nl> - _jni_pauseEffect ( soundID ) ; <nl> + JniHelper : : callStaticVoidMethod ( helperClassName , " pauseEffect " , ( int ) soundID ) ; <nl> } <nl> } <nl> <nl> void AndroidJavaEngine : : resumeEffect ( unsigned int soundID ) <nl> } <nl> else <nl> { <nl> - _jni_resumeEffect ( soundID ) ; <nl> + JniHelper : : callStaticVoidMethod ( helperClassName , " resumeEffect " , ( int ) soundID ) ; <nl> } <nl> } <nl> <nl> void AndroidJavaEngine : : stopEffect ( unsigned int soundID ) <nl> } <nl> else <nl> { <nl> - _jni_stopEffect ( soundID ) ; <nl> + JniHelper : : callStaticVoidMethod ( helperClassName , " stopEffect " , ( int ) soundID ) ; <nl> } <nl> } <nl> <nl> void AndroidJavaEngine : : pauseAllEffects ( ) <nl> } <nl> else <nl> { <nl> - _jni_pauseAllEffects ( ) ; <nl> + JniHelper : : callStaticVoidMethod ( helperClassName , " pauseAllEffects " ) ; <nl> } <nl> } <nl> <nl> void AndroidJavaEngine : : resumeAllEffects ( ) <nl> } <nl> else <nl> { <nl> - _jni_resumeAllEffects ( ) ; <nl> + JniHelper : : callStaticVoidMethod ( helperClassName , " resumeAllEffects " ) ; <nl> } <nl> } <nl> <nl> void AndroidJavaEngine : : stopAllEffects ( ) <nl> } <nl> else <nl> { <nl> - _jni_stopAllEffects ( ) ; <nl> + JniHelper : : callStaticVoidMethod ( helperClassName , " stopAllEffects " ) ; <nl> } <nl> } <nl> <nl> void AndroidJavaEngine : : preloadEffect ( const char * filePath ) <nl> { <nl> if ( ! _implementBaseOnAudioEngine ) <nl> { <nl> - _jni_preloadEffect ( filePath ) ; <nl> + std : : string fullPath = CocosDenshion : : android : : getFullPathWithoutAssetsPrefix ( filePath ) ; <nl> + JniHelper : : callStaticVoidMethod ( helperClassName , " preloadEffect " , fullPath ) ; <nl> } <nl> } <nl> <nl> void AndroidJavaEngine : : unloadEffect ( const char * filePath ) <nl> { <nl> if ( ! 
_implementBaseOnAudioEngine ) <nl> { <nl> - _jni_unloadEffect ( filePath ) ; <nl> + std : : string fullPath = CocosDenshion : : android : : getFullPathWithoutAssetsPrefix ( filePath ) ; <nl> + JniHelper : : callStaticVoidMethod ( helperClassName , " unloadEffect " , fullPath ) ; <nl> } <nl> } <nl> mmm a / cocos / base / CCController - android . cpp <nl> ppp b / cocos / base / CCController - android . cpp <nl> Controller : : Controller ( ) <nl> init ( ) ; <nl> } <nl> <nl> - void Controller : : receiveExternalKeyEvent ( int externalKeyCode , bool receive ) <nl> - { <nl> - JniMethodInfo t ; <nl> - if ( JniHelper : : getStaticMethodInfo ( t , " org / cocos2dx / lib / GameControllerHelper " , " receiveExternalKeyEvent " , " ( IIZ ) V " ) ) { <nl> - <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , _deviceId , externalKeyCode , receive ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> + void Controller : : receiveExternalKeyEvent ( int externalKeyCode , bool receive ) { <nl> + JniHelper : : callStaticVoidMethod ( " org / cocos2dx / lib / GameControllerHelper " , " receiveExternalKeyEvent " , _deviceId , externalKeyCode , receive ) ; <nl> } <nl> <nl> NS_CC_END <nl> mmm a / cocos / base / CCUserDefault - android . cpp <nl> ppp b / cocos / base / CCUserDefault - android . cpp <nl> THE SOFTWARE . <nl> # include " base / base64 . h " <nl> <nl> # if ( CC_TARGET_PLATFORM = = CC_PLATFORM_ANDROID ) <nl> - # include " platform / android / jni / Java_org_cocos2dx_lib_Cocos2dxHelper . h " <nl> + # include " platform / android / jni / JniHelper . h " <nl> <nl> / / root name of xml <nl> # define USERDEFAULT_ROOT_NAME " userDefaultRoot " <nl> THE SOFTWARE . <nl> # include " tinyxml2 . h " <nl> # endif <nl> <nl> - using namespace std ; <nl> + static const std : : string helperClassName = " org / cocos2dx / lib / Cocos2dxHelper " ; <nl> <nl> + using namespace std ; <nl> NS_CC_BEGIN <nl> <nl> / * * <nl> bool UserDefault : : getBoolForKey ( const char * pKey , bool defaultValue ) <nl> } <nl> # endif <nl> <nl> - return getBoolForKeyJNI ( pKey , defaultValue ) ; <nl> + return JniHelper : : callStaticBooleanMethod ( helperClassName , " getBoolForKey " , pKey , defaultValue ) ; <nl> } <nl> <nl> int UserDefault : : getIntegerForKey ( const char * pKey ) <nl> int UserDefault : : getIntegerForKey ( const char * pKey , int defaultValue ) <nl> } <nl> # endif <nl> <nl> - return getIntegerForKeyJNI ( pKey , defaultValue ) ; <nl> + return JniHelper : : callStaticIntMethod ( helperClassName , " getIntegerForKey " , pKey , defaultValue ) ; <nl> } <nl> <nl> float UserDefault : : getFloatForKey ( const char * pKey ) <nl> float UserDefault : : getFloatForKey ( const char * pKey , float defaultValue ) <nl> } <nl> # endif <nl> <nl> - return getFloatForKeyJNI ( pKey , defaultValue ) ; <nl> + return JniHelper : : callStaticFloatMethod ( helperClassName , " getFloatForKey " , pKey , defaultValue ) ; <nl> } <nl> <nl> double UserDefault : : getDoubleForKey ( const char * pKey ) <nl> double UserDefault : : getDoubleForKey ( const char * pKey , double defaultValue ) <nl> } <nl> # endif <nl> <nl> - return getDoubleForKeyJNI ( pKey , defaultValue ) ; <nl> + return JniHelper : : callStaticDoubleMethod ( helperClassName , " getDoubleForKey " , pKey , defaultValue ) ; <nl> } <nl> <nl> std : : string UserDefault : : getStringForKey ( const char * pKey ) <nl> string UserDefault : : getStringForKey ( const char * pKey , const std : : string & defaul <nl> } <nl> # endif <nl> <nl> - return getStringForKeyJNI 
( pKey , defaultValue . c_str ( ) ) ; <nl> + return JniHelper : : callStaticStringMethod ( helperClassName , " getStringForKey " , pKey , defaultValue ) ; <nl> } <nl> <nl> Data UserDefault : : getDataForKey ( const char * pKey ) <nl> Data UserDefault : : getDataForKey ( const char * pKey , const Data & defaultValue ) <nl> char * encodedDefaultData = NULL ; <nl> unsigned int encodedDefaultDataLen = ! defaultValue . isNull ( ) ? base64Encode ( defaultValue . getBytes ( ) , defaultValue . getSize ( ) , & encodedDefaultData ) : 0 ; <nl> <nl> - string encodedStr = getStringForKeyJNI ( pKey , encodedDefaultData ) ; <nl> + string encodedStr = JniHelper : : callStaticStringMethod ( helperClassName , " getStringForKey " , pKey , ( const char * ) encodedDefaultData ) ; <nl> <nl> if ( encodedDefaultData ) <nl> free ( encodedDefaultData ) ; <nl> void UserDefault : : setBoolForKey ( const char * pKey , bool value ) <nl> deleteNodeByKey ( pKey ) ; <nl> # endif <nl> <nl> - return setBoolForKeyJNI ( pKey , value ) ; <nl> + JniHelper : : callStaticVoidMethod ( helperClassName , " setBoolForKey " , pKey , value ) ; <nl> } <nl> <nl> void UserDefault : : setIntegerForKey ( const char * pKey , int value ) <nl> void UserDefault : : setIntegerForKey ( const char * pKey , int value ) <nl> deleteNodeByKey ( pKey ) ; <nl> # endif <nl> <nl> - return setIntegerForKeyJNI ( pKey , value ) ; <nl> + JniHelper : : callStaticVoidMethod ( helperClassName , " setIntegerForKey " , pKey , value ) ; <nl> } <nl> <nl> void UserDefault : : setFloatForKey ( const char * pKey , float value ) <nl> void UserDefault : : setFloatForKey ( const char * pKey , float value ) <nl> deleteNodeByKey ( pKey ) ; <nl> # endif <nl> <nl> - return setFloatForKeyJNI ( pKey , value ) ; <nl> + JniHelper : : callStaticVoidMethod ( helperClassName , " setFloatForKey " , pKey , value ) ; <nl> } <nl> <nl> void UserDefault : : setDoubleForKey ( const char * pKey , double value ) <nl> void UserDefault : : setDoubleForKey ( const char * pKey , double value ) <nl> deleteNodeByKey ( pKey ) ; <nl> # endif <nl> <nl> - return setDoubleForKeyJNI ( pKey , value ) ; <nl> + JniHelper : : callStaticVoidMethod ( helperClassName , " setDoubleForKey " , pKey , value ) ; <nl> } <nl> <nl> - void UserDefault : : setStringForKey ( const char * pKey , const std : : string & value ) <nl> + void UserDefault : : setStringForKey ( const char * pKey , const std : : string & value ) <nl> { <nl> # ifdef KEEP_COMPATABILITY <nl> deleteNodeByKey ( pKey ) ; <nl> # endif <nl> <nl> - return setStringForKeyJNI ( pKey , value . c_str ( ) ) ; <nl> + JniHelper : : callStaticVoidMethod ( helperClassName , " setStringForKey " , pKey , value ) ; <nl> } <nl> <nl> void UserDefault : : setDataForKey ( const char * pKey , const Data & value ) <nl> void UserDefault : : setDataForKey ( const char * pKey , const Data & value ) <nl> <nl> CCLOG ( " SET DATA ENCODED : - - % s " , encodedData ) ; <nl> <nl> - setStringForKeyJNI ( pKey , encodedData ) ; <nl> + JniHelper : : callStaticVoidMethod ( helperClassName , " setStringForKey " , pKey , ( const char * ) encodedData ) ; <nl> <nl> if ( encodedData ) <nl> free ( encodedData ) ; <nl> void UserDefault : : initXMLFilePath ( ) <nl> if ( ! _isFilePathInitialized ) <nl> { <nl> / / UserDefault . xml is stored in / data / data / < package - path > / before v2 . 1 . 
2 <nl> - _filePath + = " / data / data / " + getPackageNameJNI ( ) + " / " + XML_FILE_NAME ; <nl> + std : : string packageName = JniHelper : : callStaticStringMethod ( helperClassName , " getCocos2dxPackageName " ) ; <nl> + _filePath + = " / data / data / " + packageName + " / " + XML_FILE_NAME ; <nl> _isFilePathInitialized = true ; <nl> } <nl> # endif <nl> void UserDefault : : deleteValueForKey ( const char * key ) <nl> CCLOG ( " the key is invalid " ) ; <nl> } <nl> <nl> - deleteValueForKeyJNI ( key ) ; <nl> + JniHelper : : callStaticVoidMethod ( helperClassName , " deleteValueForKey " , key ) ; <nl> <nl> flush ( ) ; <nl> } <nl> mmm a / cocos / platform / CMakeLists . txt <nl> ppp b / cocos / platform / CMakeLists . txt <nl> set ( COCOS_PLATFORM_SPECIFIC_SRC <nl> platform / android / CCGLViewImpl - android . cpp <nl> platform / android / CCFileUtils - android . cpp <nl> platform / android / javaactivity - android . cpp <nl> - platform / android / jni / DPIJni . cpp <nl> - platform / android / jni / IMEJni . cpp <nl> platform / android / jni / Java_org_cocos2dx_lib_Cocos2dxAccelerometer . cpp <nl> platform / android / jni / Java_org_cocos2dx_lib_Cocos2dxBitmap . cpp <nl> platform / android / jni / Java_org_cocos2dx_lib_Cocos2dxHelper . cpp <nl> mmm a / cocos / platform / android / Android . mk <nl> ppp b / cocos / platform / android / Android . mk <nl> CCGLViewImpl - android . cpp \ <nl> CCFileUtils - android . cpp \ <nl> javaactivity - android . cpp \ <nl> CCEnhanceAPI - android . cpp \ <nl> - jni / DPIJni . cpp \ <nl> - jni / IMEJni . cpp \ <nl> jni / Java_org_cocos2dx_lib_Cocos2dxAccelerometer . cpp \ <nl> jni / Java_org_cocos2dx_lib_Cocos2dxBitmap . cpp \ <nl> jni / Java_org_cocos2dx_lib_Cocos2dxHelper . cpp \ <nl> mmm a / cocos / platform / android / CCApplication - android . cpp <nl> ppp b / cocos / platform / android / CCApplication - android . cpp <nl> THE SOFTWARE . <nl> # include " platform / CCPlatformConfig . h " <nl> # if CC_TARGET_PLATFORM = = CC_PLATFORM_ANDROID <nl> <nl> - # include " jni / JniHelper . h " <nl> - # include " jni / Java_org_cocos2dx_lib_Cocos2dxHelper . h " <nl> + # include " platform / android / jni / JniHelper . h " <nl> # include " CCApplication . h " <nl> # include " base / CCDirector . h " <nl> # include < android / log . h > <nl> extern " C " size_t __ctype_get_mb_cur_max ( void ) { <nl> } <nl> # endif <nl> <nl> + static const std : : string helperClassName = " org / cocos2dx / lib / Cocos2dxHelper " ; <nl> + <nl> NS_CC_BEGIN <nl> <nl> / / sharedApplication pointer <nl> int Application : : run ( ) <nl> return - 1 ; <nl> } <nl> <nl> - void Application : : setAnimationInterval ( float interval ) <nl> - { <nl> - JniMethodInfo methodInfo ; <nl> - if ( ! JniHelper : : getStaticMethodInfo ( methodInfo , " org / cocos2dx / lib / Cocos2dxRenderer " , " setAnimationInterval " , <nl> - " ( F ) V " ) ) <nl> - { <nl> - CCLOG ( " % s % d : error to get methodInfo " , __FILE__ , __LINE__ ) ; <nl> - } <nl> - else <nl> - { <nl> - methodInfo . env - > CallStaticVoidMethod ( methodInfo . classID , methodInfo . 
methodID , interval ) ; <nl> - } <nl> + void Application : : setAnimationInterval ( float interval ) { <nl> + JniHelper : : callStaticVoidMethod ( " org / cocos2dx / lib / Cocos2dxRenderer " , " setAnimationInterval " , interval ) ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> Application * Application : : sharedApplication ( ) <nl> const char * Application : : getCurrentLanguageCode ( ) <nl> { <nl> static char code [ 3 ] = { 0 } ; <nl> - strncpy ( code , getCurrentLanguageJNI ( ) . c_str ( ) , 2 ) ; <nl> + std : : string language = JniHelper : : callStaticStringMethod ( helperClassName , " getCurrentLanguage " ) ; <nl> + strncpy ( code , language . c_str ( ) , 2 ) ; <nl> code [ 2 ] = ' \ 0 ' ; <nl> return code ; <nl> } <nl> <nl> LanguageType Application : : getCurrentLanguage ( ) <nl> { <nl> - std : : string languageName = getCurrentLanguageJNI ( ) ; <nl> + std : : string languageName = JniHelper : : callStaticStringMethod ( helperClassName , " getCurrentLanguage " ) ; <nl> const char * pLanguageName = languageName . c_str ( ) ; <nl> LanguageType ret = LanguageType : : ENGLISH ; <nl> <nl> Application : : Platform Application : : getTargetPlatform ( ) <nl> <nl> bool Application : : openURL ( const std : : string & url ) <nl> { <nl> - return openURLJNI ( url . c_str ( ) ) ; <nl> + return JniHelper : : callStaticBooleanMethod ( helperClassName , " openURL " , url ) ; <nl> } <nl> <nl> void Application : : applicationScreenSizeChanged ( int newWidth , int newHeight ) { <nl> mmm a / cocos / platform / android / CCCommon - android . cpp <nl> ppp b / cocos / platform / android / CCCommon - android . cpp <nl> THE SOFTWARE . <nl> # if CC_TARGET_PLATFORM = = CC_PLATFORM_ANDROID <nl> <nl> # include " platform / CCCommon . h " <nl> - # include " jni / Java_org_cocos2dx_lib_Cocos2dxHelper . h " <nl> + # include " platform / android / jni / JniHelper . h " <nl> # include < android / log . h > <nl> # include < stdio . h > <nl> # include < jni . h > <nl> NS_CC_BEGIN <nl> <nl> void MessageBox ( const char * pszMsg , const char * pszTitle ) <nl> { <nl> - showDialogJNI ( pszMsg , pszTitle ) ; <nl> + JniHelper : : callStaticVoidMethod ( " org / cocos2dx / lib / Cocos2dxHelper " , " showDialog " , pszMsg , pszTitle ) ; <nl> } <nl> <nl> void LuaLog ( const char * pszFormat ) <nl> mmm a / cocos / platform / android / CCDevice - android . cpp <nl> ppp b / cocos / platform / android / CCDevice - android . cpp <nl> THE SOFTWARE . <nl> # include < android / log . h > <nl> # include < jni . h > <nl> # include " base / ccTypes . h " <nl> - # include " jni / DPIJni . h " <nl> - # include " jni / Java_org_cocos2dx_lib_Cocos2dxHelper . h " <nl> - # include " jni / JniHelper . h " <nl> + # include " platform / android / jni / JniHelper . h " <nl> # include " platform / CCFileUtils . 
h " <nl> <nl> + static const std : : string helperClassName = " org / cocos2dx / lib / Cocos2dxHelper " ; <nl> + <nl> NS_CC_BEGIN <nl> <nl> int Device : : getDPI ( ) <nl> int Device : : getDPI ( ) <nl> static int dpi = - 1 ; <nl> if ( dpi = = - 1 ) <nl> { <nl> - dpi = ( int ) getDPIJNI ( ) ; <nl> + dpi = JniHelper : : callStaticIntMethod ( helperClassName , " getDPI " ) ; <nl> } <nl> return dpi ; <nl> } <nl> void Device : : setAccelerometerEnabled ( bool isEnabled ) <nl> { <nl> if ( isEnabled ) <nl> { <nl> - enableAccelerometerJni ( ) ; <nl> + JniHelper : : callStaticVoidMethod ( helperClassName , " enableAccelerometer " ) ; <nl> } <nl> else <nl> { <nl> - disableAccelerometerJni ( ) ; <nl> + JniHelper : : callStaticVoidMethod ( helperClassName , " disableAccelerometer " ) ; <nl> } <nl> } <nl> <nl> void Device : : setAccelerometerInterval ( float interval ) <nl> { <nl> - setAccelerometerIntervalJni ( interval ) ; <nl> + JniHelper : : callStaticVoidMethod ( helperClassName , " setAccelerometerInterval " , interval ) ; <nl> } <nl> <nl> class BitmapDC <nl> Data Device : : getTextureDataForText ( const char * text , const FontDefinition & text <nl> <nl> void Device : : setKeepScreenOn ( bool value ) <nl> { <nl> - setKeepScreenOnJni ( value ) ; <nl> + JniHelper : : callStaticVoidMethod ( helperClassName , " setKeepScreenOn " , value ) ; <nl> } <nl> <nl> void Device : : vibrate ( float duration ) <nl> { <nl> - vibrateJni ( duration ) ; <nl> + JniHelper : : callStaticVoidMethod ( helperClassName , " vibrate " , duration ) ; <nl> } <nl> <nl> NS_CC_END <nl> mmm a / cocos / platform / android / CCFileUtils - android . cpp <nl> ppp b / cocos / platform / android / CCFileUtils - android . cpp <nl> THE SOFTWARE . <nl> <nl> # include " CCFileUtils - android . h " <nl> # include " platform / CCCommon . h " <nl> - # include " jni / Java_org_cocos2dx_lib_Cocos2dxHelper . h " <nl> + # include " platform / android / jni / JniHelper . h " <nl> + # include " platform / android / jni / CocosPlayClient . h " <nl> # include " android / asset_manager . h " <nl> # include " android / asset_manager_jni . h " <nl> - # include " jni / CocosPlayClient . h " <nl> # include < stdlib . h > <nl> # include < sys / stat . h > <nl> <nl> string FileUtilsAndroid : : getWritablePath ( ) const <nl> / / Fix for Nexus 10 ( Android 4 . 2 multi - user environment ) <nl> / / the path is retrieved through Java Context . getCacheDir ( ) method <nl> string dir ( " " ) ; <nl> - string tmp = getFileDirectoryJNI ( ) ; <nl> + string tmp = JniHelper : : callStaticStringMethod ( " org / cocos2dx / lib / Cocos2dxHelper " , " getCocos2dxWritablePath " ) ; <nl> <nl> if ( tmp . length ( ) > 0 ) <nl> { <nl> mmm a / cocos / platform / android / CCGLViewImpl - android . cpp <nl> ppp b / cocos / platform / android / CCGLViewImpl - android . cpp <nl> THE SOFTWARE . <nl> # include " CCGLViewImpl - android . h " <nl> # include " base / CCDirector . h " <nl> # include " base / ccMacros . h " <nl> - # include " jni / IMEJni . h " <nl> # include " jni / JniHelper . h " <nl> - # include " jni / Java_org_cocos2dx_lib_Cocos2dxHelper . h " <nl> # include " CCGL . h " <nl> <nl> # include < stdlib . 
h > <nl> bool GLViewImpl : : isOpenGLReady ( ) <nl> <nl> void GLViewImpl : : end ( ) <nl> { <nl> - terminateProcessJNI ( ) ; <nl> + JniHelper : : callStaticVoidMethod ( " org / cocos2dx / lib / Cocos2dxHelper " , " terminateProcess " ) ; <nl> } <nl> <nl> void GLViewImpl : : swapBuffers ( ) <nl> void GLViewImpl : : swapBuffers ( ) <nl> <nl> void GLViewImpl : : setIMEKeyboardState ( bool bOpen ) <nl> { <nl> - setKeyboardStateJNI ( ( int ) bOpen ) ; <nl> + if ( bOpen ) { <nl> + JniHelper : : callStaticVoidMethod ( " org / cocos2dx / lib / Cocos2dxGLSurfaceView " , " openIMEKeyboard " ) ; <nl> + } else { <nl> + JniHelper : : callStaticVoidMethod ( " org / cocos2dx / lib / Cocos2dxGLSurfaceView " , " closeIMEKeyboard " ) ; <nl> + } <nl> } <nl> <nl> NS_CC_END <nl> deleted file mode 100644 <nl> index 74ed9f90f022 . . 000000000000 <nl> mmm a / cocos / platform / android / jni / DPIJni . cpp <nl> ppp / dev / null <nl> <nl> - # include " DPIJni . h " <nl> - # include " jni / JniHelper . h " <nl> - <nl> - USING_NS_CC ; <nl> - <nl> - extern " C " { <nl> - <nl> - int getDPIJNI ( ) <nl> - { <nl> - JniMethodInfo t ; <nl> - jint ret = - 1 ; <nl> - if ( JniHelper : : getStaticMethodInfo ( t , " org / cocos2dx / lib / Cocos2dxHelper " , " getDPI " , " ( ) I " ) ) { <nl> - ret = t . env - > CallStaticIntMethod ( t . classID , t . methodID ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - return ret ; <nl> - } <nl> - <nl> - } / / extern " C " <nl> deleted file mode 100644 <nl> index ee81606d6fa4 . . 000000000000 <nl> mmm a / cocos / platform / android / jni / DPIJni . h <nl> ppp / dev / null <nl> <nl> - # ifndef __DPIJNI_H__ <nl> - # define __DPIJNI_H__ <nl> - <nl> - extern " C " { <nl> - <nl> - int getDPIJNI ( ) ; <nl> - <nl> - } / / extern " C " <nl> - <nl> - # endif / * __DPIJNI_H__ * / <nl> deleted file mode 100644 <nl> index 8d9c00d5ee67 . . 000000000000 <nl> mmm a / cocos / platform / android / jni / IMEJni . cpp <nl> ppp / dev / null <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - Copyright ( c ) 2011 - 2012 cocos2d - x . org <nl> - Copyright ( c ) 2013 - 2014 Chukong Technologies Inc . <nl> - <nl> - http : / / www . cocos2d - x . org <nl> - <nl> - Permission is hereby granted , free of charge , to any person obtaining a copy <nl> - of this software and associated documentation files ( the " Software " ) , to deal <nl> - in the Software without restriction , including without limitation the rights <nl> - to use , copy , modify , merge , publish , distribute , sublicense , and / or sell <nl> - copies of the Software , and to permit persons to whom the Software is <nl> - furnished to do so , subject to the following conditions : <nl> - <nl> - The above copyright notice and this permission notice shall be included in <nl> - all copies or substantial portions of the Software . <nl> - <nl> - THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR <nl> - IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY , <nl> - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE <nl> - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER <nl> - LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM , <nl> - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN <nl> - THE SOFTWARE . 
<nl> - * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - # include " IMEJni . h " <nl> - # include " base / CCIMEDispatcher . h " <nl> - # include " JniHelper . h " <nl> - <nl> - # include < android / log . h > <nl> - # include < string . h > <nl> - # include < jni . h > <nl> - <nl> - using namespace cocos2d ; <nl> - <nl> - extern " C " { <nl> - void setKeyboardStateJNI ( int bOpen ) { <nl> - if ( bOpen ) { <nl> - openKeyboardJNI ( ) ; <nl> - } else { <nl> - closeKeyboardJNI ( ) ; <nl> - } <nl> - } <nl> - <nl> - void openKeyboardJNI ( ) { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , " org / cocos2dx / lib / Cocos2dxGLSurfaceView " , " openIMEKeyboard " , " ( ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> - <nl> - void closeKeyboardJNI ( ) { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , " org / cocos2dx / lib / Cocos2dxGLSurfaceView " , " closeIMEKeyboard " , " ( ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> - } <nl> deleted file mode 100644 <nl> index 4dfba6c74656 . . 000000000000 <nl> mmm a / cocos / platform / android / jni / IMEJni . h <nl> ppp / dev / null <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - Copyright ( c ) 2011 - 2012 cocos2d - x . org <nl> - Copyright ( c ) 2013 - 2014 Chukong Technologies Inc . <nl> - <nl> - http : / / www . cocos2d - x . org <nl> - <nl> - Permission is hereby granted , free of charge , to any person obtaining a copy <nl> - of this software and associated documentation files ( the " Software " ) , to deal <nl> - in the Software without restriction , including without limitation the rights <nl> - to use , copy , modify , merge , publish , distribute , sublicense , and / or sell <nl> - copies of the Software , and to permit persons to whom the Software is <nl> - furnished to do so , subject to the following conditions : <nl> - <nl> - The above copyright notice and this permission notice shall be included in <nl> - all copies or substantial portions of the Software . <nl> - <nl> - THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR <nl> - IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY , <nl> - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE <nl> - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER <nl> - LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM , <nl> - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN <nl> - THE SOFTWARE . <nl> - * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - # ifndef __ANDROID_IME_JNI_H__ <nl> - # define __ANDROID_IME_JNI_H__ <nl> - <nl> - extern " C " { <nl> - extern void setKeyboardStateJNI ( int open ) ; <nl> - extern void openKeyboardJNI ( ) ; <nl> - extern void closeKeyboardJNI ( ) ; <nl> - } <nl> - <nl> - # endif / / __ANDROID_IME_JNI_H__ <nl> mmm a / cocos / platform / android / jni / Java_org_cocos2dx_lib_Cocos2dxBitmap . 
cpp <nl> ppp b / cocos / platform / android / jni / Java_org_cocos2dx_lib_Cocos2dxBitmap . cpp <nl> OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN <nl> THE SOFTWARE . <nl> * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> <nl> - # include " JniHelper . h " <nl> + # include " platform / android / jni / JniHelper . h " <nl> # include < string . h > <nl> # include " base / CCDirector . h " <nl> # include " . . / CCApplication . h " <nl> # include " platform / CCFileUtils . h " <nl> - # include < jni . h > <nl> # include " base / ccUTF8 . h " <nl> <nl> - using namespace cocos2d ; <nl> + static const std : : string className = " org / cocos2dx / lib / Cocos2dxBitmap " ; <nl> <nl> + using namespace cocos2d ; <nl> <nl> int getFontSizeAccordingHeightJni ( int height ) { <nl> - int ret = 0 ; <nl> - <nl> - JniMethodInfo t ; <nl> - if ( JniHelper : : getStaticMethodInfo ( t , " org / cocos2dx / lib / Cocos2dxBitmap " , " getFontSizeAccordingHeight " , " ( I ) I " ) ) { <nl> - ret = t . env - > CallStaticIntMethod ( t . classID , t . methodID , height ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - <nl> - return ret ; <nl> + return JniHelper : : callStaticIntMethod ( className , " getFontSizeAccordingHeight " , height ) ; <nl> } <nl> <nl> std : : string getStringWithEllipsisJni ( const char * text , float width , float fontSize ) { <nl> - std : : string ret ; <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , " org / cocos2dx / lib / Cocos2dxBitmap " , " getStringWithEllipsis " , " ( Ljava / lang / String ; FF ) Ljava / lang / String ; " ) ) { <nl> - jstring stringArg1 ; <nl> - <nl> - if ( ! text ) { <nl> - stringArg1 = t . env - > NewStringUTF ( " " ) ; <nl> - } else { <nl> - stringArg1 = t . env - > NewStringUTF ( text ) ; <nl> - } <nl> - <nl> - jstring retFromJava = ( jstring ) t . env - > CallStaticObjectMethod ( t . classID , t . methodID , stringArg1 , width , fontSize ) ; <nl> - ret = cocos2d : : StringUtils : : getStringUTFCharsJNI ( t . env , retFromJava ) ; <nl> - <nl> - t . env - > DeleteLocalRef ( stringArg1 ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - return ret ; <nl> + return JniHelper : : callStaticStringMethod ( className , " getStringWithEllipsis " , text , width , fontSize ) ; <nl> } <nl> <nl> mmm a / cocos / platform / android / jni / Java_org_cocos2dx_lib_Cocos2dxHelper . cpp <nl> ppp b / cocos / platform / android / jni / Java_org_cocos2dx_lib_Cocos2dxHelper . cpp <nl> THE SOFTWARE . <nl> # define LOG_TAG " Java_org_cocos2dx_lib_Cocos2dxHelper . cpp " <nl> # define LOGD ( . . . ) __android_log_print ( ANDROID_LOG_DEBUG , LOG_TAG , __VA_ARGS__ ) <nl> <nl> - # define CLASS_NAME " org / cocos2dx / lib / Cocos2dxHelper " <nl> - # define EDITBOX_CLASS_NAME " org / cocos2dx / lib / Cocos2dxEditBoxHelper " <nl> + static const std : : string className = " org / cocos2dx / lib / Cocos2dxHelper " ; <nl> <nl> static EditTextCallback s_editTextCallback = nullptr ; <nl> static void * s_ctx = nullptr ; <nl> const char * getApkPath ( ) { <nl> return g_apkPath . c_str ( ) ; <nl> } <nl> <nl> - void showDialogJNI ( const char * message , const char * title ) { <nl> - if ( ! 
message ) { <nl> - return ; <nl> - } <nl> - <nl> - JniMethodInfo t ; <nl> - if ( JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " showDialog " , " ( Ljava / lang / String ; Ljava / lang / String ; ) V " ) ) { <nl> - jstring stringArg1 ; <nl> - <nl> - if ( ! title ) { <nl> - stringArg1 = t . env - > NewStringUTF ( " " ) ; <nl> - } else { <nl> - stringArg1 = t . env - > NewStringUTF ( title ) ; <nl> - } <nl> - <nl> - jstring stringArg2 = t . env - > NewStringUTF ( message ) ; <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , stringArg1 , stringArg2 ) ; <nl> - <nl> - t . env - > DeleteLocalRef ( stringArg1 ) ; <nl> - t . env - > DeleteLocalRef ( stringArg2 ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> - <nl> - void terminateProcessJNI ( ) { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " terminateProcess " , " ( ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> - <nl> - std : : string getPackageNameJNI ( ) { <nl> - JniMethodInfo t ; <nl> - std : : string ret ( " " ) ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " getCocos2dxPackageName " , " ( ) Ljava / lang / String ; " ) ) { <nl> - jstring str = ( jstring ) t . env - > CallStaticObjectMethod ( t . classID , t . methodID ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - ret = JniHelper : : jstring2string ( str ) ; <nl> - t . env - > DeleteLocalRef ( str ) ; <nl> - } <nl> - return ret ; <nl> - } <nl> - <nl> - std : : string getFileDirectoryJNI ( ) { <nl> - JniMethodInfo t ; <nl> - std : : string ret ( " " ) ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " getCocos2dxWritablePath " , " ( ) Ljava / lang / String ; " ) ) { <nl> - jstring str = ( jstring ) t . env - > CallStaticObjectMethod ( t . classID , t . methodID ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - ret = JniHelper : : jstring2string ( str ) ; <nl> - t . env - > DeleteLocalRef ( str ) ; <nl> - } <nl> - <nl> - return ret ; <nl> - } <nl> - <nl> - std : : string getCurrentLanguageJNI ( ) { <nl> - JniMethodInfo t ; <nl> - std : : string ret ( " " ) ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " getCurrentLanguage " , " ( ) Ljava / lang / String ; " ) ) { <nl> - jstring str = ( jstring ) t . env - > CallStaticObjectMethod ( t . classID , t . methodID ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - ret = JniHelper : : jstring2string ( str ) ; <nl> - t . env - > DeleteLocalRef ( str ) ; <nl> - } <nl> - <nl> - return ret ; <nl> - } <nl> - <nl> - void enableAccelerometerJni ( ) { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " enableAccelerometer " , " ( ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> - <nl> - void setAccelerometerIntervalJni ( float interval ) { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " setAccelerometerInterval " , " ( F ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , interval ) ; <nl> - t . env - > DeleteLocalRef ( t . 
classID ) ; <nl> - } <nl> - } <nl> - <nl> - void disableAccelerometerJni ( ) { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " disableAccelerometer " , " ( ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> - <nl> - void setKeepScreenOnJni ( bool value ) { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " setKeepScreenOn " , " ( Z ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , value ) ; <nl> - <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> - <nl> - void vibrateJni ( float duration ) { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " vibrate " , " ( F ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , duration ) ; <nl> - <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> - <nl> - extern bool openURLJNI ( const char * url ) { <nl> - JniMethodInfo t ; <nl> - <nl> - bool ret = false ; <nl> - if ( JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " openURL " , " ( Ljava / lang / String ; ) Z " ) ) { <nl> - jstring stringArg = t . env - > NewStringUTF ( url ) ; <nl> - ret = t . env - > CallStaticBooleanMethod ( t . classID , t . methodID , stringArg ) ; <nl> - <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - t . env - > DeleteLocalRef ( stringArg ) ; <nl> - } <nl> - return ret ; <nl> - } <nl> - <nl> - / / functions for UserDefault <nl> - bool getBoolForKeyJNI ( const char * key , bool defaultValue ) <nl> - { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " getBoolForKey " , " ( Ljava / lang / String ; Z ) Z " ) ) { <nl> - jstring stringArg = t . env - > NewStringUTF ( key ) ; <nl> - jboolean ret = t . env - > CallStaticBooleanMethod ( t . classID , t . methodID , stringArg , defaultValue ) ; <nl> - <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - t . env - > DeleteLocalRef ( stringArg ) ; <nl> - <nl> - return ret ; <nl> - } <nl> - <nl> - return defaultValue ; <nl> - } <nl> - <nl> - int getIntegerForKeyJNI ( const char * key , int defaultValue ) <nl> - { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " getIntegerForKey " , " ( Ljava / lang / String ; I ) I " ) ) { <nl> - jstring stringArg = t . env - > NewStringUTF ( key ) ; <nl> - jint ret = t . env - > CallStaticIntMethod ( t . classID , t . methodID , stringArg , defaultValue ) ; <nl> - <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - t . env - > DeleteLocalRef ( stringArg ) ; <nl> - <nl> - return ret ; <nl> - } <nl> - <nl> - return defaultValue ; <nl> - } <nl> - <nl> - float getFloatForKeyJNI ( const char * key , float defaultValue ) <nl> - { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " getFloatForKey " , " ( Ljava / lang / String ; F ) F " ) ) { <nl> - jstring stringArg = t . env - > NewStringUTF ( key ) ; <nl> - jfloat ret = t . env - > CallStaticFloatMethod ( t . classID , t . methodID , stringArg , defaultValue ) ; <nl> - <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - t . 
env - > DeleteLocalRef ( stringArg ) ; <nl> - <nl> - return ret ; <nl> - } <nl> - <nl> - return defaultValue ; <nl> - } <nl> - <nl> - double getDoubleForKeyJNI ( const char * key , double defaultValue ) <nl> - { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " getDoubleForKey " , " ( Ljava / lang / String ; D ) D " ) ) { <nl> - jstring stringArg = t . env - > NewStringUTF ( key ) ; <nl> - jdouble ret = t . env - > CallStaticDoubleMethod ( t . classID , t . methodID , stringArg , defaultValue ) ; <nl> - <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - t . env - > DeleteLocalRef ( stringArg ) ; <nl> - <nl> - return ret ; <nl> - } <nl> - <nl> - return defaultValue ; <nl> - } <nl> - <nl> - std : : string getStringForKeyJNI ( const char * key , const char * defaultValue ) <nl> - { <nl> - JniMethodInfo t ; <nl> - std : : string ret ( " " ) ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " getStringForKey " , " ( Ljava / lang / String ; Ljava / lang / String ; ) Ljava / lang / String ; " ) ) { <nl> - jstring stringArg1 = t . env - > NewStringUTF ( key ) ; <nl> - jstring stringArg2 = t . env - > NewStringUTF ( defaultValue ) ; <nl> - jstring str = ( jstring ) t . env - > CallStaticObjectMethod ( t . classID , t . methodID , stringArg1 , stringArg2 ) ; <nl> - ret = JniHelper : : jstring2string ( str ) ; <nl> - <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - t . env - > DeleteLocalRef ( stringArg1 ) ; <nl> - t . env - > DeleteLocalRef ( stringArg2 ) ; <nl> - t . env - > DeleteLocalRef ( str ) ; <nl> - <nl> - return ret ; <nl> - } <nl> - <nl> - return defaultValue ; <nl> - } <nl> - <nl> - void setBoolForKeyJNI ( const char * key , bool value ) <nl> - { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " setBoolForKey " , " ( Ljava / lang / String ; Z ) V " ) ) { <nl> - jstring stringArg = t . env - > NewStringUTF ( key ) ; <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , stringArg , value ) ; <nl> - <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - t . env - > DeleteLocalRef ( stringArg ) ; <nl> - } <nl> - } <nl> - <nl> - void setIntegerForKeyJNI ( const char * key , int value ) <nl> - { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " setIntegerForKey " , " ( Ljava / lang / String ; I ) V " ) ) { <nl> - jstring stringArg = t . env - > NewStringUTF ( key ) ; <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , stringArg , value ) ; <nl> - <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - t . env - > DeleteLocalRef ( stringArg ) ; <nl> - } <nl> - } <nl> - <nl> - void setFloatForKeyJNI ( const char * key , float value ) <nl> - { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " setFloatForKey " , " ( Ljava / lang / String ; F ) V " ) ) { <nl> - jstring stringArg = t . env - > NewStringUTF ( key ) ; <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , stringArg , value ) ; <nl> - <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - t . env - > DeleteLocalRef ( stringArg ) ; <nl> - } <nl> - } <nl> - <nl> - void setDoubleForKeyJNI ( const char * key , double value ) <nl> - { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " setDoubleForKey " , " ( Ljava / lang / String ; D ) V " ) ) { <nl> - jstring stringArg = t . 
env - > NewStringUTF ( key ) ; <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , stringArg , value ) ; <nl> - <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - t . env - > DeleteLocalRef ( stringArg ) ; <nl> - } <nl> - } <nl> - <nl> - void setStringForKeyJNI ( const char * key , const char * value ) <nl> - { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " setStringForKey " , " ( Ljava / lang / String ; Ljava / lang / String ; ) V " ) ) { <nl> - jstring stringArg1 = t . env - > NewStringUTF ( key ) ; <nl> - jstring stringArg2 = t . env - > NewStringUTF ( value ) ; <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , stringArg1 , stringArg2 ) ; <nl> - <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - t . env - > DeleteLocalRef ( stringArg1 ) ; <nl> - t . env - > DeleteLocalRef ( stringArg2 ) ; <nl> - } <nl> - } <nl> - <nl> - void deleteValueForKeyJNI ( const char * key ) <nl> - { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " deleteValueForKey " , " ( Ljava / lang / String ; ) V " ) ) { <nl> - jstring stringArg1 = t . env - > NewStringUTF ( key ) ; <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , stringArg1 ) ; <nl> - <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - t . env - > DeleteLocalRef ( stringArg1 ) ; <nl> - } <nl> - } <nl> - <nl> - int addEditBoxJNI ( int left , int top , int width , int height , float scaleX ) { <nl> - JniMethodInfo t ; <nl> - <nl> - int ret = - 1 ; <nl> - if ( JniHelper : : getStaticMethodInfo ( t , EDITBOX_CLASS_NAME , " createEditBox " , " ( IIIIF ) I " ) ) { <nl> - ret = t . env - > CallStaticIntMethod ( t . classID , t . methodID , left , top , width , height , scaleX ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - return ret ; <nl> - } <nl> - <nl> - void removeEditBoxJNI ( int index ) <nl> - { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , EDITBOX_CLASS_NAME , " removeEditBox " , " ( I ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> - <nl> - void setEditBoxViewRectJNI ( int index , int left , int top , int width , int height ) <nl> - { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , EDITBOX_CLASS_NAME , " setEditBoxViewRect " , " ( IIIII ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index , left , top , width , height ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> - <nl> - void setMaxLengthJNI ( int index , int maxLength ) <nl> - { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , EDITBOX_CLASS_NAME , " setMaxLength " , " ( II ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index , maxLength ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> - <nl> - void openEditBoxKeyboardJNI ( int index ) <nl> - { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , EDITBOX_CLASS_NAME , " openKeyboard " , " ( I ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index ) ; <nl> - t . env - > DeleteLocalRef ( t . 
classID ) ; <nl> - } <nl> - } <nl> - <nl> - void closeEditBoxKeyboardJNI ( int index ) <nl> - { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , EDITBOX_CLASS_NAME , " closeKeyboard " , " ( I ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> - <nl> - void setVisibleEditBoxJNI ( int index , bool visibility ) <nl> - { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , EDITBOX_CLASS_NAME , " setVisible " , " ( IZ ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index , visibility ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> - <nl> - void setReturnTypeEditBoxJNI ( int index , int returnType ) <nl> - { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , EDITBOX_CLASS_NAME , " setReturnType " , " ( II ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index , returnType ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> - <nl> - void setInputFlagEditBoxJNI ( int index , int returnType ) <nl> - { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , EDITBOX_CLASS_NAME , " setInputFlag " , " ( II ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index , returnType ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> - <nl> - void setInputModeEditBoxJNI ( int index , int inputMode ) <nl> - { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , EDITBOX_CLASS_NAME , " setInputMode " , " ( II ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index , inputMode ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> - <nl> - void setTextEditBoxJNI ( int index , const char * text ) <nl> - { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , EDITBOX_CLASS_NAME , " setText " , " ( ILjava / lang / String ; ) V " ) ) { <nl> - jstring stringText = StringUtils : : newStringUTFJNI ( t . env , text ) ; <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index , stringText ) ; <nl> - t . env - > DeleteLocalRef ( stringText ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> - <nl> - void setFontEditBoxJNI ( int index , const char * fontName , float fontSize ) <nl> - { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , EDITBOX_CLASS_NAME , " setFont " , " ( ILjava / lang / String ; F ) V " ) ) { <nl> - jstring stringText = StringUtils : : newStringUTFJNI ( t . env , fontName ) ; <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index , stringText , fontSize ) ; <nl> - <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - t . env - > DeleteLocalRef ( stringText ) ; <nl> - } <nl> - } <nl> - <nl> - void setFontColorEditBoxJNI ( int index , int red , int green , int blue , int alpha ) <nl> - { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , EDITBOX_CLASS_NAME , " setFontColor " , " ( IIIII ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index , red , green , blue , alpha ) ; <nl> - <nl> - t . env - > DeleteLocalRef ( t . 
classID ) ; <nl> - } <nl> - } <nl> - <nl> - void setPlaceHolderTextEditBoxJNI ( int index , const char * text ) <nl> - { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , EDITBOX_CLASS_NAME , " setPlaceHolderText " , " ( ILjava / lang / String ; ) V " ) ) { <nl> - jstring stringText = StringUtils : : newStringUTFJNI ( t . env , text ) ; <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index , stringText ) ; <nl> - <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - t . env - > DeleteLocalRef ( stringText ) ; <nl> - } <nl> - <nl> - } <nl> - <nl> - void setPlaceHolderTextColorEditBoxJNI ( int index , int red , int green , int blue , int alpha ) <nl> - { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , EDITBOX_CLASS_NAME , " setPlaceHolderTextColor " , " ( IIIII ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index , red , green , blue , alpha ) ; <nl> - <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> - <nl> void conversionEncodingJNI ( const char * src , int byteSize , const char * fromCharset , char * dst , const char * newCharset ) <nl> { <nl> JniMethodInfo methodInfo ; <nl> <nl> - if ( JniHelper : : getStaticMethodInfo ( methodInfo , CLASS_NAME , " conversionEncoding " , " ( [ BLjava / lang / String ; Ljava / lang / String ; ) [ B " ) ) { <nl> + if ( JniHelper : : getStaticMethodInfo ( methodInfo , className . c_str ( ) , " conversionEncoding " , " ( [ BLjava / lang / String ; Ljava / lang / String ; ) [ B " ) ) { <nl> jbyteArray strArray = methodInfo . env - > NewByteArray ( byteSize ) ; <nl> methodInfo . env - > SetByteArrayRegion ( strArray , 0 , byteSize , reinterpret_cast < const jbyte * > ( src ) ) ; <nl> <nl> mmm a / cocos / platform / android / jni / Java_org_cocos2dx_lib_Cocos2dxHelper . h <nl> ppp b / cocos / platform / android / jni / Java_org_cocos2dx_lib_Cocos2dxHelper . 
h <nl> typedef void ( * EditTextCallback ) ( const char * text , void * ctx ) ; <nl> <nl> extern const char * getApkPath ( ) ; <nl> extern void showDialogJNI ( const char * message , const char * title ) ; <nl> - extern void terminateProcessJNI ( ) ; <nl> - extern std : : string getCurrentLanguageJNI ( ) ; <nl> - extern std : : string getPackageNameJNI ( ) ; <nl> + / / extern void terminateProcessJNI ( ) ; <nl> + / / extern std : : string getCurrentLanguageJNI ( ) ; <nl> + / / extern std : : string getPackageNameJNI ( ) ; <nl> extern std : : string getFileDirectoryJNI ( ) ; <nl> - extern void enableAccelerometerJni ( ) ; <nl> - extern void disableAccelerometerJni ( ) ; <nl> - extern void setAccelerometerIntervalJni ( float interval ) ; <nl> - extern void setKeepScreenOnJni ( bool value ) ; <nl> - extern void vibrateJni ( float duration ) ; <nl> - extern bool openURLJNI ( const char * url ) ; <nl> / / functions for UserDefault <nl> - extern bool getBoolForKeyJNI ( const char * key , bool defaultValue ) ; <nl> - extern int getIntegerForKeyJNI ( const char * key , int defaultValue ) ; <nl> - extern float getFloatForKeyJNI ( const char * key , float defaultValue ) ; <nl> - extern double getDoubleForKeyJNI ( const char * key , double defaultValue ) ; <nl> - extern std : : string getStringForKeyJNI ( const char * key , const char * defaultValue ) ; <nl> - extern void setBoolForKeyJNI ( const char * key , bool value ) ; <nl> - extern void setIntegerForKeyJNI ( const char * key , int value ) ; <nl> - extern void setFloatForKeyJNI ( const char * key , float value ) ; <nl> - extern void setDoubleForKeyJNI ( const char * key , double value ) ; <nl> - extern void setStringForKeyJNI ( const char * key , const char * value ) ; <nl> - extern void deleteValueForKeyJNI ( const char * key ) ; <nl> + / / extern bool getBoolForKeyJNI ( const char * key , bool defaultValue ) ; <nl> + / / extern int getIntegerForKeyJNI ( const char * key , int defaultValue ) ; <nl> + / / extern float getFloatForKeyJNI ( const char * key , float defaultValue ) ; <nl> + / / extern double getDoubleForKeyJNI ( const char * key , double defaultValue ) ; <nl> + / / extern std : : string getStringForKeyJNI ( const char * key , const char * defaultValue ) ; <nl> + / / extern void setBoolForKeyJNI ( const char * key , bool value ) ; <nl> + / / extern void setIntegerForKeyJNI ( const char * key , int value ) ; <nl> + / / extern void setFloatForKeyJNI ( const char * key , float value ) ; <nl> + / / extern void setDoubleForKeyJNI ( const char * key , double value ) ; <nl> + / / extern void setStringForKeyJNI ( const char * key , const char * value ) ; <nl> + / / extern void deleteValueForKeyJNI ( const char * key ) ; <nl> extern void conversionEncodingJNI ( const char * src , int byteSize , const char * fromCharset , char * dst , const char * newCharset ) ; <nl> / / Added for new Android EditBox <nl> extern int addEditBoxJNI ( int left , int top , int width , int height , float scaleX ) ; <nl> mmm a / cocos / platform / android / jni / JniHelper . cpp <nl> ppp b / cocos / platform / android / jni / JniHelper . cpp <nl> namespace cocos2d { <nl> } <nl> <nl> jstring JniHelper : : convert ( cocos2d : : JniMethodInfo & t , const char * x ) { <nl> - jstring ret = t . env - > NewStringUTF ( x ? x : " " ) ; <nl> + jstring ret = cocos2d : : StringUtils : : newStringUTFJNI ( t . env , x ? x : " " ) ; <nl> localRefs [ t . env ] . push_back ( ret ) ; <nl> return ret ; <nl> } <nl> namespace cocos2d { <nl> localRefs [ env ] . 
clear ( ) ; <nl> } <nl> <nl> + void JniHelper : : reportError ( const std : : string & className , const std : : string & methodName , const std : : string & signature ) { <nl> + LOGE ( " Failed to find static java method . Class name : % s , method name : % s , signature : % s " , className . c_str ( ) , methodName . c_str ( ) , signature . c_str ( ) ) ; <nl> + } <nl> <nl> } / / namespace cocos2d <nl> mmm a / cocos / platform / android / jni / JniHelper . h <nl> ppp b / cocos / platform / android / jni / JniHelper . h <nl> class CC_DLL JniHelper <nl> t . env - > CallStaticVoidMethod ( t . classID , t . methodID , convert ( t , xs ) . . . ) ; <nl> t . env - > DeleteLocalRef ( t . classID ) ; <nl> deleteLocalRefs ( t . env ) ; <nl> + } else { <nl> + reportError ( className , methodName , signature ) ; <nl> } <nl> } <nl> <nl> class CC_DLL JniHelper <nl> jret = t . env - > CallStaticBooleanMethod ( t . classID , t . methodID , convert ( t , xs ) . . . ) ; <nl> t . env - > DeleteLocalRef ( t . classID ) ; <nl> deleteLocalRefs ( t . env ) ; <nl> + } else { <nl> + reportError ( className , methodName , signature ) ; <nl> } <nl> return ( jret = = JNI_TRUE ) ; <nl> } <nl> <nl> template < typename . . . Ts > <nl> static int callStaticIntMethod ( const std : : string & className , <nl> - const std : : string & methodName , <nl> - Ts . . . xs ) { <nl> + const std : : string & methodName , <nl> + Ts . . . xs ) { <nl> jint ret = 0 ; <nl> cocos2d : : JniMethodInfo t ; <nl> std : : string signature = " ( " + std : : string ( getJNISignature ( xs . . . ) ) + " ) I " ; <nl> class CC_DLL JniHelper <nl> ret = t . env - > CallStaticIntMethod ( t . classID , t . methodID , convert ( t , xs ) . . . ) ; <nl> t . env - > DeleteLocalRef ( t . classID ) ; <nl> deleteLocalRefs ( t . env ) ; <nl> + } else { <nl> + reportError ( className , methodName , signature ) ; <nl> } <nl> return ret ; <nl> } <nl> <nl> template < typename . . . Ts > <nl> static float callStaticFloatMethod ( const std : : string & className , <nl> - const std : : string & methodName , <nl> - Ts . . . xs ) { <nl> + const std : : string & methodName , <nl> + Ts . . . xs ) { <nl> jfloat ret = 0 . 0 ; <nl> cocos2d : : JniMethodInfo t ; <nl> std : : string signature = " ( " + std : : string ( getJNISignature ( xs . . . ) ) + " ) F " ; <nl> class CC_DLL JniHelper <nl> ret = t . env - > CallStaticFloatMethod ( t . classID , t . methodID , convert ( t , xs ) . . . ) ; <nl> t . env - > DeleteLocalRef ( t . classID ) ; <nl> deleteLocalRefs ( t . env ) ; <nl> + } else { <nl> + reportError ( className , methodName , signature ) ; <nl> } <nl> return ret ; <nl> } <nl> <nl> template < typename . . . Ts > <nl> static double callStaticDoubleMethod ( const std : : string & className , <nl> - const std : : string & methodName , <nl> - Ts . . . xs ) { <nl> + const std : : string & methodName , <nl> + Ts . . . xs ) { <nl> jdouble ret = 0 . 0 ; <nl> cocos2d : : JniMethodInfo t ; <nl> std : : string signature = " ( " + std : : string ( getJNISignature ( xs . . . ) ) + " ) D " ; <nl> class CC_DLL JniHelper <nl> ret = t . env - > CallStaticDoubleMethod ( t . classID , t . methodID , convert ( t , xs ) . . . ) ; <nl> t . env - > DeleteLocalRef ( t . classID ) ; <nl> deleteLocalRefs ( t . env ) ; <nl> + } else { <nl> + reportError ( className , methodName , signature ) ; <nl> } <nl> return ret ; <nl> } <nl> class CC_DLL JniHelper <nl> t . env - > DeleteLocalRef ( t . classID ) ; <nl> t . env - > DeleteLocalRef ( jret ) ; <nl> deleteLocalRefs ( t . 
env ) ; <nl> + } else { <nl> + reportError ( className , methodName , signature ) ; <nl> } <nl> return ret ; <nl> } <nl> class CC_DLL JniHelper <nl> <nl> static jstring convert ( cocos2d : : JniMethodInfo & t , const std : : string & x ) ; <nl> <nl> - static std : : unordered_map < JNIEnv * , std : : vector < jobject > > localRefs ; <nl> - <nl> - static void deleteLocalRefs ( JNIEnv * env ) ; <nl> - <nl> template < typename T > <nl> static T convert ( cocos2d : : JniMethodInfo & , T x ) { <nl> return x ; <nl> } <nl> <nl> - static constexpr const char * getJNISignature ( ) { <nl> + static std : : unordered_map < JNIEnv * , std : : vector < jobject > > localRefs ; <nl> + <nl> + static void deleteLocalRefs ( JNIEnv * env ) ; <nl> + <nl> + static std : : string getJNISignature ( ) { <nl> return " " ; <nl> } <nl> <nl> - static constexpr const char * getJNISignature ( bool ) { <nl> + static std : : string getJNISignature ( bool ) { <nl> return " Z " ; <nl> } <nl> <nl> - static constexpr const char * getJNISignature ( char ) { <nl> + static std : : string getJNISignature ( char ) { <nl> return " C " ; <nl> } <nl> <nl> - static constexpr const char * getJNISignature ( short ) { <nl> + static std : : string getJNISignature ( short ) { <nl> return " S " ; <nl> } <nl> <nl> - static constexpr const char * getJNISignature ( int ) { <nl> + static std : : string getJNISignature ( int ) { <nl> return " I " ; <nl> } <nl> <nl> - static constexpr const char * getJNISignature ( long ) { <nl> + static std : : string getJNISignature ( long ) { <nl> return " J " ; <nl> } <nl> <nl> - static constexpr const char * getJNISignature ( float ) { <nl> + static std : : string getJNISignature ( float ) { <nl> return " F " ; <nl> } <nl> <nl> - static constexpr const char * getJNISignature ( double ) { <nl> + static std : : string getJNISignature ( double ) { <nl> return " D " ; <nl> } <nl> <nl> - static constexpr const char * getJNISignature ( const char * ) { <nl> + static std : : string getJNISignature ( const char * ) { <nl> return " Ljava / lang / String ; " ; <nl> } <nl> <nl> - static constexpr const char * getJNISignature ( const std : : string & ) { <nl> + static std : : string getJNISignature ( const std : : string & ) { <nl> return " Ljava / lang / String ; " ; <nl> } <nl> <nl> template < typename T > <nl> - static constexpr const char * getJNISignature ( T x ) { <nl> + static std : : string getJNISignature ( T x ) { <nl> / / This template should never be instantiated <nl> static_assert ( sizeof ( x ) = = 0 , " Unsupported argument type " ) ; <nl> return " " ; <nl> } <nl> <nl> template < typename T , typename . . . Ts > <nl> - static constexpr const char * getJNISignature ( T x , Ts . . . xs ) { <nl> - return ( std : : string ( getJNISignature ( x ) ) + std : : string ( getJNISignature ( xs . . . ) ) ) . c_str ( ) ; <nl> + static std : : string getJNISignature ( T x , Ts . . . xs ) { <nl> + return getJNISignature ( x ) + getJNISignature ( xs . . . ) ; <nl> } <nl> + <nl> + static void reportError ( const std : : string & className , const std : : string & methodName , const std : : string & signature ) ; <nl> } ; <nl> <nl> NS_CC_END <nl> mmm a / cocos / storage / local - storage / LocalStorage - android . cpp <nl> ppp b / cocos / storage / local - storage / LocalStorage - android . 
cpp <nl> <nl> USING_NS_CC ; <nl> static int _initialized = 0 ; <nl> <nl> + static className = " org / cocos2dx / lib / Cocos2dxLocalStorage " ; <nl> + <nl> static void splitFilename ( std : : string & str ) <nl> { <nl> size_t found = 0 ; <nl> void localStorageInit ( const std : : string & fullpath ) <nl> <nl> if ( ! _initialized ) <nl> { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , " org / cocos2dx / lib / Cocos2dxLocalStorage " , " init " , " ( Ljava / lang / String ; Ljava / lang / String ; ) Z " ) ) { <nl> - std : : string strDBFilename = fullpath ; <nl> - splitFilename ( strDBFilename ) ; <nl> - jstring jdbName = t . env - > NewStringUTF ( strDBFilename . c_str ( ) ) ; <nl> - jstring jtableName = t . env - > NewStringUTF ( " data " ) ; <nl> - jboolean ret = t . env - > CallStaticBooleanMethod ( t . classID , t . methodID , jdbName , jtableName ) ; <nl> - t . env - > DeleteLocalRef ( jdbName ) ; <nl> - t . env - > DeleteLocalRef ( jtableName ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - if ( ret ) { <nl> - _initialized = 1 ; <nl> - } <nl> + std : : string strDBFilename = fullpath ; <nl> + splitFilename ( strDBFilename ) ; <nl> + if ( JniHelper : : CallStaticBooleanMethod ( className , " init " , strDBFilename , " data " ) ) { <nl> + _initialized = 1 ; <nl> } <nl> } <nl> } <nl> void localStorageInit ( const std : : string & fullpath ) <nl> void localStorageFree ( ) <nl> { <nl> if ( _initialized ) { <nl> - <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , " org / cocos2dx / lib / Cocos2dxLocalStorage " , " destory " , " ( ) V " ) ) <nl> - { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - <nl> + JniHelper : : CallStaticVoidMethod ( className , " destory " ) ; <nl> _initialized = 0 ; <nl> } <nl> } <nl> void localStorageFree ( ) <nl> void localStorageSetItem ( const std : : string & key , const std : : string & value ) <nl> { <nl> assert ( _initialized ) ; <nl> - <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , " org / cocos2dx / lib / Cocos2dxLocalStorage " , " setItem " , " ( Ljava / lang / String ; Ljava / lang / String ; ) V " ) ) { <nl> - jstring jkey = t . env - > NewStringUTF ( key . c_str ( ) ) ; <nl> - jstring jvalue = t . env - > NewStringUTF ( value . c_str ( ) ) ; <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , jkey , jvalue ) ; <nl> - t . env - > DeleteLocalRef ( jkey ) ; <nl> - t . env - > DeleteLocalRef ( jvalue ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> + JniHelper : : CallStaticVoidMethod ( className , " setItem " , key , value ) ; <nl> } <nl> <nl> / * * gets an item from the LS * / <nl> bool localStorageGetItem ( const std : : string & key , std : : string * outItem ) <nl> void localStorageRemoveItem ( const std : : string & key ) <nl> { <nl> assert ( _initialized ) ; <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , " org / cocos2dx / lib / Cocos2dxLocalStorage " , " removeItem " , " ( Ljava / lang / String ; ) V " ) ) { <nl> - jstring jkey = t . env - > NewStringUTF ( key . c_str ( ) ) ; <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , jkey ) ; <nl> - t . env - > DeleteLocalRef ( jkey ) ; <nl> - t . env - > DeleteLocalRef ( t . 
classID ) ; <nl> - } <nl> + JniHelper : : CallStaticVoidMethod ( className , " removeItem " , key ) ; <nl> <nl> } <nl> <nl> void localStorageRemoveItem ( const std : : string & key ) <nl> void localStorageClear ( ) <nl> { <nl> assert ( _initialized ) ; <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , " org / cocos2dx / lib / Cocos2dxLocalStorage " , " clear " , " ( ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> + JniHelper : : CallStaticVoidMethod ( className , " clear " ) ; <nl> } <nl> <nl> # endif / / # if ( CC_TARGET_PLATFORM = = CC_PLATFORM_ANDROID ) <nl> mmm a / cocos / ui / UIEditBox / UIEditBoxImpl - android . cpp <nl> ppp b / cocos / ui / UIEditBox / UIEditBoxImpl - android . cpp <nl> <nl> <nl> # include " UIEditBox . h " <nl> # include < jni . h > <nl> - # include " jni / Java_org_cocos2dx_lib_Cocos2dxHelper . h " <nl> + # include " platform / android / jni / JniHelper . h " <nl> # include " 2d / CCLabel . h " <nl> # include " base / ccUTF8 . h " <nl> # include " math / Vec2 . h " <nl> <nl> <nl> NS_CC_BEGIN <nl> <nl> + static const std : : string editBoxClassName = " org / cocos2dx / lib / Cocos2dxEditBoxHelper " ; <nl> <nl> namespace ui { <nl> <nl> EditBoxImplAndroid : : EditBoxImplAndroid ( EditBox * pEditText ) <nl> EditBoxImplAndroid : : ~ EditBoxImplAndroid ( ) <nl> { <nl> s_allEditBoxes . erase ( _editBoxIndex ) ; <nl> - removeEditBoxJNI ( _editBoxIndex ) ; <nl> + JniHelper : : callStaticVoidMethod ( editBoxClassName , " removeEditBox " , _editBoxIndex ) ; <nl> + <nl> } <nl> <nl> void EditBoxImplAndroid : : createNativeControl ( const Rect & frame ) <nl> void EditBoxImplAndroid : : createNativeControl ( const Rect & frame ) <nl> auto uiWidth = ( rightTop . x - leftBottom . x ) * glView - > getScaleX ( ) ; <nl> auto uiHeight = ( rightTop . y - leftBottom . y ) * glView - > getScaleY ( ) ; <nl> LOGD ( " scaleX = % f " , glView - > getScaleX ( ) ) ; <nl> - _editBoxIndex = addEditBoxJNI ( uiLeft , uiTop , uiWidth , uiHeight , glView - > getScaleX ( ) ) ; <nl> + _editBoxIndex = JniHelper : : callStaticIntMethod ( editBoxClassName , " createEditBox " , <nl> + ( int ) uiLeft , ( int ) uiTop , ( int ) uiWidth , ( int ) uiHeight , <nl> + ( float ) glView - > getScaleX ( ) ) ; <nl> s_allEditBoxes [ _editBoxIndex ] = this ; <nl> } <nl> <nl> void EditBoxImplAndroid : : setNativeFont ( const char * pFontName , int fontSize ) <nl> { <nl> auto director = cocos2d : : Director : : getInstance ( ) ; <nl> auto glView = director - > getOpenGLView ( ) ; <nl> - setFontEditBoxJNI ( _editBoxIndex , pFontName , fontSize * glView - > getScaleX ( ) ) ; <nl> + JniHelper : : callStaticVoidMethod ( editBoxClassName , " setFont " , <nl> + _editBoxIndex , pFontName , <nl> + ( float ) fontSize * glView - > getScaleX ( ) ) ; <nl> } <nl> <nl> void EditBoxImplAndroid : : setNativeFontColor ( const Color4B & color ) <nl> { <nl> - setFontColorEditBoxJNI ( _editBoxIndex , color . r , color . g , color . b , color . a ) ; <nl> + JniHelper : : callStaticVoidMethod ( editBoxClassName , " setFontColor " , _editBoxIndex , <nl> + ( int ) color . r , ( int ) color . g , ( int ) color . b , ( int ) color . 
a ) ; <nl> } <nl> <nl> void EditBoxImplAndroid : : setNativePlaceholderFont ( const char * pFontName , int fontSize ) <nl> void EditBoxImplAndroid : : setNativePlaceholderFont ( const char * pFontName , int fon <nl> <nl> void EditBoxImplAndroid : : setNativePlaceholderFontColor ( const Color4B & color ) <nl> { <nl> - setPlaceHolderTextColorEditBoxJNI ( _editBoxIndex , color . r , color . g , color . b , color . a ) ; <nl> + JniHelper : : callStaticVoidMethod ( editBoxClassName , " setPlaceHolderTextColor " , _editBoxIndex , <nl> + ( int ) color . r , ( int ) color . g , ( int ) color . b , ( int ) color . a ) ; <nl> } <nl> <nl> void EditBoxImplAndroid : : setNativeInputMode ( EditBox : : InputMode inputMode ) <nl> { <nl> - setInputModeEditBoxJNI ( _editBoxIndex , static_cast < int > ( inputMode ) ) ; <nl> + JniHelper : : callStaticVoidMethod ( editBoxClassName , " setInputMode " , <nl> + _editBoxIndex , static_cast < int > ( inputMode ) ) ; <nl> } <nl> <nl> void EditBoxImplAndroid : : setNativeMaxLength ( int maxLength ) <nl> { <nl> - setMaxLengthJNI ( _editBoxIndex , maxLength ) ; <nl> + JniHelper : : callStaticVoidMethod ( editBoxClassName , " setMaxLength " , _editBoxIndex , maxLength ) ; <nl> } <nl> <nl> - <nl> void EditBoxImplAndroid : : setNativeInputFlag ( EditBox : : InputFlag inputFlag ) <nl> { <nl> - setInputFlagEditBoxJNI ( _editBoxIndex , static_cast < int > ( inputFlag ) ) ; <nl> + JniHelper : : callStaticVoidMethod ( editBoxClassName , " setInputFlag " , <nl> + _editBoxIndex , static_cast < int > ( inputFlag ) ) ; <nl> } <nl> <nl> void EditBoxImplAndroid : : setNativeReturnType ( EditBox : : KeyboardReturnType returnType ) <nl> { <nl> - setReturnTypeEditBoxJNI ( _editBoxIndex , static_cast < int > ( returnType ) ) ; <nl> + JniHelper : : callStaticVoidMethod ( editBoxClassName , " setReturnType " , <nl> + _editBoxIndex , static_cast < int > ( returnType ) ) ; <nl> } <nl> <nl> bool EditBoxImplAndroid : : isEditing ( ) <nl> bool EditBoxImplAndroid : : isEditing ( ) <nl> <nl> void EditBoxImplAndroid : : setNativeText ( const char * pText ) <nl> { <nl> - setTextEditBoxJNI ( _editBoxIndex , pText ) ; <nl> + JniHelper : : callStaticVoidMethod ( editBoxClassName , " setText " , _editBoxIndex , pText ) ; <nl> } <nl> <nl> void EditBoxImplAndroid : : setNativePlaceHolder ( const char * pText ) <nl> { <nl> - setPlaceHolderTextEditBoxJNI ( _editBoxIndex , pText ) ; <nl> + JniHelper : : callStaticVoidMethod ( editBoxClassName , " setPlaceHolderText " , _editBoxIndex , pText ) ; <nl> } <nl> <nl> <nl> void EditBoxImplAndroid : : setNativeVisible ( bool visible ) <nl> { / / don ' t need to be implemented on android platform . <nl> - setVisibleEditBoxJNI ( _editBoxIndex , visible ) ; <nl> + JniHelper : : callStaticVoidMethod ( editBoxClassName , " setVisible " , _editBoxIndex , visible ) ; <nl> } <nl> <nl> void EditBoxImplAndroid : : updateNativeFrame ( const Rect & rect ) <nl> { <nl> - <nl> - setEditBoxViewRectJNI ( _editBoxIndex , rect . origin . x , rect . origin . y , rect . size . width , rect . size . height ) ; <nl> + JniHelper : : callStaticVoidMethod ( editBoxClassName , " setEditBoxViewRect " , _editBoxIndex , <nl> + ( int ) rect . origin . x , ( int ) rect . origin . y , <nl> + ( int ) rect . size . width , ( int ) rect . size . 
height ) ; <nl> } <nl> <nl> void EditBoxImplAndroid : : nativeOpenKeyboard ( ) <nl> { <nl> / / it will also open up the soft keyboard <nl> - setVisibleEditBoxJNI ( _editBoxIndex , true ) ; <nl> + JniHelper : : callStaticVoidMethod ( editBoxClassName , " setVisible " , _editBoxIndex , true ) ; <nl> } <nl> <nl> <nl> void EditBoxImplAndroid : : nativeCloseKeyboard ( ) <nl> { <nl> - closeEditBoxKeyboardJNI ( _editBoxIndex ) ; <nl> + JniHelper : : callStaticVoidMethod ( editBoxClassName , " closeKeyboard " , _editBoxIndex ) ; <nl> } <nl> <nl> void editBoxEditingDidBegin ( int index ) <nl> mmm a / cocos / ui / UIVideoPlayer - android . cpp <nl> ppp b / cocos / ui / UIVideoPlayer - android . cpp <nl> <nl> # include < stdlib . h > <nl> # include < jni . h > <nl> # include < string > <nl> - # include " jni / JniHelper . h " <nl> + # include " platform / android / jni / JniHelper . h " <nl> # include " base / CCDirector . h " <nl> # include " base / CCEventListenerKeyboard . h " <nl> # include " platform / CCFileUtils . h " <nl> # include " ui / UIHelper . h " <nl> <nl> / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> - # define CLASS_NAME " org / cocos2dx / lib / Cocos2dxVideoHelper " <nl> + <nl> + static const std : : string videoHelperClassName = " org / cocos2dx / lib / Cocos2dxVideoHelper " ; <nl> <nl> USING_NS_CC ; <nl> <nl> int createVideoWidgetJNI ( ) <nl> { <nl> JniMethodInfo t ; <nl> int ret = - 1 ; <nl> - if ( JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " createVideoWidget " , " ( ) I " ) ) { <nl> + if ( JniHelper : : getStaticMethodInfo ( t , videoHelperClassName . c_str ( ) , " createVideoWidget " , " ( ) I " ) ) { <nl> ret = t . env - > CallStaticIntMethod ( t . classID , t . methodID ) ; <nl> <nl> t . env - > DeleteLocalRef ( t . classID ) ; <nl> int createVideoWidgetJNI ( ) <nl> return ret ; <nl> } <nl> <nl> - void callVideoNonParameterFun ( int index , const char * funName ) <nl> - { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , funName , " ( I ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index ) ; <nl> - <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> - <nl> - void removeVideoWidgetJNI ( int index ) <nl> - { <nl> - callVideoNonParameterFun ( index , " removeVideoWidget " ) ; <nl> - } <nl> - <nl> - void setVideoRectJNI ( int index , int left , int top , int width , int height ) <nl> - { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " setVideoRect " , " ( IIIII ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index , left , top , width , height ) ; <nl> - <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> - <nl> - void setFullScreenEnabledJni ( int index , bool enabled , int width , int height ) <nl> - { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " setFullScreenEnabled " , " ( IZII ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index , enabled , width , height ) ; <nl> - <nl> - t . env - > DeleteLocalRef ( t . 
classID ) ; <nl> - } <nl> - } <nl> - <nl> - void setVideoURLJNI ( int index , int videoSource , const std : : string & videoUrl ) <nl> - { <nl> - JniMethodInfo t ; <nl> - if ( JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " setVideoUrl " , " ( IILjava / lang / String ; ) V " ) ) { <nl> - jstring stringArg = t . env - > NewStringUTF ( videoUrl . c_str ( ) ) ; <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index , videoSource , stringArg ) ; <nl> - <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - t . env - > DeleteLocalRef ( stringArg ) ; <nl> - } <nl> - } <nl> - <nl> - void startVideoJNI ( int index ) <nl> - { <nl> - callVideoNonParameterFun ( index , " startVideo " ) ; <nl> - } <nl> - <nl> - void pauseVideoJNI ( int index ) <nl> - { <nl> - callVideoNonParameterFun ( index , " pauseVideo " ) ; <nl> - } <nl> - <nl> - void resumeVideoJNI ( int index ) <nl> - { <nl> - callVideoNonParameterFun ( index , " resumeVideo " ) ; <nl> - } <nl> - <nl> - void stopVideoJNI ( int index ) <nl> - { <nl> - callVideoNonParameterFun ( index , " stopVideo " ) ; <nl> - } <nl> - <nl> - void seekVideoToJNI ( int index , int msec ) <nl> - { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " seekVideoTo " , " ( II ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index , msec ) ; <nl> - <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> - <nl> - void setVideoVisible ( int index , bool visible ) <nl> - { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " setVideoVisible " , " ( IZ ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index , visible ) ; <nl> - <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> - <nl> - void setVideoKeepRatioEnabled ( int index , bool enabled ) <nl> - { <nl> - JniMethodInfo t ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " setVideoKeepRatioEnabled " , " ( IZ ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index , enabled ) ; <nl> - <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> <nl> using namespace cocos2d : : experimental : : ui ; <nl> VideoPlayer : : VideoPlayer ( ) <nl> VideoPlayer : : ~ VideoPlayer ( ) <nl> { <nl> s_allVideoPlayers . 
erase ( _videoPlayerIndex ) ; <nl> - removeVideoWidgetJNI ( _videoPlayerIndex ) ; <nl> + JniHelper : : callStaticVoidMethod ( videoHelperClassName , " removeVideoWidget " , _videoPlayerIndex ) ; <nl> } <nl> <nl> void VideoPlayer : : setFileName ( const std : : string & fileName ) <nl> { <nl> _videoURL = FileUtils : : getInstance ( ) - > fullPathForFilename ( fileName ) ; <nl> _videoSource = VideoPlayer : : Source : : FILENAME ; <nl> - setVideoURLJNI ( _videoPlayerIndex , ( int ) Source : : FILENAME , _videoURL ) ; <nl> + JniHelper : : callStaticVoidMethod ( videoHelperClassName , " setVideoUrl " , _videoPlayerIndex , <nl> + ( int ) Source : : FILENAME , _videoURL ) ; <nl> } <nl> <nl> void VideoPlayer : : setURL ( const std : : string & videoUrl ) <nl> { <nl> _videoURL = videoUrl ; <nl> _videoSource = VideoPlayer : : Source : : URL ; <nl> - setVideoURLJNI ( _videoPlayerIndex , ( int ) Source : : URL , _videoURL ) ; <nl> + JniHelper : : callStaticVoidMethod ( videoHelperClassName , " setVideoUrl " , _videoPlayerIndex , <nl> + ( int ) Source : : URL , _videoURL ) ; <nl> } <nl> <nl> void VideoPlayer : : draw ( Renderer * renderer , const Mat4 & transform , uint32_t flags ) <nl> void VideoPlayer : : draw ( Renderer * renderer , const Mat4 & transform , uint32_t flags <nl> if ( flags & FLAGS_TRANSFORM_DIRTY ) <nl> { <nl> auto uiRect = cocos2d : : ui : : Helper : : convertBoundingBoxToScreen ( this ) ; <nl> - <nl> - setVideoRectJNI ( _videoPlayerIndex , uiRect . origin . x , uiRect . origin . y , <nl> - uiRect . size . width , uiRect . size . height ) ; <nl> + JniHelper : : callStaticVoidMethod ( videoHelperClassName , " setVideoRect " , _videoPlayerIndex , <nl> + ( int ) uiRect . origin . x , ( int ) uiRect . origin . y , <nl> + ( int ) uiRect . size . width , ( int ) uiRect . size . height ) ; <nl> } <nl> <nl> # if CC_VIDEOPLAYER_DEBUG_DRAW <nl> void VideoPlayer : : setFullScreenEnabled ( bool enabled ) <nl> _fullScreenEnabled = enabled ; <nl> <nl> auto frameSize = Director : : getInstance ( ) - > getOpenGLView ( ) - > getFrameSize ( ) ; <nl> - setFullScreenEnabledJni ( _videoPlayerIndex , enabled , frameSize . width , frameSize . height ) ; <nl> + JniHelper : : callStaticVoidMethod ( videoHelperClassName , " setFullScreenEnabled " , _videoPlayerIndex , <nl> + enabled , ( int ) frameSize . width , ( int ) frameSize . height ) ; <nl> } <nl> } <nl> <nl> void VideoPlayer : : setKeepAspectRatioEnabled ( bool enable ) <nl> if ( _keepAspectRatioEnabled ! = enable ) <nl> { <nl> _keepAspectRatioEnabled = enable ; <nl> - setVideoKeepRatioEnabled ( _videoPlayerIndex , enable ) ; <nl> + JniHelper : : callStaticVoidMethod ( videoHelperClassName , " setVideoKeepRatioEnabled " , _videoPlayerIndex , enable ) ; <nl> } <nl> } <nl> <nl> void VideoPlayer : : play ( ) <nl> { <nl> if ( ! _videoURL . empty ( ) ) <nl> { <nl> - startVideoJNI ( _videoPlayerIndex ) ; <nl> + JniHelper : : callStaticVoidMethod ( videoHelperClassName , " startVideo " , _videoPlayerIndex ) ; <nl> } <nl> } <nl> <nl> void VideoPlayer : : pause ( ) <nl> { <nl> if ( ! _videoURL . empty ( ) ) <nl> { <nl> - pauseVideoJNI ( _videoPlayerIndex ) ; <nl> + JniHelper : : callStaticVoidMethod ( videoHelperClassName , " pauseVideo " , _videoPlayerIndex ) ; <nl> } <nl> } <nl> <nl> void VideoPlayer : : resume ( ) <nl> { <nl> if ( ! _videoURL . 
empty ( ) ) <nl> { <nl> - resumeVideoJNI ( _videoPlayerIndex ) ; <nl> + JniHelper : : callStaticVoidMethod ( videoHelperClassName , " resumeVideo " , _videoPlayerIndex ) ; <nl> } <nl> } <nl> <nl> void VideoPlayer : : stop ( ) <nl> { <nl> if ( ! _videoURL . empty ( ) ) <nl> { <nl> - stopVideoJNI ( _videoPlayerIndex ) ; <nl> + JniHelper : : callStaticVoidMethod ( videoHelperClassName , " stopVideo " , _videoPlayerIndex ) ; <nl> } <nl> } <nl> <nl> void VideoPlayer : : seekTo ( float sec ) <nl> { <nl> if ( ! _videoURL . empty ( ) ) <nl> { <nl> - seekVideoToJNI ( _videoPlayerIndex , int ( sec * 1000 ) ) ; <nl> + JniHelper : : callStaticVoidMethod ( videoHelperClassName , " seekVideoTo " , _videoPlayerIndex , int ( sec * 1000 ) ) ; <nl> } <nl> } <nl> <nl> void VideoPlayer : : setVisible ( bool visible ) <nl> <nl> if ( ! _videoURL . empty ( ) ) <nl> { <nl> - setVideoVisible ( _videoPlayerIndex , visible ) ; <nl> + JniHelper : : callStaticVoidMethod ( videoHelperClassName , " setVideoVisible " , _videoPlayerIndex , visible ) ; <nl> } <nl> } <nl> <nl> mmm a / cocos / ui / UIWebViewImpl - android . cpp <nl> ppp b / cocos / ui / UIWebViewImpl - android . cpp <nl> <nl> # include < unordered_map > <nl> # include < stdlib . h > <nl> # include < string > <nl> - # include " jni / JniHelper . h " <nl> - # include < jni . h > <nl> + # include " platform / android / jni / JniHelper . h " <nl> <nl> # include " UIWebView . h " <nl> # include " platform / CCGLView . h " <nl> <nl> # include " platform / CCFileUtils . h " <nl> # include " ui / UIHelper . h " <nl> <nl> - # define CLASS_NAME " org / cocos2dx / lib / Cocos2dxWebViewHelper " <nl> + static const std : : string className = " org / cocos2dx / lib / Cocos2dxWebViewHelper " ; <nl> <nl> # define LOGD ( . . . ) __android_log_print ( ANDROID_LOG_DEBUG , " " , __VA_ARGS__ ) <nl> <nl> namespace { <nl> <nl> int createWebViewJNI ( ) { <nl> cocos2d : : JniMethodInfo t ; <nl> - if ( cocos2d : : JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " createWebView " , " ( ) I " ) ) { <nl> + if ( cocos2d : : JniHelper : : getStaticMethodInfo ( t , className . c_str ( ) , " createWebView " , " ( ) I " ) ) { <nl> / / LOGD ( " error : % s , % d " , __func__ , __LINE__ ) ; <nl> jint viewTag = t . env - > CallStaticIntMethod ( t . classID , t . methodID ) ; <nl> t . env - > DeleteLocalRef ( t . classID ) ; <nl> int createWebViewJNI ( ) { <nl> return - 1 ; <nl> } <nl> <nl> - void removeWebViewJNI ( const int index ) { <nl> - / / LOGD ( " error : % s , % d " , __func__ , __LINE__ ) ; <nl> - cocos2d : : JniMethodInfo t ; <nl> - if ( cocos2d : : JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " removeWebView " , " ( I ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> - <nl> - void setWebViewRectJNI ( const int index , const int left , const int top , const int width , const int height ) { <nl> - / / LOGD ( " error : % s , % d " , __func__ , __LINE__ ) ; <nl> - cocos2d : : JniMethodInfo t ; <nl> - if ( cocos2d : : JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " setWebViewRect " , " ( IIIII ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index , left , top , width , height ) ; <nl> - t . env - > DeleteLocalRef ( t . 
classID ) ; <nl> - } <nl> - } <nl> - <nl> - void setJavascriptInterfaceSchemeJNI ( const int index , const std : : string & scheme ) { <nl> - / / LOGD ( " error : % s , % d " , __func__ , __LINE__ ) ; <nl> - cocos2d : : JniMethodInfo t ; <nl> - if ( cocos2d : : JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " setJavascriptInterfaceScheme " , " ( ILjava / lang / String ; ) V " ) ) { <nl> - jstring jScheme = t . env - > NewStringUTF ( scheme . c_str ( ) ) ; <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index , jScheme ) ; <nl> - <nl> - t . env - > DeleteLocalRef ( jScheme ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> - <nl> - void loadDataJNI ( const int index , const std : : string & data , const std : : string & MIMEType , const std : : string & encoding , const std : : string & baseURL ) { <nl> - / / LOGD ( " error : % s , % d " , __func__ , __LINE__ ) ; <nl> - cocos2d : : JniMethodInfo t ; <nl> - if ( cocos2d : : JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " loadData " , " ( ILjava / lang / String ; Ljava / lang / String ; Ljava / lang / String ; Ljava / lang / String ; ) V " ) ) { <nl> - jstring jData = t . env - > NewStringUTF ( data . c_str ( ) ) ; <nl> - jstring jMIMEType = t . env - > NewStringUTF ( MIMEType . c_str ( ) ) ; <nl> - jstring jEncoding = t . env - > NewStringUTF ( encoding . c_str ( ) ) ; <nl> - jstring jBaseURL = t . env - > NewStringUTF ( getFixedBaseUrl ( baseURL ) . c_str ( ) ) ; <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index , jData , jMIMEType , jEncoding , jBaseURL ) ; <nl> - <nl> - t . env - > DeleteLocalRef ( jData ) ; <nl> - t . env - > DeleteLocalRef ( jMIMEType ) ; <nl> - t . env - > DeleteLocalRef ( jEncoding ) ; <nl> - t . env - > DeleteLocalRef ( jBaseURL ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> - <nl> - void loadHTMLStringJNI ( const int index , const std : : string & string , const std : : string & baseURL ) { <nl> - / / LOGD ( " error : % s , % d " , __func__ , __LINE__ ) ; <nl> - cocos2d : : JniMethodInfo t ; <nl> - if ( cocos2d : : JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " loadHTMLString " , " ( ILjava / lang / String ; Ljava / lang / String ; ) V " ) ) { <nl> - jstring jString = t . env - > NewStringUTF ( string . c_str ( ) ) ; <nl> - jstring jBaseURL = t . env - > NewStringUTF ( getFixedBaseUrl ( baseURL ) . c_str ( ) ) ; <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index , jString , jBaseURL ) ; <nl> - <nl> - t . env - > DeleteLocalRef ( jString ) ; <nl> - t . env - > DeleteLocalRef ( jBaseURL ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> - <nl> - void loadUrlJNI ( const int index , const std : : string & url ) { <nl> - cocos2d : : JniMethodInfo t ; <nl> - if ( cocos2d : : JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " loadUrl " , " ( ILjava / lang / String ; ) V " ) ) { <nl> - jstring jUrl = t . env - > NewStringUTF ( url . c_str ( ) ) ; <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index , jUrl ) ; <nl> - <nl> - t . env - > DeleteLocalRef ( jUrl ) ; <nl> - t . env - > DeleteLocalRef ( t . 
classID ) ; <nl> - } <nl> - } <nl> - <nl> - void loadFileJNI ( const int index , const std : : string & filePath ) { <nl> - / / LOGD ( " error : % s , % d " , __func__ , __LINE__ ) ; <nl> - cocos2d : : JniMethodInfo t ; <nl> - if ( cocos2d : : JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " loadFile " , " ( ILjava / lang / String ; ) V " ) ) { <nl> - jstring jFilePath = t . env - > NewStringUTF ( filePath . c_str ( ) ) ; <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index , jFilePath ) ; <nl> - <nl> - t . env - > DeleteLocalRef ( jFilePath ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> - <nl> - void stopLoadingJNI ( const int index ) { <nl> - / / LOGD ( " error : % s , % d " , __func__ , __LINE__ ) ; <nl> - cocos2d : : JniMethodInfo t ; <nl> - if ( cocos2d : : JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " stopLoading " , " ( I ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> - <nl> - void reloadJNI ( const int index ) { <nl> - / / LOGD ( " error : % s , % d " , __func__ , __LINE__ ) ; <nl> - cocos2d : : JniMethodInfo t ; <nl> - if ( cocos2d : : JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " reload " , " ( I ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> - <nl> - bool canGoBackJNI ( const int index ) { <nl> - / / LOGD ( " error : % s , % d " , __func__ , __LINE__ ) ; <nl> - cocos2d : : JniMethodInfo t ; <nl> - if ( cocos2d : : JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " canGoBack " , " ( I ) Z " ) ) { <nl> - jboolean ret = t . env - > CallStaticBooleanMethod ( t . classID , t . methodID , index ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - return ret ; <nl> - } <nl> - return false ; <nl> - } <nl> - <nl> - bool canGoForwardJNI ( const int index ) { <nl> - / / LOGD ( " error : % s , % d " , __func__ , __LINE__ ) ; <nl> - cocos2d : : JniMethodInfo t ; <nl> - if ( cocos2d : : JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " canGoForward " , " ( I ) Z " ) ) { <nl> - jboolean ret = t . env - > CallStaticBooleanMethod ( t . classID , t . methodID , index ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - return ret ; <nl> - } <nl> - return false ; <nl> - } <nl> - <nl> - void goBackJNI ( const int index ) { <nl> - / / LOGD ( " error : % s , % d " , __func__ , __LINE__ ) ; <nl> - cocos2d : : JniMethodInfo t ; <nl> - if ( cocos2d : : JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " goBack " , " ( I ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> - <nl> - void goForwardJNI ( const int index ) { <nl> - / / LOGD ( " error : % s , % d " , __func__ , __LINE__ ) ; <nl> - cocos2d : : JniMethodInfo t ; <nl> - if ( cocos2d : : JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " goForward " , " ( I ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index ) ; <nl> - t . env - > DeleteLocalRef ( t . 
classID ) ; <nl> - } <nl> - } <nl> - <nl> - void evaluateJSJNI ( const int index , const std : : string & js ) { <nl> - / / LOGD ( " error : % s , % d " , __func__ , __LINE__ ) ; <nl> - cocos2d : : JniMethodInfo t ; <nl> - if ( cocos2d : : JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " evaluateJS " , " ( ILjava / lang / String ; ) V " ) ) { <nl> - jstring jjs = t . env - > NewStringUTF ( js . c_str ( ) ) ; <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index , jjs ) ; <nl> - <nl> - t . env - > DeleteLocalRef ( jjs ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> - <nl> - void setScalesPageToFitJNI ( const int index , const bool scalesPageToFit ) { <nl> - / / LOGD ( " error : % s , % d " , __func__ , __LINE__ ) ; <nl> - cocos2d : : JniMethodInfo t ; <nl> - if ( cocos2d : : JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " setScalesPageToFit " , " ( IZ ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index , scalesPageToFit ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> - <nl> - void setWebViewVisibleJNI ( const int index , const bool visible ) { <nl> - / / LOGD ( " error : % s , % d " , __func__ , __LINE__ ) ; <nl> - cocos2d : : JniMethodInfo t ; <nl> - if ( cocos2d : : JniHelper : : getStaticMethodInfo ( t , CLASS_NAME , " setVisible " , " ( IZ ) V " ) ) { <nl> - t . env - > CallStaticVoidMethod ( t . classID , t . methodID , index , visible ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - } <nl> - } <nl> - <nl> std : : string getUrlStringByFileName ( const std : : string & fileName ) { <nl> / / LOGD ( " error : % s , % d " , __func__ , __LINE__ ) ; <nl> const std : : string basePath ( " file : / / / android_asset / " ) ; <nl> namespace cocos2d { <nl> } <nl> <nl> WebViewImpl : : ~ WebViewImpl ( ) { <nl> - removeWebViewJNI ( _viewTag ) ; <nl> + JniHelper : : callStaticVoidMethod ( className , " removeWebView " , _viewTag ) ; <nl> s_WebViewImpls . erase ( _viewTag ) ; <nl> } <nl> <nl> void WebViewImpl : : loadData ( const Data & data , const std : : string & MIMEType , const std : : string & encoding , const std : : string & baseURL ) { <nl> std : : string dataString ( reinterpret_cast < char * > ( data . getBytes ( ) ) , static_cast < unsigned int > ( data . 
getSize ( ) ) ) ; <nl> - loadDataJNI ( _viewTag , dataString , MIMEType , encoding , baseURL ) ; <nl> + JniHelper : : callStaticVoidMethod ( className , " setJavascriptInterfaceScheme " , _viewTag , dataString , MIMEType , encoding , baseURL ) ; <nl> } <nl> <nl> void WebViewImpl : : loadHTMLString ( const std : : string & string , const std : : string & baseURL ) { <nl> - loadHTMLStringJNI ( _viewTag , string , baseURL ) ; <nl> + JniHelper : : callStaticVoidMethod ( className , " loadHTMLString " , _viewTag , string , baseURL ) ; <nl> } <nl> <nl> void WebViewImpl : : loadURL ( const std : : string & url ) { <nl> - loadUrlJNI ( _viewTag , url ) ; <nl> + JniHelper : : callStaticVoidMethod ( className , " loadUrl " , _viewTag , url ) ; <nl> } <nl> <nl> void WebViewImpl : : loadFile ( const std : : string & fileName ) { <nl> auto fullPath = getUrlStringByFileName ( fileName ) ; <nl> - loadFileJNI ( _viewTag , fullPath ) ; <nl> + JniHelper : : callStaticVoidMethod ( className , " loadFile " , _viewTag , fullPath ) ; <nl> } <nl> <nl> void WebViewImpl : : stopLoading ( ) { <nl> - stopLoadingJNI ( _viewTag ) ; <nl> + JniHelper : : callStaticVoidMethod ( className , " stopLoading " , _viewTag ) ; <nl> } <nl> <nl> void WebViewImpl : : reload ( ) { <nl> - reloadJNI ( _viewTag ) ; <nl> + JniHelper : : callStaticVoidMethod ( className , " reload " , _viewTag ) ; <nl> } <nl> <nl> bool WebViewImpl : : canGoBack ( ) { <nl> - return canGoBackJNI ( _viewTag ) ; <nl> + return JniHelper : : callStaticBooleanMethod ( className , " canGoBack " , _viewTag ) ; <nl> } <nl> <nl> bool WebViewImpl : : canGoForward ( ) { <nl> - return canGoForwardJNI ( _viewTag ) ; <nl> + return JniHelper : : callStaticBooleanMethod ( className , " canGoForward " , _viewTag ) ; <nl> } <nl> <nl> void WebViewImpl : : goBack ( ) { <nl> - goBackJNI ( _viewTag ) ; <nl> + JniHelper : : callStaticVoidMethod ( className , " goBack " , _viewTag ) ; <nl> } <nl> <nl> void WebViewImpl : : goForward ( ) { <nl> - goForwardJNI ( _viewTag ) ; <nl> + JniHelper : : callStaticVoidMethod ( className , " goForward " , _viewTag ) ; <nl> } <nl> <nl> void WebViewImpl : : setJavascriptInterfaceScheme ( const std : : string & scheme ) { <nl> - setJavascriptInterfaceSchemeJNI ( _viewTag , scheme ) ; <nl> + JniHelper : : callStaticVoidMethod ( className , " setJavascriptInterfaceScheme " , _viewTag , scheme ) ; <nl> } <nl> <nl> void WebViewImpl : : evaluateJS ( const std : : string & js ) { <nl> - evaluateJSJNI ( _viewTag , js ) ; <nl> + JniHelper : : callStaticVoidMethod ( className , " evaluateJS " , _viewTag , js ) ; <nl> } <nl> <nl> void WebViewImpl : : setScalesPageToFit ( const bool scalesPageToFit ) { <nl> - setScalesPageToFitJNI ( _viewTag , scalesPageToFit ) ; <nl> + JniHelper : : callStaticVoidMethod ( className , " setScalesPageToFit " , _viewTag , scalesPageToFit ) ; <nl> } <nl> <nl> bool WebViewImpl : : shouldStartLoading ( const int viewTag , const std : : string & url ) { <nl> namespace cocos2d { <nl> void WebViewImpl : : draw ( cocos2d : : Renderer * renderer , cocos2d : : Mat4 const & transform , uint32_t flags ) { <nl> if ( flags & cocos2d : : Node : : FLAGS_TRANSFORM_DIRTY ) { <nl> auto uiRect = cocos2d : : ui : : Helper : : convertBoundingBoxToScreen ( _webView ) ; <nl> - <nl> - setWebViewRectJNI ( _viewTag , uiRect . origin . x , uiRect . origin . y , <nl> - uiRect . size . width , uiRect . size . height ) ; <nl> + JniHelper : : callStaticVoidMethod ( className , " setWebViewRect " , _viewTag , <nl> + ( int ) uiRect . origin . 
x , ( int ) uiRect . origin . y , <nl> + ( int ) uiRect . size . width , ( int ) uiRect . size . height ) ; <nl> } <nl> } <nl> <nl> void WebViewImpl : : setVisible ( bool visible ) { <nl> - setWebViewVisibleJNI ( _viewTag , visible ) ; <nl> + JniHelper : : callStaticVoidMethod ( className , " setVisible " , _viewTag , visible ) ; <nl> } <nl> } / / namespace ui <nl> } / / namespace experimental <nl> mmm a / tools / simulator / libsimulator / proj . android / hellolua / Runtime_android . cpp <nl> ppp b / tools / simulator / libsimulator / proj . android / hellolua / Runtime_android . cpp <nl> <nl> using namespace std ; <nl> using namespace cocos2d ; <nl> <nl> - static std : : string ACTIVITY_PATH ( " org / cocos2dx / lua / AppActivity " ) ; <nl> + static std : : string className = " org / cocos2dx / lua / AppActivity " ; <nl> <nl> void setActivityPathForAndroid ( const std : : string & path ) <nl> { <nl> - ACTIVITY_PATH = path ; <nl> + className = path ; <nl> } <nl> <nl> string getIPAddress ( ) <nl> { <nl> - JniMethodInfo t ; <nl> - string IPAddress ( " " ) ; <nl> - <nl> - if ( JniHelper : : getStaticMethodInfo ( t , ACTIVITY_PATH . c_str ( ) , " getLocalIpAddress " , " ( ) Ljava / lang / String ; " ) ) { <nl> - jstring str = ( jstring ) t . env - > CallStaticObjectMethod ( t . classID , t . methodID ) ; <nl> - t . env - > DeleteLocalRef ( t . classID ) ; <nl> - IPAddress = JniHelper : : jstring2string ( str ) ; <nl> - t . env - > DeleteLocalRef ( str ) ; <nl> - } <nl> - return IPAddress ; <nl> + return JniHelper : : callStaticStringMethod ( className , " getLocalIpAddress " ) ; <nl> } <nl> | updates to JniHelper , refactoring of existing JNI calls | cocos2d/cocos2d-x | c4b995fe5947f6d5db9fa93d9613661c37b4c4c3 | 2015-11-27T09:00:33Z |
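Editor's note on the JNI refactoring record above: the commit replaces dozens of hand-written `getStaticMethodInfo` wrapper functions with variadic `JniHelper::callStatic*Method` templates that build the JNI signature string from the C++ argument types via the `getJNISignature` overloads shown in the diff. The sketch below is a minimal, JVM-free illustration of that signature-building step only; the `jniSig` / `voidMethodSignature` names and the demo argument values are hypothetical and are not part of the cocos2d-x API.

```cpp
// Minimal standalone sketch (assumption: not the actual cocos2d-x code) of how
// the new variadic helpers derive a JNI signature from the call-site arguments.
#include <iostream>
#include <string>

// One overload per supported argument type, mirroring getJNISignature in the diff.
static std::string jniSig()                   { return ""; }
static std::string jniSig(bool)               { return "Z"; }
static std::string jniSig(int)                { return "I"; }
static std::string jniSig(float)              { return "F"; }
static std::string jniSig(const char*)        { return "Ljava/lang/String;"; }
static std::string jniSig(const std::string&) { return "Ljava/lang/String;"; }

// Concatenate the codes of all arguments, e.g. (int, bool) -> "IZ".
template <typename T, typename... Ts>
static std::string jniSig(T x, Ts... xs) { return jniSig(x) + jniSig(xs...); }

// Full signature for a static void method, the way callStaticVoidMethod builds it.
template <typename... Ts>
static std::string voidMethodSignature(Ts... xs) { return "(" + jniSig(xs...) + ")V"; }

int main() {
    // e.g. Cocos2dxEditBoxHelper.setFontColor(index, r, g, b, a)
    std::cout << voidMethodSignature(0, 255, 255, 255, 255) << '\n'; // (IIIII)V
    // e.g. Cocos2dxVideoHelper.setVideoUrl(index, source, url)
    std::cout << voidMethodSignature(1, 0, "video.mp4") << '\n';     // (IILjava/lang/String;)V
    return 0;
}
```

Deriving the signature from the argument types is what makes the per-feature wrappers deleted in this commit (the `set*EditBoxJNI`, `*VideoJNI`, `*WebViewJNI` families) unnecessary: each caller now passes the class name, method name, and arguments directly.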
mmm a / Code / CryEngine / CryAudioSystem / ATLAudioObject . cpp <nl> ppp b / Code / CryEngine / CryAudioSystem / ATLAudioObject . cpp <nl> void CATLAudioObject : : Update ( <nl> m_propagationProcessor . GetPropagationData ( propagationData ) ; <nl> m_pImplData - > SetObstructionOcclusion ( propagationData . obstruction , propagationData . occlusion ) ; <nl> } <nl> + <nl> + UpdateControls ( deltaTime , distanceToListener , listenerPosition , listenerVelocity , listenerMoved ) ; <nl> + m_pImplData - > Update ( ) ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> | ! XB ( Audio ) Fixed merge issue of CL 1631770 | CRYTEK/CRYENGINE | 5ca877324650822fc29a1fcd847ca7788e8ba139 | 2018-01-03T16:21:02Z |
mmm a / docs / test - cases - and - sections . md <nl> ppp b / docs / test - cases - and - sections . md <nl> When any of these macros are used the console reporter recognises them and forma <nl> <nl> Other than the additional prefixes and the formatting in the console reporter these macros behave exactly as ` ` ` TEST_CASE ` ` ` s and ` ` ` SECTION ` ` ` s . As such there is nothing enforcing the correct sequencing of these macros - that ' s up to the programmer ! <nl> <nl> + # # Type parametrised test cases <nl> + <nl> + In addition to ` TEST_CASE ` s , Catch2 also supports test cases parametrised <nl> + by type , in the form of ` TEMPLATE_TEST_CASE ` . <nl> + <nl> + * * * TEMPLATE_TEST_CASE ( * * _test name_ , _tags_ , _type1_ , _type2_ , . . . , _typen_ * * ) * * <nl> + <nl> + _test name_ and _tag_ are exactly the same as they are in ` TEST_CASE ` , <nl> + with the difference that the tag string must be provided ( however , it <nl> + can be empty ) . _type1_ through _typen_ is the list of types for which <nl> + this test case should run , and , inside the test code , the current type <nl> + is available as the ` TestType ` type . <nl> + <nl> + Because of limitations of the C + + preprocessor , if you want to specify <nl> + a type with multiple template parameters , you need to enclose it in <nl> + parentheses , e . g . ` std : : map < int , std : : string > ` needs to be passed as <nl> + ` ( std : : map < int , std : : string > ) ` . <nl> + <nl> + Example : <nl> + ` ` ` cpp <nl> + TEMPLATE_TEST_CASE ( " vectors can be sized and resized " , " [ vector ] [ template ] " , int , std : : string , ( std : : tuple < int , float > ) ) { <nl> + <nl> + std : : vector < TestType > v ( 5 ) ; <nl> + <nl> + REQUIRE ( v . size ( ) = = 5 ) ; <nl> + REQUIRE ( v . capacity ( ) > = 5 ) ; <nl> + <nl> + SECTION ( " resizing bigger changes size and capacity " ) { <nl> + v . resize ( 10 ) ; <nl> + <nl> + REQUIRE ( v . size ( ) = = 10 ) ; <nl> + REQUIRE ( v . capacity ( ) > = 10 ) ; <nl> + } <nl> + SECTION ( " resizing smaller changes size but not capacity " ) { <nl> + v . resize ( 0 ) ; <nl> + <nl> + REQUIRE ( v . size ( ) = = 0 ) ; <nl> + REQUIRE ( v . capacity ( ) > = 5 ) ; <nl> + <nl> + SECTION ( " We can use the ' swap trick ' to reset the capacity " ) { <nl> + std : : vector < TestType > empty ; <nl> + empty . swap ( v ) ; <nl> + <nl> + REQUIRE ( v . capacity ( ) = = 0 ) ; <nl> + } <nl> + } <nl> + SECTION ( " reserving smaller does not change size or capacity " ) { <nl> + v . reserve ( 0 ) ; <nl> + <nl> + REQUIRE ( v . size ( ) = = 5 ) ; <nl> + REQUIRE ( v . capacity ( ) > = 5 ) ; <nl> + } <nl> + } <nl> + ` ` ` <nl> + <nl> + _While there is an upper limit on the number of types you can specify <nl> + in single ` TEMPLATE_TEST_CASE ` , the limit is very high and should not <nl> + be encountered in practice . _ <nl> + <nl> mmm <nl> <nl> [ Home ] ( Readme . md # top ) <nl> mmm a / docs / test - fixtures . md <nl> ppp b / docs / test - fixtures . md <nl> class UniqueTestsFixture { <nl> <nl> The two test cases here will create uniquely - named derived classes of UniqueTestsFixture and thus can access the ` getID ( ) ` protected method and ` conn ` member variables . This ensures that both the test cases are able to create a DBConnection using the same method ( DRY principle ) and that any ID ' s created are unique such that the order that tests are executed does not matter . 
<nl> <nl> + <nl> + Catch2 also provides ` TEMPLATE_TEST_CASE_METHOD ` that can be used together <nl> + with templated fixtures to perform tests for multiple different types . <nl> + However , unlike ` TEST_CASE_METHOD ` , ` TEMPLATE_TEST_CASE_METHOD ` requires <nl> + the tag specification to be non - empty , as it is followed by further macros <nl> + arguments . <nl> + <nl> + Also note that , because of limitations of the C + + preprocessor , if you <nl> + want to specify a type with multiple template parameters , you need to <nl> + enclose it in parentheses , e . g . ` std : : map < int , std : : string > ` needs to be <nl> + passed as ` ( std : : map < int , std : : string > ) ` . <nl> + <nl> + Example : <nl> + ` ` ` cpp <nl> + template < typename T > <nl> + struct Template_Fixture { <nl> + Template_Fixture ( ) : m_a ( 1 ) { } <nl> + <nl> + T m_a ; <nl> + } ; <nl> + <nl> + TEMPLATE_TEST_CASE_METHOD ( Template_Fixture , " A TEMPLATE_TEST_CASE_METHOD based test run that succeeds " , " [ class ] [ template ] " , int , float , double ) { <nl> + REQUIRE ( Template_Fixture < TestType > : : m_a = = 1 ) ; <nl> + } <nl> + ` ` ` <nl> + <nl> + _While there is an upper limit on the number of types you can specify <nl> + in single ` TEMPLATE_TEST_CASE ` , the limit is very high and should not <nl> + be encountered in practice . _ <nl> + <nl> mmm <nl> <nl> [ Home ] ( Readme . md # top ) <nl> mmm a / docs / tutorial . md <nl> ppp b / docs / tutorial . md <nl> <nl> [ Test cases and sections ] ( # test - cases - and - sections ) < br > <nl> [ BDD - Style ] ( # bdd - style ) < br > <nl> [ Scaling up ] ( # scaling - up ) < br > <nl> + [ Type parametrised test cases ] ( # type - parametrised - test - cases ) < br > <nl> [ Next steps ] ( # next - steps ) < br > <nl> <nl> # # Getting Catch2 <nl> The full source for Catch2 , including test projects , documentation , and other th <nl> <nl> # # Where to put it ? <nl> <nl> - Catch2 is header only . All you need to do is drop the file somewhere reachable from your project - either in some central location you can set your header search path to find , or directly into your project tree itself ! This is a particularly good option for other Open - Source projects that want to use Catch for their test suite . See [ this blog entry for more on that ] ( http : / / www . levelofindirection . com / journal / 2011 / 5 / 27 / unit - testing - in - c - and - objective - c - just - got - ridiculously - easi . html ) . <nl> + Catch2 is header only . All you need to do is drop the file somewhere reachable from your project - either in some central location you can set your header search path to find , or directly into your project tree itself ! This is a particularly good option for other Open - Source projects that want to use Catch for their test suite . See [ this blog entry for more on that ] ( http : / / www . levelofindirection . com / journal / 2011 / 5 / 27 / unit - testing - in - c - and - objective - c - just - got - ridiculously - easi . html ) . <nl> <nl> The rest of this tutorial will assume that the Catch2 single - include header ( or the include folder ) is available unqualified - but you may need to prefix it with a folder name if necessary . <nl> <nl> package , you need to include the header as ` # include < catch2 / catch . hpp > ` _ <nl> <nl> # # Writing tests <nl> <nl> - Let ' s start with a really simple example ( [ code ] ( . . / examples / 010 - TestCase . cpp ) ) . 
Say you have written a function to calculate factorials and now you want to test it ( let ' s leave aside TDD for now ) . <nl> + Let ' s start with a really simple example ( [ code ] ( . . / examples / 010 - TestCase . cpp ) ) . Say you have written a function to calculate factorials and now you want to test it ( let ' s leave aside TDD for now ) . <nl> <nl> ` ` ` c + + <nl> unsigned int Factorial ( unsigned int number ) { <nl> Catch takes a different approach ( to both NUnit and xUnit ) that is a more natura <nl> TEST_CASE ( " vectors can be sized and resized " , " [ vector ] " ) { <nl> <nl> std : : vector < int > v ( 5 ) ; <nl> - <nl> + <nl> REQUIRE ( v . size ( ) = = 5 ) ; <nl> REQUIRE ( v . capacity ( ) > = 5 ) ; <nl> - <nl> + <nl> SECTION ( " resizing bigger changes size and capacity " ) { <nl> v . resize ( 10 ) ; <nl> - <nl> + <nl> REQUIRE ( v . size ( ) = = 10 ) ; <nl> REQUIRE ( v . capacity ( ) > = 10 ) ; <nl> } <nl> SECTION ( " resizing smaller changes size but not capacity " ) { <nl> v . resize ( 0 ) ; <nl> - <nl> + <nl> REQUIRE ( v . size ( ) = = 0 ) ; <nl> REQUIRE ( v . capacity ( ) > = 5 ) ; <nl> } <nl> SECTION ( " reserving bigger changes capacity but not size " ) { <nl> v . reserve ( 10 ) ; <nl> - <nl> + <nl> REQUIRE ( v . size ( ) = = 5 ) ; <nl> REQUIRE ( v . capacity ( ) > = 10 ) ; <nl> } <nl> SECTION ( " reserving smaller does not change size or capacity " ) { <nl> v . reserve ( 0 ) ; <nl> - <nl> + <nl> REQUIRE ( v . size ( ) = = 5 ) ; <nl> REQUIRE ( v . capacity ( ) > = 5 ) ; <nl> } <nl> The power of sections really shows , however , when we need to execute a sequence <nl> ` ` ` c + + <nl> SECTION ( " reserving bigger changes capacity but not size " ) { <nl> v . reserve ( 10 ) ; <nl> - <nl> + <nl> REQUIRE ( v . size ( ) = = 5 ) ; <nl> REQUIRE ( v . capacity ( ) > = 10 ) ; <nl> - <nl> + <nl> SECTION ( " reserving smaller again does not change capacity " ) { <nl> v . reserve ( 7 ) ; <nl> - <nl> + <nl> REQUIRE ( v . capacity ( ) > = 10 ) ; <nl> } <nl> } <nl> SCENARIO ( " vectors can be sized and resized " , " [ vector ] " ) { <nl> <nl> GIVEN ( " A vector with some items " ) { <nl> std : : vector < int > v ( 5 ) ; <nl> - <nl> + <nl> REQUIRE ( v . size ( ) = = 5 ) ; <nl> REQUIRE ( v . capacity ( ) > = 5 ) ; <nl> - <nl> + <nl> WHEN ( " the size is increased " ) { <nl> v . resize ( 10 ) ; <nl> - <nl> + <nl> THEN ( " the size and capacity change " ) { <nl> REQUIRE ( v . size ( ) = = 10 ) ; <nl> REQUIRE ( v . capacity ( ) > = 10 ) ; <nl> SCENARIO ( " vectors can be sized and resized " , " [ vector ] " ) { <nl> } <nl> WHEN ( " the size is reduced " ) { <nl> v . resize ( 0 ) ; <nl> - <nl> + <nl> THEN ( " the size changes but not capacity " ) { <nl> REQUIRE ( v . size ( ) = = 0 ) ; <nl> REQUIRE ( v . capacity ( ) > = 5 ) ; <nl> SCENARIO ( " vectors can be sized and resized " , " [ vector ] " ) { <nl> } <nl> WHEN ( " more capacity is reserved " ) { <nl> v . reserve ( 10 ) ; <nl> - <nl> + <nl> THEN ( " the capacity changes but not the size " ) { <nl> REQUIRE ( v . size ( ) = = 5 ) ; <nl> REQUIRE ( v . capacity ( ) > = 10 ) ; <nl> SCENARIO ( " vectors can be sized and resized " , " [ vector ] " ) { <nl> } <nl> WHEN ( " less capacity is reserved " ) { <nl> v . reserve ( 0 ) ; <nl> - <nl> + <nl> THEN ( " neither size nor capacity are changed " ) { <nl> REQUIRE ( v . size ( ) = = 5 ) ; <nl> REQUIRE ( v . capacity ( ) > = 5 ) ; <nl> In fact it is usually a good idea to put the block with the ` ` ` # define ` ` ` [ in it <nl> Do not write your tests in header files ! 
<nl> <nl> <nl> + # # Type parametrised test cases <nl> + <nl> + Test cases in Catch2 can be also parametrised by type , via the <nl> + ` TEMPLATE_TEST_CASE ` macro , which behaves in the same way the ` TEST_CASE ` <nl> + macro , but is run for every type . <nl> + <nl> + For more details , see our documentation on [ test cases and sections ] <nl> + ( test - cases - and - sections . md # type - parametrised - test - cases ) . <nl> + <nl> + <nl> # # Next steps <nl> <nl> This has been a brief introduction to get you up and running with Catch , and to point out some of the key differences between Catch and other frameworks you may already be familiar with . This will get you going quite far already and you are now in a position to dive in and write some tests . <nl> mmm a / include / catch . hpp <nl> ppp b / include / catch . hpp <nl> <nl> <nl> # define CATCH_ANON_TEST_CASE ( ) INTERNAL_CATCH_TESTCASE ( ) <nl> <nl> + # ifndef CATCH_CONFIG_TRADITIONAL_MSVC_PREPROCESSOR <nl> + # define CATCH_TEMPLATE_TEST_CASE ( . . . ) INTERNAL_CATCH_TEMPLATE_TEST_CASE ( __VA_ARGS__ ) <nl> + # define CATCH_TEMPLATE_TEST_CASE_METHOD ( className , . . . ) INTERNAL_CATCH_TEMPLATE_TEST_CASE_METHOD ( className , __VA_ARGS__ ) <nl> + # else <nl> + # define CATCH_TEMPLATE_TEST_CASE ( . . . ) INTERNAL_CATCH_EXPAND_VARGS ( INTERNAL_CATCH_TEMPLATE_TEST_CASE ( __VA_ARGS__ ) ) <nl> + # define CATCH_TEMPLATE_TEST_CASE_METHOD ( className , . . . ) INTERNAL_CATCH_EXPAND_VARGS ( INTERNAL_CATCH_TEMPLATE_TEST_CASE_METHOD ( className , __VA_ARGS__ ) ) <nl> + # endif <nl> + <nl> + <nl> # if ! defined ( CATCH_CONFIG_RUNTIME_STATIC_REQUIRE ) <nl> # define CATCH_STATIC_REQUIRE ( . . . ) static_assert ( __VA_ARGS__ , # __VA_ARGS__ ) ; CATCH_SUCCEED ( # __VA_ARGS__ ) <nl> # define CATCH_STATIC_REQUIRE_FALSE ( . . . ) static_assert ( ! ( __VA_ARGS__ ) , " ! ( " # __VA_ARGS__ " ) " ) ; CATCH_SUCCEED ( # __VA_ARGS__ ) <nl> <nl> # define SUCCEED ( . . . ) INTERNAL_CATCH_MSG ( " SUCCEED " , Catch : : ResultWas : : Ok , Catch : : ResultDisposition : : ContinueOnFailure , __VA_ARGS__ ) <nl> # define ANON_TEST_CASE ( ) INTERNAL_CATCH_TESTCASE ( ) <nl> <nl> + # ifndef CATCH_CONFIG_TRADITIONAL_MSVC_PREPROCESSOR <nl> + # define TEMPLATE_TEST_CASE ( . . . ) INTERNAL_CATCH_TEMPLATE_TEST_CASE ( __VA_ARGS__ ) <nl> + # define TEMPLATE_TEST_CASE_METHOD ( className , . . . ) INTERNAL_CATCH_TEMPLATE_TEST_CASE_METHOD ( className , __VA_ARGS__ ) <nl> + # else <nl> + # define TEMPLATE_TEST_CASE ( . . . ) INTERNAL_CATCH_EXPAND_VARGS ( INTERNAL_CATCH_TEMPLATE_TEST_CASE ( __VA_ARGS__ ) ) <nl> + # define TEMPLATE_TEST_CASE_METHOD ( className , . . . ) INTERNAL_CATCH_EXPAND_VARGS ( INTERNAL_CATCH_TEMPLATE_TEST_CASE_METHOD ( className , __VA_ARGS__ ) ) <nl> + # endif <nl> + <nl> + <nl> # if ! defined ( CATCH_CONFIG_RUNTIME_STATIC_REQUIRE ) <nl> # define STATIC_REQUIRE ( . . . ) static_assert ( __VA_ARGS__ , # __VA_ARGS__ ) ; SUCCEED ( # __VA_ARGS__ ) <nl> # define STATIC_REQUIRE_FALSE ( . . . ) static_assert ( ! ( __VA_ARGS__ ) , " ! ( " # __VA_ARGS__ " ) " ) ; SUCCEED ( " ! ( " # __VA_ARGS__ " ) " ) <nl> using Catch : : Detail : : Approx ; <nl> <nl> # define CATCH_ANON_TEST_CASE ( ) INTERNAL_CATCH_TESTCASE_NO_REGISTRATION ( INTERNAL_CATCH_UNIQUE_NAME ( ____C_A_T_C_H____T_E_S_T____ ) ) <nl> <nl> + # ifndef CATCH_CONFIG_TRADITIONAL_MSVC_PREPROCESSOR <nl> + # define CATCH_TEMPLATE_TEST_CASE ( . . . 
) INTERNAL_CATCH_TEMPLATE_TEST_CASE_NO_REGISTRATION ( INTERNAL_CATCH_UNIQUE_NAME ( ____C_A_T_C_H____T_E_M_P_L_A_T_E____T_E_S_T____ ) ) <nl> + # define CATCH_TEMPLATE_TEST_CASE_METHOD ( className , . . . ) INTERNAL_CATCH_TEMPLATE_TEST_CASE_METHOD_NO_REGISTRATION ( INTERNAL_CATCH_UNIQUE_NAME ( ____C_A_T_C_H____T_E_M_P_L_A_T_E____T_E_S_T____ ) , className ) <nl> + # else <nl> + # define CATCH_TEMPLATE_TEST_CASE ( . . . ) INTERNAL_CATCH_EXPAND_VARGS ( INTERNAL_CATCH_TEMPLATE_TEST_CASE_NO_REGISTRATION ( INTERNAL_CATCH_UNIQUE_NAME ( ____C_A_T_C_H____T_E_M_P_L_A_T_E____T_E_S_T____ ) ) ) <nl> + # define CATCH_TEMPLATE_TEST_CASE_METHOD ( className , . . . ) INTERNAL_CATCH_EXPAND_VARGS ( INTERNAL_CATCH_TEMPLATE_TEST_CASE_METHOD_NO_REGISTRATION ( INTERNAL_CATCH_UNIQUE_NAME ( ____C_A_T_C_H____T_E_M_P_L_A_T_E____T_E_S_T____ ) , className ) ) <nl> + # endif <nl> + <nl> / / " BDD - style " convenience wrappers <nl> # define CATCH_SCENARIO ( . . . ) INTERNAL_CATCH_TESTCASE_NO_REGISTRATION ( INTERNAL_CATCH_UNIQUE_NAME ( ____C_A_T_C_H____T_E_S_T____ ) ) <nl> # define CATCH_SCENARIO_METHOD ( className , . . . ) INTERNAL_CATCH_TESTCASE_METHOD_NO_REGISTRATION ( INTERNAL_CATCH_UNIQUE_NAME ( ____C_A_T_C_H____T_E_S_T____ ) , className ) <nl> using Catch : : Detail : : Approx ; <nl> # define SUCCEED ( . . . ) ( void ) ( 0 ) <nl> # define ANON_TEST_CASE ( ) INTERNAL_CATCH_TESTCASE_NO_REGISTRATION ( INTERNAL_CATCH_UNIQUE_NAME ( ____C_A_T_C_H____T_E_S_T____ ) ) <nl> <nl> + # ifndef CATCH_CONFIG_TRADITIONAL_MSVC_PREPROCESSOR <nl> + # define TEMPLATE_TEST_CASE ( . . . ) INTERNAL_CATCH_TEMPLATE_TEST_CASE_NO_REGISTRATION ( INTERNAL_CATCH_UNIQUE_NAME ( ____C_A_T_C_H____T_E_M_P_L_A_T_E____T_E_S_T____ ) ) <nl> + # define TEMPLATE_TEST_CASE_METHOD ( className , . . . ) INTERNAL_CATCH_TEMPLATE_TEST_CASE_METHOD_NO_REGISTRATION ( INTERNAL_CATCH_UNIQUE_NAME ( ____C_A_T_C_H____T_E_M_P_L_A_T_E____T_E_S_T____ ) , className ) <nl> + # else <nl> + # define TEMPLATE_TEST_CASE ( . . . ) INTERNAL_CATCH_EXPAND_VARGS ( INTERNAL_CATCH_TEMPLATE_TEST_CASE_NO_REGISTRATION ( INTERNAL_CATCH_UNIQUE_NAME ( ____C_A_T_C_H____T_E_M_P_L_A_T_E____T_E_S_T____ ) ) ) <nl> + # define TEMPLATE_TEST_CASE_METHOD ( className , . . . ) INTERNAL_CATCH_EXPAND_VARGS ( INTERNAL_CATCH_TEMPLATE_TEST_CASE_METHOD_NO_REGISTRATION ( INTERNAL_CATCH_UNIQUE_NAME ( ____C_A_T_C_H____T_E_M_P_L_A_T_E____T_E_S_T____ ) , className ) ) <nl> + # endif <nl> <nl> # define STATIC_REQUIRE ( . . . ) ( void ) ( 0 ) <nl> # define STATIC_REQUIRE_FALSE ( . . . ) ( void ) ( 0 ) <nl> mmm a / include / internal / catch_compiler_capabilities . h <nl> ppp b / include / internal / catch_compiler_capabilities . h <nl> <nl> # define CATCH_INTERNAL_CONFIG_WINDOWS_SEH <nl> # endif <nl> <nl> + / / MSVC traditional preprocessor needs some workaround for __VA_ARGS__ <nl> + / / _MSVC_TRADITIONAL = = 0 means new conformant preprocessor <nl> + / / _MSVC_TRADITIONAL = = 1 means old traditional non - conformant preprocessor <nl> + # if ! defined ( _MSVC_TRADITIONAL ) | | ( defined ( _MSVC_TRADITIONAL ) & & _MSVC_TRADITIONAL ) <nl> + # define CATCH_INTERNAL_CONFIG_TRADITIONAL_MSVC_PREPROCESSOR <nl> + # endif <nl> + <nl> # endif / / _MSC_VER <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> # define CATCH_CATCH_ANON ( type ) catch ( type ) <nl> # endif <nl> <nl> + # if defined ( CATCH_INTERNAL_CONFIG_TRADITIONAL_MSVC_PREPROCESSOR ) & & ! 
defined ( CATCH_CONFIG_NO_TRADITIONAL_MSVC_PREPROCESSOR ) & & ! defined ( CATCH_CONFIG_TRADITIONAL_MSVC_PREPROCESSOR ) <nl> + # define CATCH_CONFIG_TRADITIONAL_MSVC_PREPROCESSOR <nl> + # endif <nl> <nl> # endif / / TWOBLUECUBES_CATCH_COMPILER_CAPABILITIES_HPP_INCLUDED <nl> <nl> new file mode 100644 <nl> index 000000000 . . 0bfb660f5 <nl> mmm / dev / null <nl> ppp b / include / internal / catch_preprocessor . hpp <nl> <nl> + / * <nl> + * Created by Jozef on 12 / 11 / 2018 . <nl> + * Copyright 2017 Two Blue Cubes Ltd . All rights reserved . <nl> + * <nl> + * Distributed under the Boost Software License , Version 1 . 0 . ( See accompanying <nl> + * file LICENSE_1_0 . txt or copy at http : / / www . boost . org / LICENSE_1_0 . txt ) <nl> + * / <nl> + <nl> + # ifndef TWOBLUECUBES_CATCH_PREPROCESSOR_HPP_INCLUDED <nl> + # define TWOBLUECUBES_CATCH_PREPROCESSOR_HPP_INCLUDED <nl> + <nl> + # define CATCH_RECURSION_LEVEL0 ( . . . ) __VA_ARGS__ <nl> + # define CATCH_RECURSION_LEVEL1 ( . . . ) CATCH_RECURSION_LEVEL0 ( CATCH_RECURSION_LEVEL0 ( CATCH_RECURSION_LEVEL0 ( __VA_ARGS__ ) ) ) <nl> + # define CATCH_RECURSION_LEVEL2 ( . . . ) CATCH_RECURSION_LEVEL1 ( CATCH_RECURSION_LEVEL1 ( CATCH_RECURSION_LEVEL1 ( __VA_ARGS__ ) ) ) <nl> + # define CATCH_RECURSION_LEVEL3 ( . . . ) CATCH_RECURSION_LEVEL2 ( CATCH_RECURSION_LEVEL2 ( CATCH_RECURSION_LEVEL2 ( __VA_ARGS__ ) ) ) <nl> + # define CATCH_RECURSION_LEVEL4 ( . . . ) CATCH_RECURSION_LEVEL3 ( CATCH_RECURSION_LEVEL3 ( CATCH_RECURSION_LEVEL3 ( __VA_ARGS__ ) ) ) <nl> + # define CATCH_RECURSION_LEVEL5 ( . . . ) CATCH_RECURSION_LEVEL4 ( CATCH_RECURSION_LEVEL4 ( CATCH_RECURSION_LEVEL4 ( __VA_ARGS__ ) ) ) <nl> + <nl> + # ifdef CATCH_CONFIG_TRADITIONAL_MSVC_PREPROCESSOR <nl> + # define INTERNAL_CATCH_EXPAND_VARGS ( . . . ) __VA_ARGS__ <nl> + / / MSVC needs more evaluations <nl> + # define CATCH_RECURSION_LEVEL6 ( . . . ) CATCH_RECURSION_LEVEL5 ( CATCH_RECURSION_LEVEL5 ( CATCH_RECURSION_LEVEL5 ( __VA_ARGS__ ) ) ) <nl> + # define CATCH_RECURSE ( . . . ) CATCH_RECURSION_LEVEL6 ( CATCH_RECURSION_LEVEL6 ( __VA_ARGS__ ) ) <nl> + # else <nl> + # define CATCH_RECURSE ( . . . ) CATCH_RECURSION_LEVEL5 ( __VA_ARGS__ ) <nl> + # endif <nl> + <nl> + # define CATCH_REC_END ( . . . ) <nl> + # define CATCH_REC_OUT <nl> + <nl> + # define CATCH_EMPTY ( ) <nl> + # define CATCH_DEFER ( id ) id CATCH_EMPTY ( ) <nl> + <nl> + # define CATCH_REC_GET_END2 ( ) 0 , CATCH_REC_END <nl> + # define CATCH_REC_GET_END1 ( . . . ) CATCH_REC_GET_END2 <nl> + # define CATCH_REC_GET_END ( . . . ) CATCH_REC_GET_END1 <nl> + # define CATCH_REC_NEXT0 ( test , next , . . . ) next CATCH_REC_OUT <nl> + # define CATCH_REC_NEXT1 ( test , next ) CATCH_DEFER ( CATCH_REC_NEXT0 ) ( test , next , 0 ) <nl> + # define CATCH_REC_NEXT ( test , next ) CATCH_REC_NEXT1 ( CATCH_REC_GET_END test , next ) <nl> + <nl> + # define CATCH_REC_LIST0 ( f , x , peek , . . . ) , f ( x ) CATCH_DEFER ( CATCH_REC_NEXT ( peek , CATCH_REC_LIST1 ) ) ( f , peek , __VA_ARGS__ ) <nl> + # define CATCH_REC_LIST1 ( f , x , peek , . . . ) , f ( x ) CATCH_DEFER ( CATCH_REC_NEXT ( peek , CATCH_REC_LIST0 ) ) ( f , peek , __VA_ARGS__ ) <nl> + # define CATCH_REC_LIST2 ( f , x , peek , . . . ) f ( x ) CATCH_DEFER ( CATCH_REC_NEXT ( peek , CATCH_REC_LIST1 ) ) ( f , peek , __VA_ARGS__ ) <nl> + <nl> + # define CATCH_REC_LIST0_UD ( f , userdata , x , peek , . . . ) , f ( userdata , x ) CATCH_DEFER ( CATCH_REC_NEXT ( peek , CATCH_REC_LIST1_UD ) ) ( f , userdata , peek , __VA_ARGS__ ) <nl> + # define CATCH_REC_LIST1_UD ( f , userdata , x , peek , . . . 
) , f ( userdata , x ) CATCH_DEFER ( CATCH_REC_NEXT ( peek , CATCH_REC_LIST0_UD ) ) ( f , userdata , peek , __VA_ARGS__ ) <nl> + # define CATCH_REC_LIST2_UD ( f , userdata , x , peek , . . . ) f ( userdata , x ) CATCH_DEFER ( CATCH_REC_NEXT ( peek , CATCH_REC_LIST1_UD ) ) ( f , userdata , peek , __VA_ARGS__ ) <nl> + <nl> + / / Applies the function macro ` f ` to each of the remaining parameters , inserts commas between the results , <nl> + / / and passes userdata as the first parameter to each invocation , <nl> + / / e . g . CATCH_REC_LIST_UD ( f , x , a , b , c ) evaluates to f ( x , a ) , f ( x , b ) , f ( x , c ) <nl> + # define CATCH_REC_LIST_UD ( f , userdata , . . . ) CATCH_RECURSE ( CATCH_REC_LIST2_UD ( f , userdata , __VA_ARGS__ , ( ) ( ) ( ) , ( ) ( ) ( ) , ( ) ( ) ( ) , 0 ) ) <nl> + <nl> + # define CATCH_REC_LIST ( f , . . . ) CATCH_RECURSE ( CATCH_REC_LIST2 ( f , __VA_ARGS__ , ( ) ( ) ( ) , ( ) ( ) ( ) , ( ) ( ) ( ) , 0 ) ) <nl> + <nl> + # define INTERNAL_CATCH_EXPAND1 ( param ) INTERNAL_CATCH_EXPAND2 ( param ) <nl> + # define INTERNAL_CATCH_EXPAND2 ( . . . ) INTERNAL_CATCH_NO # # __VA_ARGS__ <nl> + # define INTERNAL_CATCH_DEF ( . . . ) INTERNAL_CATCH_DEF __VA_ARGS__ <nl> + # define INTERNAL_CATCH_NOINTERNAL_CATCH_DEF <nl> + <nl> + # define INTERNAL_CATCH_REMOVE_PARENS ( . . . ) INTERNAL_CATCH_EXPAND1 ( INTERNAL_CATCH_DEF __VA_ARGS__ ) <nl> + <nl> + # define INTERNAL_CATCH_TEMPLATE_UNIQUE_NAME2 ( Name , . . . ) INTERNAL_CATCH_TEMPLATE_UNIQUE_NAME3 ( Name , __VA_ARGS__ ) <nl> + # ifndef CATCH_CONFIG_TRADITIONAL_MSVC_PREPROCESSOR <nl> + # define INTERNAL_CATCH_TEMPLATE_UNIQUE_NAME3 ( Name , . . . ) Name " - " # __VA_ARGS__ <nl> + # define INTERNAL_CATCH_TEMPLATE_UNIQUE_NAME ( Name , . . . ) INTERNAL_CATCH_TEMPLATE_UNIQUE_NAME2 ( Name , INTERNAL_CATCH_REMOVE_PARENS ( __VA_ARGS__ ) ) <nl> + # else <nl> + / / MSVC is adding extra space and needs more calls to properly remove ( ) <nl> + # define INTERNAL_CATCH_TEMPLATE_UNIQUE_NAME3 ( Name , . . . ) Name " - " # __VA_ARGS__ <nl> + # define INTERNAL_CATCH_TEMPLATE_UNIQUE_NAME1 ( Name , . . . ) INTERNAL_CATCH_TEMPLATE_UNIQUE_NAME2 ( Name , __VA_ARGS__ ) <nl> + # define INTERNAL_CATCH_TEMPLATE_UNIQUE_NAME ( Name , . . . ) INTERNAL_CATCH_TEMPLATE_UNIQUE_NAME1 ( Name , INTERNAL_CATCH_EXPAND_VARGS ( INTERNAL_CATCH_REMOVE_PARENS ( __VA_ARGS__ ) ) ) <nl> + # endif <nl> + <nl> + # endif / / TWOBLUECUBES_CATCH_PREPROCESSOR_HPP_INCLUDED <nl> mmm a / include / internal / catch_test_registry . h <nl> ppp b / include / internal / catch_test_registry . h <nl> <nl> # include " catch_interfaces_testcase . h " <nl> # include " catch_compiler_capabilities . h " <nl> # include " catch_stringref . h " <nl> + # include " catch_type_traits . hpp " <nl> + # include " catch_preprocessor . hpp " <nl> <nl> namespace Catch { <nl> <nl> struct AutoReg : NonCopyable { <nl> <nl> } / / end namespace Catch <nl> <nl> - # define INTERNAL_CATCH_EXPAND1 ( param ) INTERNAL_CATCH_EXPAND2 ( param ) <nl> - # define INTERNAL_CATCH_EXPAND2 ( . . . ) INTERNAL_CATCH_NO # # __VA_ARGS__ <nl> - # define INTERNAL_CATCH_DEF ( . . . ) INTERNAL_CATCH_DEF __VA_ARGS__ <nl> - # define INTERNAL_CATCH_NOINTERNAL_CATCH_DEF <nl> - <nl> # if defined ( CATCH_CONFIG_DISABLE ) <nl> # define INTERNAL_CATCH_TESTCASE_NO_REGISTRATION ( TestName , . . . ) \ <nl> static void TestName ( ) <nl> # define INTERNAL_CATCH_TESTCASE_METHOD_NO_REGISTRATION ( TestName , ClassName , . . . 
) \ <nl> namespace { \ <nl> - struct TestName : INTERNAL_CATCH_EXPAND1 ( INTERNAL_CATCH_DEF ClassName ) { \ <nl> + struct TestName : INTERNAL_CATCH_REMOVE_PARENS ( ClassName ) { \ <nl> void test ( ) ; \ <nl> } ; \ <nl> } \ <nl> void TestName : : test ( ) <nl> - <nl> + # define INTERNAL_CATCH_TEMPLATE_TEST_CASE_NO_REGISTRATION ( TestName , . . . ) \ <nl> + template < typename TestType > \ <nl> + static void TestName ( ) <nl> + # define INTERNAL_CATCH_TEMPLATE_TEST_CASE_METHOD_NO_REGISTRATION ( TestName , ClassName , . . . ) \ <nl> + namespace { \ <nl> + template < typename TestType > \ <nl> + struct TestName : INTERNAL_CATCH_REMOVE_PARENS ( ClassName < TestType > ) { \ <nl> + void test ( ) ; \ <nl> + } ; \ <nl> + } \ <nl> + template < typename TestType > \ <nl> + void TestName : : test ( ) <nl> # endif <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> struct AutoReg : NonCopyable { <nl> # define INTERNAL_CATCH_TEST_CASE_METHOD2 ( TestName , ClassName , . . . ) \ <nl> CATCH_INTERNAL_SUPPRESS_GLOBALS_WARNINGS \ <nl> namespace { \ <nl> - struct TestName : INTERNAL_CATCH_EXPAND1 ( INTERNAL_CATCH_DEF ClassName ) { \ <nl> + struct TestName : INTERNAL_CATCH_REMOVE_PARENS ( ClassName ) { \ <nl> void test ( ) ; \ <nl> } ; \ <nl> Catch : : AutoReg INTERNAL_CATCH_UNIQUE_NAME ( autoRegistrar ) ( Catch : : makeTestInvoker ( & TestName : : test ) , CATCH_INTERNAL_LINEINFO , # ClassName , Catch : : NameAndTags { __VA_ARGS__ } ) ; / * NOLINT * / \ <nl> struct AutoReg : NonCopyable { <nl> Catch : : AutoReg INTERNAL_CATCH_UNIQUE_NAME ( autoRegistrar ) ( Catch : : makeTestInvoker ( Function ) , CATCH_INTERNAL_LINEINFO , Catch : : StringRef ( ) , Catch : : NameAndTags { __VA_ARGS__ } ) ; / * NOLINT * / \ <nl> CATCH_INTERNAL_UNSUPPRESS_GLOBALS_WARNINGS <nl> <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + # define INTERNAL_CATCH_TEMPLATE_TEST_CASE_2 ( TestName , TestFunc , Name , Tags , . . . ) \ <nl> + CATCH_INTERNAL_SUPPRESS_GLOBALS_WARNINGS \ <nl> + template < typename TestType > \ <nl> + static void TestFunc ( ) ; \ <nl> + namespace { \ <nl> + template < typename . . . Types > \ <nl> + struct TestName { \ <nl> + template < typename . . . Ts > \ <nl> + TestName ( Ts . . . names ) { \ <nl> + CATCH_INTERNAL_CHECK_UNIQUE_TYPES ( CATCH_REC_LIST ( INTERNAL_CATCH_REMOVE_PARENS , __VA_ARGS__ ) ) \ <nl> + using expander = int [ ] ; \ <nl> + ( void ) expander { ( Catch : : AutoReg ( Catch : : makeTestInvoker ( & TestFunc < Types > ) , CATCH_INTERNAL_LINEINFO , Catch : : StringRef ( ) , Catch : : NameAndTags { names , Tags } ) , 0 ) . . . } ; / * NOLINT * / \ <nl> + } \ <nl> + } ; \ <nl> + INTERNAL_CATCH_TEMPLATE_REGISTRY_INITIATE ( TestName , Name , __VA_ARGS__ ) \ <nl> + } \ <nl> + CATCH_INTERNAL_UNSUPPRESS_GLOBALS_WARNINGS \ <nl> + template < typename TestType > \ <nl> + static void TestFunc ( ) <nl> + <nl> + # if defined ( CATCH_CPP17_OR_GREATER ) <nl> + # define CATCH_INTERNAL_CHECK_UNIQUE_TYPES ( . . . ) static_assert ( Catch : : is_unique < __VA_ARGS__ > , " Duplicate type detected in declaration of template test case " ) ; <nl> + # else <nl> + # define CATCH_INTERNAL_CHECK_UNIQUE_TYPES ( . . . 
) static_assert ( Catch : : is_unique < __VA_ARGS__ > : : value , " Duplicate type detected in declaration of template test case " ) ; <nl> + # endif <nl> + <nl> + # ifndef CATCH_CONFIG_TRADITIONAL_MSVC_PREPROCESSOR <nl> + # define INTERNAL_CATCH_TEMPLATE_TEST_CASE ( Name , Tags , . . . ) \ <nl> + INTERNAL_CATCH_TEMPLATE_TEST_CASE_2 ( INTERNAL_CATCH_UNIQUE_NAME ( ____C_A_T_C_H____T_E_M_P_L_A_T_E____T_E_S_T____ ) , INTERNAL_CATCH_UNIQUE_NAME ( ____C_A_T_C_H____T_E_M_P_L_A_T_E____T_E_S_T____F_U_N_C____ ) , Name , Tags , __VA_ARGS__ ) <nl> + # else <nl> + # define INTERNAL_CATCH_TEMPLATE_TEST_CASE ( Name , Tags , . . . ) \ <nl> + INTERNAL_CATCH_EXPAND_VARGS ( INTERNAL_CATCH_TEMPLATE_TEST_CASE_2 ( INTERNAL_CATCH_UNIQUE_NAME ( ____C_A_T_C_H____T_E_M_P_L_A_T_E____T_E_S_T____ ) , INTERNAL_CATCH_UNIQUE_NAME ( ____C_A_T_C_H____T_E_M_P_L_A_T_E____T_E_S_T____F_U_N_C____ ) , Name , Tags , __VA_ARGS__ ) ) <nl> + # endif <nl> + <nl> + # define INTERNAL_CATCH_TEMPLATE_REGISTRY_INITIATE ( TestName , Name , . . . ) \ <nl> + static int INTERNAL_CATCH_UNIQUE_NAME ( globalRegistrar ) = [ ] ( ) { \ <nl> + TestName < CATCH_REC_LIST ( INTERNAL_CATCH_REMOVE_PARENS , __VA_ARGS__ ) > ( CATCH_REC_LIST_UD ( INTERNAL_CATCH_TEMPLATE_UNIQUE_NAME , Name , __VA_ARGS__ ) ) ; \ <nl> + return 0 ; \ <nl> + } ( ) ; <nl> + <nl> + # define INTERNAL_CATCH_TEMPLATE_TEST_CASE_METHOD_2 ( TestNameClass , TestName , ClassName , Name , Tags , . . . ) \ <nl> + CATCH_INTERNAL_SUPPRESS_GLOBALS_WARNINGS \ <nl> + namespace { \ <nl> + template < typename TestType > \ <nl> + struct TestName : INTERNAL_CATCH_REMOVE_PARENS ( ClassName < TestType > ) { \ <nl> + void test ( ) ; \ <nl> + } ; \ <nl> + template < typename . . . Types > \ <nl> + struct TestNameClass { \ <nl> + template < typename . . . Ts > \ <nl> + TestNameClass ( Ts . . . names ) { \ <nl> + CATCH_INTERNAL_CHECK_UNIQUE_TYPES ( CATCH_REC_LIST ( INTERNAL_CATCH_REMOVE_PARENS , __VA_ARGS__ ) ) \ <nl> + using expander = int [ ] ; \ <nl> + ( void ) expander { ( Catch : : AutoReg ( Catch : : makeTestInvoker ( & TestName < Types > : : test ) , CATCH_INTERNAL_LINEINFO , # ClassName , Catch : : NameAndTags { names , Tags } ) , 0 ) . . . } ; / * NOLINT * / \ <nl> + } \ <nl> + } ; \ <nl> + INTERNAL_CATCH_TEMPLATE_REGISTRY_INITIATE ( TestNameClass , Name , __VA_ARGS__ ) \ <nl> + } \ <nl> + CATCH_INTERNAL_UNSUPPRESS_GLOBALS_WARNINGS \ <nl> + template < typename TestType > \ <nl> + void TestName < TestType > : : test ( ) <nl> + <nl> + # ifndef CATCH_CONFIG_TRADITIONAL_MSVC_PREPROCESSOR <nl> + # define INTERNAL_CATCH_TEMPLATE_TEST_CASE_METHOD ( ClassName , Name , Tags , . . . ) \ <nl> + INTERNAL_CATCH_TEMPLATE_TEST_CASE_METHOD_2 ( INTERNAL_CATCH_UNIQUE_NAME ( ____C_A_T_C_H____T_E_M_P_L_A_T_E____T_E_S_T____C_L_A_S_S____ ) , INTERNAL_CATCH_UNIQUE_NAME ( ____C_A_T_C_H____T_E_M_P_L_A_T_E____T_E_S_T____ ) , ClassName , Name , Tags , __VA_ARGS__ ) <nl> + # else <nl> + # define INTERNAL_CATCH_TEMPLATE_TEST_CASE_METHOD ( ClassName , Name , Tags , . . . ) \ <nl> + INTERNAL_CATCH_EXPAND_VARGS ( INTERNAL_CATCH_TEMPLATE_TEST_CASE_METHOD_2 ( INTERNAL_CATCH_UNIQUE_NAME ( ____C_A_T_C_H____T_E_M_P_L_A_T_E____T_E_S_T____C_L_A_S_S____ ) , INTERNAL_CATCH_UNIQUE_NAME ( ____C_A_T_C_H____T_E_M_P_L_A_T_E____T_E_S_T____ ) , ClassName , Name , Tags , __VA_ARGS__ ) ) <nl> + # endif <nl> <nl> # endif / / TWOBLUECUBES_CATCH_TEST_REGISTRY_HPP_INCLUDED <nl> new file mode 100644 <nl> index 000000000 . . 88615484b <nl> mmm / dev / null <nl> ppp b / include / internal / catch_type_traits . 
hpp <nl> <nl> + / * <nl> + * Created by Jozef on 12 / 11 / 2018 . <nl> + * Copyright 2017 Two Blue Cubes Ltd . All rights reserved . <nl> + * <nl> + * Distributed under the Boost Software License , Version 1 . 0 . ( See accompanying <nl> + * file LICENSE_1_0 . txt or copy at http : / / www . boost . org / LICENSE_1_0 . txt ) <nl> + * / <nl> + <nl> + # ifndef TWOBLUECUBES_CATCH_TYPE_TRAITS_HPP_INCLUDED <nl> + # define TWOBLUECUBES_CATCH_TYPE_TRAITS_HPP_INCLUDED <nl> + <nl> + namespace Catch { <nl> + <nl> + # ifdef CATCH_CPP17_OR_GREATER <nl> + template < typename . . . > <nl> + inline constexpr auto is_unique = std : : true_type { } ; <nl> + <nl> + template < typename T , typename . . . Rest > <nl> + inline constexpr auto is_unique < T , Rest . . . > = std : : bool_constant < <nl> + ( ! std : : is_same_v < T , Rest > & & . . . ) & & is_unique < Rest . . . > <nl> + > { } ; <nl> + # else <nl> + <nl> + template < typename . . . > <nl> + struct is_unique : std : : true_type { } ; <nl> + <nl> + template < typename T0 , typename T1 , typename . . . Rest > <nl> + struct is_unique < T0 , T1 , Rest . . . > : std : : integral_constant <nl> + < bool , <nl> + ! std : : is_same < T0 , T1 > : : value <nl> + & & is_unique < T0 , Rest . . . > : : value <nl> + & & is_unique < T1 , Rest . . . > : : value <nl> + > { } ; <nl> + <nl> + # endif <nl> + } <nl> + <nl> + # endif / / TWOBLUECUBES_CATCH_TYPE_TRAITS_HPP_INCLUDED <nl> mmm a / projects / CMakeLists . txt <nl> ppp b / projects / CMakeLists . txt <nl> set ( INTERNAL_HEADERS <nl> $ { HEADER_DIR } / internal / catch_option . hpp <nl> $ { HEADER_DIR } / internal / catch_output_redirect . h <nl> $ { HEADER_DIR } / internal / catch_platform . h <nl> + $ { HEADER_DIR } / internal / catch_preprocessor . hpp <nl> $ { HEADER_DIR } / internal / catch_random_number_generator . h <nl> $ { HEADER_DIR } / internal / catch_reenable_warnings . h <nl> $ { HEADER_DIR } / internal / catch_reporter_registrars . hpp <nl> set ( INTERNAL_HEADERS <nl> $ { HEADER_DIR } / internal / catch_to_string . hpp <nl> $ { HEADER_DIR } / internal / catch_tostring . h <nl> $ { HEADER_DIR } / internal / catch_totals . h <nl> + $ { HEADER_DIR } / internal / catch_type_traits . hpp <nl> $ { HEADER_DIR } / internal / catch_uncaught_exceptions . h <nl> $ { HEADER_DIR } / internal / catch_user_interfaces . h <nl> $ { HEADER_DIR } / internal / catch_version . h <nl> mmm a / projects / SelfTest / Baselines / compact . sw . approved . txt <nl> ppp b / projects / SelfTest / Baselines / compact . sw . approved . txt <nl> Generators . tests . cpp : < line number > : passed : x < y for : 10 < 109 <nl> Generators . tests . cpp : < line number > : passed : x < y for : 10 < 110 <nl> Class . tests . cpp : < line number > : failed : s = = " world " for : " hello " = = " world " <nl> Class . tests . cpp : < line number > : passed : s = = " hello " for : " hello " = = " hello " <nl> + Class . tests . cpp : < line number > : failed : Template_Fixture < TestType > : : m_a = = 2 for : 1 . 0 = = 2 <nl> + Class . tests . cpp : < line number > : failed : Template_Fixture < TestType > : : m_a = = 2 for : 1 . 0f = = 2 <nl> + Class . tests . cpp : < line number > : failed : Template_Fixture < TestType > : : m_a = = 2 for : 1 = = 2 <nl> + Class . tests . cpp : < line number > : passed : Template_Fixture < TestType > : : m_a = = 1 for : 1 . 0 = = 1 <nl> + Class . tests . cpp : < line number > : passed : Template_Fixture < TestType > : : m_a = = 1 for : 1 . 0f = = 1 <nl> + Class . tests . 
cpp : < line number > : passed : Template_Fixture < TestType > : : m_a = = 1 for : 1 = = 1 <nl> Class . tests . cpp : < line number > : failed : m_a = = 2 for : 1 = = 2 <nl> Class . tests . cpp : < line number > : passed : m_a = = 1 for : 1 = = 1 <nl> Approx . tests . cpp : < line number > : passed : d = = 1 . 23_a for : 1 . 23 = = Approx ( 1 . 23 ) <nl> TagAlias . tests . cpp : < line number > : passed : registry . add ( " [ no ampersat ] " , " " , Cat <nl> TagAlias . tests . cpp : < line number > : passed : registry . add ( " [ the @ is not at the start ] " , " " , Catch : : SourceLineInfo ( " file " , 3 ) ) <nl> TagAlias . tests . cpp : < line number > : passed : registry . add ( " @ no square bracket at start ] " , " " , Catch : : SourceLineInfo ( " file " , 3 ) ) <nl> TagAlias . tests . cpp : < line number > : passed : registry . add ( " [ @ no square bracket at end " , " " , Catch : : SourceLineInfo ( " file " , 3 ) ) <nl> + Misc . tests . cpp : < line number > : passed : v . size ( ) = = 5 for : 5 = = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) > = 5 for : 5 > = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . size ( ) = = 10 for : 10 = = 10 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) > = 10 for : 10 > = 10 <nl> + Misc . tests . cpp : < line number > : passed : v . size ( ) = = 5 for : 5 = = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) > = 5 for : 5 > = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . size ( ) = = 0 for : 0 = = 0 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) > = 5 for : 5 > = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) = = 0 for : 0 = = 0 <nl> + Misc . tests . cpp : < line number > : passed : v . size ( ) = = 5 for : 5 = = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) > = 5 for : 5 > = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . size ( ) = = 5 for : 5 = = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) > = 10 for : 10 > = 10 <nl> + Misc . tests . cpp : < line number > : passed : v . size ( ) = = 5 for : 5 = = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) > = 5 for : 5 > = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . size ( ) = = 5 for : 5 = = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) > = 5 for : 5 > = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . size ( ) = = 5 for : 5 = = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) > = 5 for : 5 > = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . size ( ) = = 10 for : 10 = = 10 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) > = 10 for : 10 > = 10 <nl> + Misc . tests . cpp : < line number > : passed : v . size ( ) = = 5 for : 5 = = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) > = 5 for : 5 > = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . size ( ) = = 0 for : 0 = = 0 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) > = 5 for : 5 > = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) = = 0 for : 0 = = 0 <nl> + Misc . tests . cpp : < line number > : passed : v . size ( ) = = 5 for : 5 = = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) > = 5 for : 5 > = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . 
size ( ) = = 5 for : 5 = = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) > = 10 for : 10 > = 10 <nl> + Misc . tests . cpp : < line number > : passed : v . size ( ) = = 5 for : 5 = = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) > = 5 for : 5 > = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . size ( ) = = 5 for : 5 = = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) > = 5 for : 5 > = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . size ( ) = = 5 for : 5 = = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) > = 5 for : 5 > = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . size ( ) = = 10 for : 10 = = 10 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) > = 10 for : 10 > = 10 <nl> + Misc . tests . cpp : < line number > : passed : v . size ( ) = = 5 for : 5 = = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) > = 5 for : 5 > = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . size ( ) = = 0 for : 0 = = 0 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) > = 5 for : 5 > = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) = = 0 for : 0 = = 0 <nl> + Misc . tests . cpp : < line number > : passed : v . size ( ) = = 5 for : 5 = = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) > = 5 for : 5 > = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . size ( ) = = 5 for : 5 = = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) > = 10 for : 10 > = 10 <nl> + Misc . tests . cpp : < line number > : passed : v . size ( ) = = 5 for : 5 = = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) > = 5 for : 5 > = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . size ( ) = = 5 for : 5 = = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) > = 5 for : 5 > = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . size ( ) = = 5 for : 5 = = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) > = 5 for : 5 > = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . size ( ) = = 10 for : 10 = = 10 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) > = 10 for : 10 > = 10 <nl> + Misc . tests . cpp : < line number > : passed : v . size ( ) = = 5 for : 5 = = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) > = 5 for : 5 > = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . size ( ) = = 0 for : 0 = = 0 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) > = 5 for : 5 > = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) = = 0 for : 0 = = 0 <nl> + Misc . tests . cpp : < line number > : passed : v . size ( ) = = 5 for : 5 = = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) > = 5 for : 5 > = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . size ( ) = = 5 for : 5 = = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) > = 10 for : 10 > = 10 <nl> + Misc . tests . cpp : < line number > : passed : v . size ( ) = = 5 for : 5 = = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . capacity ( ) > = 5 for : 5 > = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . size ( ) = = 5 for : 5 = = 5 <nl> + Misc . tests . cpp : < line number > : passed : v . 
capacity ( ) > = 5 for : 5 > = 5 <nl> VariadicMacros . tests . cpp : < line number > : passed : with 1 message : ' no assertions ' <nl> Tricky . tests . cpp : < line number > : passed : 0x < hex digits > = = bit30and31 for : 3221225472 ( 0x < hex digits > ) = = 3221225472 <nl> Message . tests . cpp : < line number > : failed - but was ok : 1 = = 2 <nl> Misc . tests . cpp : < line number > : passed : v . size ( ) = = 5 for : 5 = = 5 <nl> Misc . tests . cpp : < line number > : passed : v . capacity ( ) > = 5 for : 5 > = 5 <nl> Misc . tests . cpp : < line number > : passed : <nl> Misc . tests . cpp : < line number > : passed : <nl> - Failed 62 test cases , failed 122 assertions . <nl> + Failed 65 test cases , failed 125 assertions . <nl> <nl> mmm a / projects / SelfTest / Baselines / console . std . approved . txt <nl> ppp b / projects / SelfTest / Baselines / console . std . approved . txt <nl> Class . tests . cpp : < line number > : FAILED : <nl> with expansion : <nl> " hello " = = " world " <nl> <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + A TEMPLATE_TEST_CASE_METHOD based test run that fails - double <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Class . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Class . tests . cpp : < line number > : FAILED : <nl> + REQUIRE ( Template_Fixture < TestType > : : m_a = = 2 ) <nl> + with expansion : <nl> + 1 . 0 = = 2 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + A TEMPLATE_TEST_CASE_METHOD based test run that fails - float <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Class . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Class . tests . cpp : < line number > : FAILED : <nl> + REQUIRE ( Template_Fixture < TestType > : : m_a = = 2 ) <nl> + with expansion : <nl> + 1 . 0f = = 2 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + A TEMPLATE_TEST_CASE_METHOD based test run that fails - int <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Class . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Class . tests . cpp : < line number > : FAILED : <nl> + REQUIRE ( Template_Fixture < TestType > : : m_a = = 2 ) <nl> + with expansion : <nl> + 1 = = 2 <nl> + <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> A TEST_CASE_METHOD based test run that fails <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> due to unexpected exception with message : <nl> Why would you throw a std : : string ? 
<nl> <nl> = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> - test cases : 216 | 163 passed | 49 failed | 4 failed as expected <nl> - assertions : 1234 | 1105 passed | 108 failed | 21 failed as expected <nl> + test cases : 226 | 170 passed | 52 failed | 4 failed as expected <nl> + assertions : 1308 | 1176 passed | 111 failed | 21 failed as expected <nl> <nl> mmm a / projects / SelfTest / Baselines / console . sw . approved . txt <nl> ppp b / projects / SelfTest / Baselines / console . sw . approved . txt <nl> Class . tests . cpp : < line number > : PASSED : <nl> with expansion : <nl> " hello " = = " hello " <nl> <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + A TEMPLATE_TEST_CASE_METHOD based test run that fails - double <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Class . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Class . tests . cpp : < line number > : FAILED : <nl> + REQUIRE ( Template_Fixture < TestType > : : m_a = = 2 ) <nl> + with expansion : <nl> + 1 . 0 = = 2 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + A TEMPLATE_TEST_CASE_METHOD based test run that fails - float <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Class . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Class . tests . cpp : < line number > : FAILED : <nl> + REQUIRE ( Template_Fixture < TestType > : : m_a = = 2 ) <nl> + with expansion : <nl> + 1 . 0f = = 2 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + A TEMPLATE_TEST_CASE_METHOD based test run that fails - int <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Class . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Class . tests . cpp : < line number > : FAILED : <nl> + REQUIRE ( Template_Fixture < TestType > : : m_a = = 2 ) <nl> + with expansion : <nl> + 1 = = 2 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + A TEMPLATE_TEST_CASE_METHOD based test run that succeeds - double <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Class . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Class . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( Template_Fixture < TestType > : : m_a = = 1 ) <nl> + with expansion : <nl> + 1 . 0 = = 1 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + A TEMPLATE_TEST_CASE_METHOD based test run that succeeds - float <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Class . tests . 
cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Class . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( Template_Fixture < TestType > : : m_a = = 1 ) <nl> + with expansion : <nl> + 1 . 0f = = 1 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + A TEMPLATE_TEST_CASE_METHOD based test run that succeeds - int <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Class . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Class . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( Template_Fixture < TestType > : : m_a = = 1 ) <nl> + with expansion : <nl> + 1 = = 1 <nl> + <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> A TEST_CASE_METHOD based test run that fails <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> TagAlias . tests . cpp : < line number > : PASSED : <nl> TagAlias . tests . cpp : < line number > : PASSED : <nl> CHECK_THROWS ( registry . add ( " [ @ no square bracket at end " , " " , Catch : : SourceLineInfo ( " file " , 3 ) ) ) <nl> <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - float <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . size ( ) = = 5 ) <nl> + with expansion : <nl> + 5 = = 5 <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . capacity ( ) > = 5 ) <nl> + with expansion : <nl> + 5 > = 5 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - float <nl> + resizing bigger changes size and capacity <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . size ( ) = = 10 ) <nl> + with expansion : <nl> + 10 = = 10 <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . capacity ( ) > = 10 ) <nl> + with expansion : <nl> + 10 > = 10 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - float <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . 
size ( ) = = 5 ) <nl> + with expansion : <nl> + 5 = = 5 <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . capacity ( ) > = 5 ) <nl> + with expansion : <nl> + 5 > = 5 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - float <nl> + resizing smaller changes size but not capacity <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . size ( ) = = 0 ) <nl> + with expansion : <nl> + 0 = = 0 <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . capacity ( ) > = 5 ) <nl> + with expansion : <nl> + 5 > = 5 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - float <nl> + resizing smaller changes size but not capacity <nl> + We can use the ' swap trick ' to reset the capacity <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . capacity ( ) = = 0 ) <nl> + with expansion : <nl> + 0 = = 0 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - float <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . size ( ) = = 5 ) <nl> + with expansion : <nl> + 5 = = 5 <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . capacity ( ) > = 5 ) <nl> + with expansion : <nl> + 5 > = 5 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - float <nl> + reserving bigger changes capacity but not size <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . size ( ) = = 5 ) <nl> + with expansion : <nl> + 5 = = 5 <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . capacity ( ) > = 10 ) <nl> + with expansion : <nl> + 10 > = 10 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - float <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . 
. . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . size ( ) = = 5 ) <nl> + with expansion : <nl> + 5 = = 5 <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . capacity ( ) > = 5 ) <nl> + with expansion : <nl> + 5 > = 5 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - float <nl> + reserving smaller does not change size or capacity <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . size ( ) = = 5 ) <nl> + with expansion : <nl> + 5 = = 5 <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . capacity ( ) > = 5 ) <nl> + with expansion : <nl> + 5 > = 5 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - int <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . size ( ) = = 5 ) <nl> + with expansion : <nl> + 5 = = 5 <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . capacity ( ) > = 5 ) <nl> + with expansion : <nl> + 5 > = 5 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - int <nl> + resizing bigger changes size and capacity <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . size ( ) = = 10 ) <nl> + with expansion : <nl> + 10 = = 10 <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . capacity ( ) > = 10 ) <nl> + with expansion : <nl> + 10 > = 10 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - int <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . size ( ) = = 5 ) <nl> + with expansion : <nl> + 5 = = 5 <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . 
capacity ( ) > = 5 ) <nl> + with expansion : <nl> + 5 > = 5 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - int <nl> + resizing smaller changes size but not capacity <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . size ( ) = = 0 ) <nl> + with expansion : <nl> + 0 = = 0 <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . capacity ( ) > = 5 ) <nl> + with expansion : <nl> + 5 > = 5 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - int <nl> + resizing smaller changes size but not capacity <nl> + We can use the ' swap trick ' to reset the capacity <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . capacity ( ) = = 0 ) <nl> + with expansion : <nl> + 0 = = 0 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - int <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . size ( ) = = 5 ) <nl> + with expansion : <nl> + 5 = = 5 <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . capacity ( ) > = 5 ) <nl> + with expansion : <nl> + 5 > = 5 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - int <nl> + reserving bigger changes capacity but not size <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . size ( ) = = 5 ) <nl> + with expansion : <nl> + 5 = = 5 <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . capacity ( ) > = 10 ) <nl> + with expansion : <nl> + 10 > = 10 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - int <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . 
<nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . size ( ) = = 5 ) <nl> + with expansion : <nl> + 5 = = 5 <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . capacity ( ) > = 5 ) <nl> + with expansion : <nl> + 5 > = 5 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - int <nl> + reserving smaller does not change size or capacity <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . size ( ) = = 5 ) <nl> + with expansion : <nl> + 5 = = 5 <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . capacity ( ) > = 5 ) <nl> + with expansion : <nl> + 5 > = 5 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - std : : string <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . size ( ) = = 5 ) <nl> + with expansion : <nl> + 5 = = 5 <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . capacity ( ) > = 5 ) <nl> + with expansion : <nl> + 5 > = 5 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - std : : string <nl> + resizing bigger changes size and capacity <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . size ( ) = = 10 ) <nl> + with expansion : <nl> + 10 = = 10 <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . capacity ( ) > = 10 ) <nl> + with expansion : <nl> + 10 > = 10 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - std : : string <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . size ( ) = = 5 ) <nl> + with expansion : <nl> + 5 = = 5 <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . 
capacity ( ) > = 5 ) <nl> + with expansion : <nl> + 5 > = 5 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - std : : string <nl> + resizing smaller changes size but not capacity <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . size ( ) = = 0 ) <nl> + with expansion : <nl> + 0 = = 0 <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . capacity ( ) > = 5 ) <nl> + with expansion : <nl> + 5 > = 5 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - std : : string <nl> + resizing smaller changes size but not capacity <nl> + We can use the ' swap trick ' to reset the capacity <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . capacity ( ) = = 0 ) <nl> + with expansion : <nl> + 0 = = 0 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - std : : string <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . size ( ) = = 5 ) <nl> + with expansion : <nl> + 5 = = 5 <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . capacity ( ) > = 5 ) <nl> + with expansion : <nl> + 5 > = 5 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - std : : string <nl> + reserving bigger changes capacity but not size <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . size ( ) = = 5 ) <nl> + with expansion : <nl> + 5 = = 5 <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . capacity ( ) > = 10 ) <nl> + with expansion : <nl> + 10 > = 10 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - std : : string <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . 
. . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . size ( ) = = 5 ) <nl> + with expansion : <nl> + 5 = = 5 <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . capacity ( ) > = 5 ) <nl> + with expansion : <nl> + 5 > = 5 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - std : : string <nl> + reserving smaller does not change size or capacity <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . size ( ) = = 5 ) <nl> + with expansion : <nl> + 5 = = 5 <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . capacity ( ) > = 5 ) <nl> + with expansion : <nl> + 5 > = 5 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - std : : tuple < int , float > <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . size ( ) = = 5 ) <nl> + with expansion : <nl> + 5 = = 5 <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . capacity ( ) > = 5 ) <nl> + with expansion : <nl> + 5 > = 5 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - std : : tuple < int , float > <nl> + resizing bigger changes size and capacity <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . size ( ) = = 10 ) <nl> + with expansion : <nl> + 10 = = 10 <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . capacity ( ) > = 10 ) <nl> + with expansion : <nl> + 10 > = 10 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - std : : tuple < int , float > <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . size ( ) = = 5 ) <nl> + with expansion : <nl> + 5 = = 5 <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . 
capacity ( ) > = 5 ) <nl> + with expansion : <nl> + 5 > = 5 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - std : : tuple < int , float > <nl> + resizing smaller changes size but not capacity <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . size ( ) = = 0 ) <nl> + with expansion : <nl> + 0 = = 0 <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . capacity ( ) > = 5 ) <nl> + with expansion : <nl> + 5 > = 5 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - std : : tuple < int , float > <nl> + resizing smaller changes size but not capacity <nl> + We can use the ' swap trick ' to reset the capacity <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . capacity ( ) = = 0 ) <nl> + with expansion : <nl> + 0 = = 0 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - std : : tuple < int , float > <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . size ( ) = = 5 ) <nl> + with expansion : <nl> + 5 = = 5 <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . capacity ( ) > = 5 ) <nl> + with expansion : <nl> + 5 > = 5 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - std : : tuple < int , float > <nl> + reserving bigger changes capacity but not size <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . size ( ) = = 5 ) <nl> + with expansion : <nl> + 5 = = 5 <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . capacity ( ) > = 10 ) <nl> + with expansion : <nl> + 10 > = 10 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - std : : tuple < int , float > <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . 
. . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . size ( ) = = 5 ) <nl> + with expansion : <nl> + 5 = = 5 <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . capacity ( ) > = 5 ) <nl> + with expansion : <nl> + 5 > = 5 <nl> + <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + TemplateTest : vectors can be sized and resized - std : : tuple < int , float > <nl> + reserving smaller does not change size or capacity <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + Misc . tests . cpp : < line number > <nl> + . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . size ( ) = = 5 ) <nl> + with expansion : <nl> + 5 = = 5 <nl> + <nl> + Misc . tests . cpp : < line number > : PASSED : <nl> + REQUIRE ( v . capacity ( ) > = 5 ) <nl> + with expansion : <nl> + 5 > = 5 <nl> + <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> Test case with one argument <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> Misc . tests . cpp : < line number > <nl> Misc . tests . cpp : < line number > : PASSED : <nl> <nl> = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> - test cases : 216 | 150 passed | 62 failed | 4 failed as expected <nl> - assertions : 1248 | 1105 passed | 122 failed | 21 failed as expected <nl> + test cases : 226 | 157 passed | 65 failed | 4 failed as expected <nl> + assertions : 1322 | 1176 passed | 125 failed | 21 failed as expected <nl> <nl> mmm a / projects / SelfTest / Baselines / junit . sw . approved . txt <nl> ppp b / projects / SelfTest / Baselines / junit . sw . approved . txt <nl> <nl> < ? xml version = " 1 . 0 " encoding = " UTF - 8 " ? > <nl> < testsuitesloose text artifact <nl> > <nl> - < testsuite name = " < exe - name > " errors = " 17 " failures = " 106 " tests = " 1249 " hostname = " tbd " time = " { duration } " timestamp = " { iso8601 - timestamp } " > <nl> + < testsuite name = " < exe - name > " errors = " 17 " failures = " 109 " tests = " 1323 " hostname = " tbd " time = " { duration } " timestamp = " { iso8601 - timestamp } " > <nl> < testcase classname = " < exe - name > . global " name = " # A test name that starts with a # " time = " { duration } " / > <nl> < testcase classname = " < exe - name > . global " name = " # 1005 : Comparing pointer to int and long ( NULL can be either on various systems ) " time = " { duration } " / > <nl> < testcase classname = " < exe - name > . global " name = " # 1027 " time = " { duration } " / > <nl> Class . tests . cpp : < line number > <nl> < / failure > <nl> < / testcase > <nl> < testcase classname = " < exe - name > . TestClass " name = " A METHOD_AS_TEST_CASE based test run that succeeds " time = " { duration } " / > <nl> + < testcase classname = " < exe - name > . Template_Fixture " name = " A TEMPLATE_TEST_CASE_METHOD based test run that fails - double " time = " { duration } " > <nl> + < failure message = " 1 . 0 = = 2 " type = " REQUIRE " > <nl> + Class . tests . 
cpp : < line number > <nl> + < / failure > <nl> + < / testcase > <nl> + < testcase classname = " < exe - name > . Template_Fixture " name = " A TEMPLATE_TEST_CASE_METHOD based test run that fails - float " time = " { duration } " > <nl> + < failure message = " 1 . 0f = = 2 " type = " REQUIRE " > <nl> + Class . tests . cpp : < line number > <nl> + < / failure > <nl> + < / testcase > <nl> + < testcase classname = " < exe - name > . Template_Fixture " name = " A TEMPLATE_TEST_CASE_METHOD based test run that fails - int " time = " { duration } " > <nl> + < failure message = " 1 = = 2 " type = " REQUIRE " > <nl> + Class . tests . cpp : < line number > <nl> + < / failure > <nl> + < / testcase > <nl> + < testcase classname = " < exe - name > . Template_Fixture " name = " A TEMPLATE_TEST_CASE_METHOD based test run that succeeds - double " time = " { duration } " / > <nl> + < testcase classname = " < exe - name > . Template_Fixture " name = " A TEMPLATE_TEST_CASE_METHOD based test run that succeeds - float " time = " { duration } " / > <nl> + < testcase classname = " < exe - name > . Template_Fixture " name = " A TEMPLATE_TEST_CASE_METHOD based test run that succeeds - int " time = " { duration } " / > <nl> < testcase classname = " < exe - name > . Fixture " name = " A TEST_CASE_METHOD based test run that fails " time = " { duration } " > <nl> < failure message = " 1 = = 2 " type = " REQUIRE " > <nl> Class . tests . cpp : < line number > <nl> Misc . tests . cpp : < line number > <nl> < / testcase > <nl> < testcase classname = " < exe - name > . global " name = " Tag alias can be registered against tag patterns / The same tag alias can only be registered once " time = " { duration } " / > <nl> < testcase classname = " < exe - name > . global " name = " Tag alias can be registered against tag patterns / Tag aliases must be of the form [ @ name ] " time = " { duration } " / > <nl> + < testcase classname = " < exe - name > . global " name = " TemplateTest : vectors can be sized and resized - float " time = " { duration } " / > <nl> + < testcase classname = " < exe - name > . global " name = " TemplateTest : vectors can be sized and resized - float / resizing bigger changes size and capacity " time = " { duration } " / > <nl> + < testcase classname = " < exe - name > . global " name = " TemplateTest : vectors can be sized and resized - float / resizing smaller changes size but not capacity " time = " { duration } " / > <nl> + < testcase classname = " < exe - name > . global " name = " TemplateTest : vectors can be sized and resized - float / resizing smaller changes size but not capacity / We can use the ' swap trick ' to reset the capacity " time = " { duration } " / > <nl> + < testcase classname = " < exe - name > . global " name = " TemplateTest : vectors can be sized and resized - float / reserving bigger changes capacity but not size " time = " { duration } " / > <nl> + < testcase classname = " < exe - name > . global " name = " TemplateTest : vectors can be sized and resized - float / reserving smaller does not change size or capacity " time = " { duration } " / > <nl> + < testcase classname = " < exe - name > . global " name = " TemplateTest : vectors can be sized and resized - int " time = " { duration } " / > <nl> + < testcase classname = " < exe - name > . global " name = " TemplateTest : vectors can be sized and resized - int / resizing bigger changes size and capacity " time = " { duration } " / > <nl> + < testcase classname = " < exe - name > . 
global " name = " TemplateTest : vectors can be sized and resized - int / resizing smaller changes size but not capacity " time = " { duration } " / > <nl> + < testcase classname = " < exe - name > . global " name = " TemplateTest : vectors can be sized and resized - int / resizing smaller changes size but not capacity / We can use the ' swap trick ' to reset the capacity " time = " { duration } " / > <nl> + < testcase classname = " < exe - name > . global " name = " TemplateTest : vectors can be sized and resized - int / reserving bigger changes capacity but not size " time = " { duration } " / > <nl> + < testcase classname = " < exe - name > . global " name = " TemplateTest : vectors can be sized and resized - int / reserving smaller does not change size or capacity " time = " { duration } " / > <nl> + < testcase classname = " < exe - name > . global " name = " TemplateTest : vectors can be sized and resized - std : : string " time = " { duration } " / > <nl> + < testcase classname = " < exe - name > . global " name = " TemplateTest : vectors can be sized and resized - std : : string / resizing bigger changes size and capacity " time = " { duration } " / > <nl> + < testcase classname = " < exe - name > . global " name = " TemplateTest : vectors can be sized and resized - std : : string / resizing smaller changes size but not capacity " time = " { duration } " / > <nl> + < testcase classname = " < exe - name > . global " name = " TemplateTest : vectors can be sized and resized - std : : string / resizing smaller changes size but not capacity / We can use the ' swap trick ' to reset the capacity " time = " { duration } " / > <nl> + < testcase classname = " < exe - name > . global " name = " TemplateTest : vectors can be sized and resized - std : : string / reserving bigger changes capacity but not size " time = " { duration } " / > <nl> + < testcase classname = " < exe - name > . global " name = " TemplateTest : vectors can be sized and resized - std : : string / reserving smaller does not change size or capacity " time = " { duration } " / > <nl> + < testcase classname = " < exe - name > . global " name = " TemplateTest : vectors can be sized and resized - std : : tuple & lt ; int , float > " time = " { duration } " / > <nl> + < testcase classname = " < exe - name > . global " name = " TemplateTest : vectors can be sized and resized - std : : tuple & lt ; int , float > / resizing bigger changes size and capacity " time = " { duration } " / > <nl> + < testcase classname = " < exe - name > . global " name = " TemplateTest : vectors can be sized and resized - std : : tuple & lt ; int , float > / resizing smaller changes size but not capacity " time = " { duration } " / > <nl> + < testcase classname = " < exe - name > . global " name = " TemplateTest : vectors can be sized and resized - std : : tuple & lt ; int , float > / resizing smaller changes size but not capacity / We can use the ' swap trick ' to reset the capacity " time = " { duration } " / > <nl> + < testcase classname = " < exe - name > . global " name = " TemplateTest : vectors can be sized and resized - std : : tuple & lt ; int , float > / reserving bigger changes capacity but not size " time = " { duration } " / > <nl> + < testcase classname = " < exe - name > . global " name = " TemplateTest : vectors can be sized and resized - std : : tuple & lt ; int , float > / reserving smaller does not change size or capacity " time = " { duration } " / > <nl> < testcase classname = " < exe - name > . 
global " name = " Test case with one argument " time = " { duration } " / > <nl> < testcase classname = " < exe - name > . global " name = " Test enum bit values " time = " { duration } " / > <nl> < testcase classname = " < exe - name > . global " name = " The NO_FAIL macro reports a failure but does not fail the test " time = " { duration } " / > <nl> mmm a / projects / SelfTest / Baselines / xml . sw . approved . txt <nl> ppp b / projects / SelfTest / Baselines / xml . sw . approved . txt <nl> <nl> < / Expression > <nl> < OverallResult success = " true " / > <nl> < / TestCase > <nl> + < TestCase name = " A TEMPLATE_TEST_CASE_METHOD based test run that fails - double " tags = " [ . ] [ class ] [ failing ] [ template ] " filename = " projects / < exe - name > / UsageTests / Class . tests . cpp " > <nl> + < Expression success = " false " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Class . tests . cpp " > <nl> + < Original > <nl> + Template_Fixture & lt ; TestType > : : m_a = = 2 <nl> + < / Original > <nl> + < Expanded > <nl> + 1 . 0 = = 2 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < OverallResult success = " false " / > <nl> + < / TestCase > <nl> + < TestCase name = " A TEMPLATE_TEST_CASE_METHOD based test run that fails - float " tags = " [ . ] [ class ] [ failing ] [ template ] " filename = " projects / < exe - name > / UsageTests / Class . tests . cpp " > <nl> + < Expression success = " false " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Class . tests . cpp " > <nl> + < Original > <nl> + Template_Fixture & lt ; TestType > : : m_a = = 2 <nl> + < / Original > <nl> + < Expanded > <nl> + 1 . 0f = = 2 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < OverallResult success = " false " / > <nl> + < / TestCase > <nl> + < TestCase name = " A TEMPLATE_TEST_CASE_METHOD based test run that fails - int " tags = " [ . ] [ class ] [ failing ] [ template ] " filename = " projects / < exe - name > / UsageTests / Class . tests . cpp " > <nl> + < Expression success = " false " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Class . tests . cpp " > <nl> + < Original > <nl> + Template_Fixture & lt ; TestType > : : m_a = = 2 <nl> + < / Original > <nl> + < Expanded > <nl> + 1 = = 2 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < OverallResult success = " false " / > <nl> + < / TestCase > <nl> + < TestCase name = " A TEMPLATE_TEST_CASE_METHOD based test run that succeeds - double " tags = " [ class ] [ template ] " filename = " projects / < exe - name > / UsageTests / Class . tests . cpp " > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Class . tests . cpp " > <nl> + < Original > <nl> + Template_Fixture & lt ; TestType > : : m_a = = 1 <nl> + < / Original > <nl> + < Expanded > <nl> + 1 . 0 = = 1 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < OverallResult success = " true " / > <nl> + < / TestCase > <nl> + < TestCase name = " A TEMPLATE_TEST_CASE_METHOD based test run that succeeds - float " tags = " [ class ] [ template ] " filename = " projects / < exe - name > / UsageTests / Class . tests . cpp " > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Class . tests . cpp " > <nl> + < Original > <nl> + Template_Fixture & lt ; TestType > : : m_a = = 1 <nl> + < / Original > <nl> + < Expanded > <nl> + 1 . 
0f = = 1 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < OverallResult success = " true " / > <nl> + < / TestCase > <nl> + < TestCase name = " A TEMPLATE_TEST_CASE_METHOD based test run that succeeds - int " tags = " [ class ] [ template ] " filename = " projects / < exe - name > / UsageTests / Class . tests . cpp " > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Class . tests . cpp " > <nl> + < Original > <nl> + Template_Fixture & lt ; TestType > : : m_a = = 1 <nl> + < / Original > <nl> + < Expanded > <nl> + 1 = = 1 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < OverallResult success = " true " / > <nl> + < / TestCase > <nl> < TestCase name = " A TEST_CASE_METHOD based test run that fails " tags = " [ . ] [ class ] [ failing ] " filename = " projects / < exe - name > / UsageTests / Class . tests . cpp " > <nl> < Expression success = " false " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Class . tests . cpp " > <nl> < Original > <nl> Message from section two <nl> < / Section > <nl> < OverallResult success = " true " / > <nl> < / TestCase > <nl> + < TestCase name = " TemplateTest : vectors can be sized and resized - float " tags = " [ template ] [ vector ] " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . size ( ) = = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 = = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . capacity ( ) > = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 > = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Section name = " resizing bigger changes size and capacity " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . size ( ) = = 10 <nl> + < / Original > <nl> + < Expanded > <nl> + 10 = = 10 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . capacity ( ) > = 10 <nl> + < / Original > <nl> + < Expanded > <nl> + 10 > = 10 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < OverallResults successes = " 2 " failures = " 0 " expectedFailures = " 0 " / > <nl> + < / Section > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . size ( ) = = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 = = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . capacity ( ) > = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 > = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Section name = " resizing smaller changes size but not capacity " filename = " projects / < exe - name > / UsageTests / Misc . tests . 
cpp " > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . size ( ) = = 0 <nl> + < / Original > <nl> + < Expanded > <nl> + 0 = = 0 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . capacity ( ) > = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 > = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Section name = " We can use the ' swap trick ' to reset the capacity " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . capacity ( ) = = 0 <nl> + < / Original > <nl> + < Expanded > <nl> + 0 = = 0 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < OverallResults successes = " 1 " failures = " 0 " expectedFailures = " 0 " / > <nl> + < / Section > <nl> + < OverallResults successes = " 3 " failures = " 0 " expectedFailures = " 0 " / > <nl> + < / Section > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . size ( ) = = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 = = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . capacity ( ) > = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 > = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Section name = " reserving bigger changes capacity but not size " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . size ( ) = = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 = = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . capacity ( ) > = 10 <nl> + < / Original > <nl> + < Expanded > <nl> + 10 > = 10 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < OverallResults successes = " 2 " failures = " 0 " expectedFailures = " 0 " / > <nl> + < / Section > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . size ( ) = = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 = = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . capacity ( ) > = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 > = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Section name = " reserving smaller does not change size or capacity " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . 
size ( ) = = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 = = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . capacity ( ) > = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 > = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < OverallResults successes = " 2 " failures = " 0 " expectedFailures = " 0 " / > <nl> + < / Section > <nl> + < OverallResult success = " true " / > <nl> + < / TestCase > <nl> + < TestCase name = " TemplateTest : vectors can be sized and resized - int " tags = " [ template ] [ vector ] " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . size ( ) = = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 = = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . capacity ( ) > = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 > = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Section name = " resizing bigger changes size and capacity " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . size ( ) = = 10 <nl> + < / Original > <nl> + < Expanded > <nl> + 10 = = 10 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . capacity ( ) > = 10 <nl> + < / Original > <nl> + < Expanded > <nl> + 10 > = 10 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < OverallResults successes = " 2 " failures = " 0 " expectedFailures = " 0 " / > <nl> + < / Section > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . size ( ) = = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 = = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . capacity ( ) > = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 > = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Section name = " resizing smaller changes size but not capacity " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . size ( ) = = 0 <nl> + < / Original > <nl> + < Expanded > <nl> + 0 = = 0 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . 
capacity ( ) > = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 > = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Section name = " We can use the ' swap trick ' to reset the capacity " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . capacity ( ) = = 0 <nl> + < / Original > <nl> + < Expanded > <nl> + 0 = = 0 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < OverallResults successes = " 1 " failures = " 0 " expectedFailures = " 0 " / > <nl> + < / Section > <nl> + < OverallResults successes = " 3 " failures = " 0 " expectedFailures = " 0 " / > <nl> + < / Section > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . size ( ) = = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 = = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . capacity ( ) > = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 > = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Section name = " reserving bigger changes capacity but not size " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . size ( ) = = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 = = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . capacity ( ) > = 10 <nl> + < / Original > <nl> + < Expanded > <nl> + 10 > = 10 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < OverallResults successes = " 2 " failures = " 0 " expectedFailures = " 0 " / > <nl> + < / Section > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . size ( ) = = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 = = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . capacity ( ) > = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 > = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Section name = " reserving smaller does not change size or capacity " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . size ( ) = = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 = = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . 
capacity ( ) > = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 > = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < OverallResults successes = " 2 " failures = " 0 " expectedFailures = " 0 " / > <nl> + < / Section > <nl> + < OverallResult success = " true " / > <nl> + < / TestCase > <nl> + < TestCase name = " TemplateTest : vectors can be sized and resized - std : : string " tags = " [ template ] [ vector ] " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . size ( ) = = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 = = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . capacity ( ) > = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 > = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Section name = " resizing bigger changes size and capacity " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . size ( ) = = 10 <nl> + < / Original > <nl> + < Expanded > <nl> + 10 = = 10 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . capacity ( ) > = 10 <nl> + < / Original > <nl> + < Expanded > <nl> + 10 > = 10 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < OverallResults successes = " 2 " failures = " 0 " expectedFailures = " 0 " / > <nl> + < / Section > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . size ( ) = = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 = = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . capacity ( ) > = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 > = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Section name = " resizing smaller changes size but not capacity " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . size ( ) = = 0 <nl> + < / Original > <nl> + < Expanded > <nl> + 0 = = 0 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . capacity ( ) > = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 > = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Section name = " We can use the ' swap trick ' to reset the capacity " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . 
capacity ( ) = = 0 <nl> + < / Original > <nl> + < Expanded > <nl> + 0 = = 0 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < OverallResults successes = " 1 " failures = " 0 " expectedFailures = " 0 " / > <nl> + < / Section > <nl> + < OverallResults successes = " 3 " failures = " 0 " expectedFailures = " 0 " / > <nl> + < / Section > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . size ( ) = = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 = = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . capacity ( ) > = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 > = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Section name = " reserving bigger changes capacity but not size " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . size ( ) = = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 = = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . capacity ( ) > = 10 <nl> + < / Original > <nl> + < Expanded > <nl> + 10 > = 10 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < OverallResults successes = " 2 " failures = " 0 " expectedFailures = " 0 " / > <nl> + < / Section > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . size ( ) = = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 = = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . capacity ( ) > = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 > = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Section name = " reserving smaller does not change size or capacity " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . size ( ) = = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 = = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . capacity ( ) > = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 > = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < OverallResults successes = " 2 " failures = " 0 " expectedFailures = " 0 " / > <nl> + < / Section > <nl> + < OverallResult success = " true " / > <nl> + < / TestCase > <nl> + < TestCase name = " TemplateTest : vectors can be sized and resized - std : : tuple & lt ; int , float > " tags = " [ template ] [ vector ] " filename = " projects / < exe - name > / UsageTests / Misc . tests . 
cpp " > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . size ( ) = = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 = = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . capacity ( ) > = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 > = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Section name = " resizing bigger changes size and capacity " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . size ( ) = = 10 <nl> + < / Original > <nl> + < Expanded > <nl> + 10 = = 10 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . capacity ( ) > = 10 <nl> + < / Original > <nl> + < Expanded > <nl> + 10 > = 10 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < OverallResults successes = " 2 " failures = " 0 " expectedFailures = " 0 " / > <nl> + < / Section > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . size ( ) = = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 = = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . capacity ( ) > = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 > = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Section name = " resizing smaller changes size but not capacity " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . size ( ) = = 0 <nl> + < / Original > <nl> + < Expanded > <nl> + 0 = = 0 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . capacity ( ) > = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 > = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Section name = " We can use the ' swap trick ' to reset the capacity " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . capacity ( ) = = 0 <nl> + < / Original > <nl> + < Expanded > <nl> + 0 = = 0 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < OverallResults successes = " 1 " failures = " 0 " expectedFailures = " 0 " / > <nl> + < / Section > <nl> + < OverallResults successes = " 3 " failures = " 0 " expectedFailures = " 0 " / > <nl> + < / Section > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . 
size ( ) = = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 = = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . capacity ( ) > = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 > = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Section name = " reserving bigger changes capacity but not size " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . size ( ) = = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 = = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . capacity ( ) > = 10 <nl> + < / Original > <nl> + < Expanded > <nl> + 10 > = 10 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < OverallResults successes = " 2 " failures = " 0 " expectedFailures = " 0 " / > <nl> + < / Section > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . size ( ) = = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 = = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . capacity ( ) > = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 > = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Section name = " reserving smaller does not change size or capacity " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . size ( ) = = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 = = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < Expression success = " true " type = " REQUIRE " filename = " projects / < exe - name > / UsageTests / Misc . tests . cpp " > <nl> + < Original > <nl> + v . capacity ( ) > = 5 <nl> + < / Original > <nl> + < Expanded > <nl> + 5 > = 5 <nl> + < / Expanded > <nl> + < / Expression > <nl> + < OverallResults successes = " 2 " failures = " 0 " expectedFailures = " 0 " / > <nl> + < / Section > <nl> + < OverallResult success = " true " / > <nl> + < / TestCase > <nl> < TestCase name = " Test case with one argument " filename = " projects / < exe - name > / UsageTests / VariadicMacros . tests . cpp " > <nl> < OverallResult success = " true " / > <nl> < / TestCase > <nl> loose text artifact <nl> < / Section > <nl> < OverallResult success = " true " / > <nl> < / TestCase > <nl> - < OverallResults successes = " 1105 " failures = " 123 " expectedFailures = " 21 " / > <nl> + < OverallResults successes = " 1176 " failures = " 126 " expectedFailures = " 21 " / > <nl> < / Group > <nl> - < OverallResults successes = " 1105 " failures = " 122 " expectedFailures = " 21 " / > <nl> + < OverallResults successes = " 1176 " failures = " 125 " expectedFailures = " 21 " / > <nl> < / Catch > <nl> mmm a / projects / SelfTest / UsageTests / Class . tests . 
cpp <nl> ppp b / projects / SelfTest / UsageTests / Class . tests . cpp <nl> struct Fixture <nl> int m_a ; <nl> } ; <nl> <nl> + template < typename T > <nl> + struct Template_Fixture { <nl> + Template_Fixture ( ) : m_a ( 1 ) { } <nl> + <nl> + T m_a ; <nl> + } ; <nl> + <nl> # endif <nl> <nl> <nl> TEST_CASE_METHOD ( Fixture , " A TEST_CASE_METHOD based test run that succeeds " , " [ <nl> REQUIRE ( m_a = = 1 ) ; <nl> } <nl> <nl> + TEMPLATE_TEST_CASE_METHOD ( Template_Fixture , " A TEMPLATE_TEST_CASE_METHOD based test run that succeeds " , " [ class ] [ template ] " , int , float , double ) { <nl> + REQUIRE ( Template_Fixture < TestType > : : m_a = = 1 ) ; <nl> + } <nl> + <nl> / / We should be able to write our tests within a different namespace <nl> namespace Inner <nl> { <nl> namespace Inner <nl> { <nl> REQUIRE ( m_a = = 2 ) ; <nl> } <nl> + <nl> + TEMPLATE_TEST_CASE_METHOD ( Template_Fixture , " A TEMPLATE_TEST_CASE_METHOD based test run that fails " , " [ . ] [ class ] [ template ] [ failing ] " , int , float , double ) <nl> + { <nl> + REQUIRE ( Template_Fixture < TestType > : : m_a = = 2 ) ; <nl> + } <nl> } <nl> <nl> + <nl> + <nl> } } / / namespace ClassTests <nl> mmm a / projects / SelfTest / UsageTests / Misc . tests . cpp <nl> ppp b / projects / SelfTest / UsageTests / Misc . tests . cpp <nl> TEST_CASE ( " vectors can be sized and resized " , " [ vector ] " ) { <nl> } <nl> } <nl> <nl> + TEMPLATE_TEST_CASE ( " TemplateTest : vectors can be sized and resized " , " [ vector ] [ template ] " , int , float , std : : string , ( std : : tuple < int , float > ) ) { <nl> + <nl> + std : : vector < TestType > v ( 5 ) ; <nl> + <nl> + REQUIRE ( v . size ( ) = = 5 ) ; <nl> + REQUIRE ( v . capacity ( ) > = 5 ) ; <nl> + <nl> + SECTION ( " resizing bigger changes size and capacity " ) { <nl> + v . resize ( 10 ) ; <nl> + <nl> + REQUIRE ( v . size ( ) = = 10 ) ; <nl> + REQUIRE ( v . capacity ( ) > = 10 ) ; <nl> + } <nl> + SECTION ( " resizing smaller changes size but not capacity " ) { <nl> + v . resize ( 0 ) ; <nl> + <nl> + REQUIRE ( v . size ( ) = = 0 ) ; <nl> + REQUIRE ( v . capacity ( ) > = 5 ) ; <nl> + <nl> + SECTION ( " We can use the ' swap trick ' to reset the capacity " ) { <nl> + std : : vector < TestType > empty ; <nl> + empty . swap ( v ) ; <nl> + <nl> + REQUIRE ( v . capacity ( ) = = 0 ) ; <nl> + } <nl> + } <nl> + SECTION ( " reserving bigger changes capacity but not size " ) { <nl> + v . reserve ( 10 ) ; <nl> + <nl> + REQUIRE ( v . size ( ) = = 5 ) ; <nl> + REQUIRE ( v . capacity ( ) > = 10 ) ; <nl> + } <nl> + SECTION ( " reserving smaller does not change size or capacity " ) { <nl> + v . reserve ( 0 ) ; <nl> + <nl> + REQUIRE ( v . size ( ) = = 5 ) ; <nl> + REQUIRE ( v . capacity ( ) > = 5 ) ; <nl> + } <nl> + } <nl> + <nl> / / https : / / github . com / philsquared / Catch / issues / 166 <nl> TEST_CASE ( " A couple of nested sections followed by a failure " , " [ failing ] [ . ] " ) { <nl> SECTION ( " Outer " ) <nl> | Merge pull request from JoeyGrajciar / type_params_tests_v2 | catchorg/Catch2 | 450dd0562b525f5feacb0a057c0c50b9035f39b5 | 2018-11-16T20:24:19Z |
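The Catch2 commit above wires TEMPLATE_TEST_CASE and TEMPLATE_TEST_CASE_METHOD into the self-test suite and its approved baselines. A minimal sketch of how those two macros are used on their own, following the patterns visible in the diff — the CATCH_CONFIG_MAIN single-header include and the Counter fixture name are assumptions made for illustration; TestType is the alias the macros provide for the current instantiation:

// Sketch: templated test cases as introduced by the commit above.
// Assumes the single-header Catch2 distribution (catch.hpp) is on the include path.
#define CATCH_CONFIG_MAIN
#include "catch.hpp"

#include <string>
#include <vector>

// Hypothetical fixture, mirroring Template_Fixture from the diff.
template <typename T>
struct Counter {
    Counter() : value(1) {}
    T value;
};

// One test case is instantiated per listed type; TestType names the current one.
TEMPLATE_TEST_CASE("vectors can be resized per element type", "[template][vector]",
                   int, float, std::string) {
    std::vector<TestType> v(5);
    REQUIRE(v.size() == 5);

    v.resize(10);
    REQUIRE(v.size() == 10);
}

// The _METHOD form derives the test from Counter<TestType>; members are reached
// through the qualified base name, as in the diff's Template_Fixture tests.
TEMPLATE_TEST_CASE_METHOD(Counter, "fixture members start initialised", "[template][class]",
                          int, float, double) {
    REQUIRE(Counter<TestType>::value == 1);
}

Each listed type produces a separately named test case (for example "- int", "- float"), which is why the approved console, JUnit, and XML baselines above gain one entry per type.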
mmm a / lib / IRGen / IRGenDebugInfo . cpp <nl> ppp b / lib / IRGen / IRGenDebugInfo . cpp <nl> <nl> # include " llvm / Config / config . h " <nl> # include " llvm / IR / DIBuilder . h " <nl> # include " llvm / IR / DebugInfo . h " <nl> + # include " llvm / IR / IntrinsicInst . h " <nl> # include " llvm / IR / Module . h " <nl> # include " llvm / Support / CommandLine . h " <nl> # include " llvm / Support / Debug . h " <nl> void IRGenDebugInfoImpl : : emitDbgIntrinsic ( <nl> auto * BB = Builder . GetInsertBlock ( ) ; <nl> <nl> / / An alloca may only be described by exactly one dbg . declare . <nl> - if ( isa < llvm : : AllocaInst > ( Storage ) & & llvm : : FindAllocaDbgDeclare ( Storage ) ) <nl> + if ( isa < llvm : : AllocaInst > ( Storage ) & & ! llvm : : FindDbgAddrUses ( Storage ) . empty ( ) ) <nl> return ; <nl> <nl> / / A dbg . declare is only meaningful if there is a single alloca for <nl> | master - next : Update IRGenDebugInfo . cpp for LLVM r313825 | apple/swift | 6b446a0d38629d0611b00ecf3a27d749fb12b9b4 | 2017-09-23T05:02:59Z |
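The IRGenDebugInfo change above swaps the removed llvm::FindAllocaDbgDeclare for llvm::FindDbgAddrUses when guarding against emitting a second debug intrinsic for the same alloca. A hedged sketch of that guard in isolation — the helper name allocaAlreadyDescribed is made up for illustration, and the exact element type returned by FindDbgAddrUses has shifted across LLVM releases, so only its emptiness is relied on here:

// Sketch of the duplicate-dbg.declare guard shown in the diff, in isolation.
// Assumes post-r313825 LLVM headers, where FindDbgAddrUses() replaces the
// removed FindAllocaDbgDeclare().
#include "llvm/IR/DebugInfo.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"

static bool allocaAlreadyDescribed(llvm::Value *Storage) {
  // Only an alloca is expected to carry at most one dbg.declare / dbg.addr.
  if (!llvm::isa<llvm::AllocaInst>(Storage))
    return false;
  // FindDbgAddrUses collects every dbg.declare / dbg.addr intrinsic pointing
  // at Storage; a non-empty result means the alloca is already described and
  // no further intrinsic should be emitted for it.
  return !llvm::FindDbgAddrUses(Storage).empty();
}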
mmm a / include / swift / Basic / DiverseList . h <nl> ppp b / include / swift / Basic / DiverseList . h <nl> class DiverseListBase { <nl> <nl> / / / An " abstract " base class for DiverseList < T > which does not <nl> / / / explicitly the preferred inline capacity . Most of the <nl> - / / / implementation is on this class . <nl> + / / / implementation is in this class . <nl> template < class T > class DiverseListImpl : private DiverseListBase { <nl> DiverseListImpl ( const DiverseListImpl < T > & other ) = delete ; <nl> DiverseListImpl ( DiverseListImpl < T > & & other ) = delete ; <nl> | Fix DiverseListImpl comment grammar | apple/swift | 25bda30cfd3f3339963d5e03f4cce5f6c69f076f | 2016-03-24T16:12:32Z |
new file mode 100644 <nl> index 00000000000 . . 62651053fbf <nl> mmm / dev / null <nl> ppp b / modules / dnn / src / graph_simplifier . cpp <nl> <nl> + / / This file is part of OpenCV project . <nl> + / / It is subject to the license terms in the LICENSE file found in the top - level directory <nl> + / / of this distribution and at http : / / opencv . org / license . html . <nl> + <nl> + / / Copyright ( C ) 2020 , Intel Corporation , all rights reserved . <nl> + / / Third party copyrights are property of their respective owners . <nl> + <nl> + # include " precomp . hpp " <nl> + <nl> + # include " graph_simplifier . hpp " <nl> + <nl> + # include < queue > <nl> + <nl> + namespace cv { namespace dnn { <nl> + <nl> + Subgraph : : ~ Subgraph ( ) { } <nl> + <nl> + int Subgraph : : addNodeToMatch ( const std : : string & op , int input_0 , int input_1 , <nl> + int input_2 , int input_3 ) <nl> + { <nl> + int nodeInputs [ ] = { input_0 , input_1 , input_2 , input_3 } ; <nl> + int numInputs = 0 ; <nl> + for ( int i = 0 ; i < 4 ; + + i ) <nl> + { <nl> + numInputs + = ( int ) ( nodeInputs [ i ] ! = - 1 ) ; <nl> + } <nl> + return addNodeToMatch ( op , std : : vector < int > ( & nodeInputs [ 0 ] , & nodeInputs [ 0 ] + numInputs ) ) ; <nl> + } <nl> + <nl> + int Subgraph : : addNodeToMatch ( const std : : string & op , const std : : vector < int > & inputs_ ) <nl> + { <nl> + for ( int i = 0 ; i < inputs_ . size ( ) ; + + i ) <nl> + { <nl> + CV_Assert ( inputs_ [ i ] < ( int ) nodes . size ( ) ) ; <nl> + } <nl> + nodes . push_back ( op ) ; <nl> + inputs . push_back ( inputs_ ) ; <nl> + return nodes . size ( ) - 1 ; <nl> + } <nl> + <nl> + void Subgraph : : setFusedNode ( const std : : string & op , int input_0 , int input_1 , <nl> + int input_2 , int input_3 , int input_4 , int input_5 ) <nl> + { <nl> + int nodeInputs [ ] = { input_0 , input_1 , input_2 , input_3 , input_4 , input_5 } ; <nl> + int numInputs = 0 ; <nl> + for ( int i = 0 ; i < 6 ; + + i ) <nl> + { <nl> + CV_Assert ( nodeInputs [ i ] < ( int ) nodes . size ( ) ) ; <nl> + numInputs + = ( int ) ( nodeInputs [ i ] ! = - 1 ) ; <nl> + } <nl> + setFusedNode ( op , std : : vector < int > ( & nodeInputs [ 0 ] , & nodeInputs [ 0 ] + numInputs ) ) ; <nl> + } <nl> + <nl> + void Subgraph : : setFusedNode ( const std : : string & op , const std : : vector < int > & inputs_ ) <nl> + { <nl> + fusedNodeInputs = inputs_ ; <nl> + fusedNodeOp = op ; <nl> + } <nl> + <nl> + int Subgraph : : getInputNodeId ( const Ptr < ImportGraphWrapper > & net , <nl> + const Ptr < ImportNodeWrapper > & node , <nl> + int inpId ) <nl> + { <nl> + CV_Assert ( inpId < node - > getNumInputs ( ) ) ; <nl> + std : : string name = node - > getInputName ( inpId ) ; <nl> + / / If operation produces several tensors , they are specified by index <nl> + / / after ' : ' character . In example , " input : 0 " . <nl> + name = name . substr ( 0 , name . rfind ( ' : ' ) ) ; <nl> + const int numNodes = net - > getNumNodes ( ) ; <nl> + for ( int i = 0 ; i < numNodes ; + + i ) <nl> + { <nl> + if ( net - > getNodeName ( i ) = = name ) <nl> + return i ; <nl> + } <nl> + CV_Error ( Error : : StsParseError , " Input node with name " + name + " not found " ) ; <nl> + } <nl> + <nl> + bool Subgraph : : match ( const Ptr < ImportGraphWrapper > & net , int nodeId , <nl> + std : : vector < int > & matchedNodesIds , <nl> + std : : vector < int > & targetNodesIds ) <nl> + { <nl> + matchedNodesIds . clear ( ) ; <nl> + targetNodesIds . 
clear ( ) ; <nl> + <nl> + std : : queue < int > nodesToMatch ; <nl> + std : : queue < int > targetNodes ; <nl> + nodesToMatch . push ( nodeId ) ; <nl> + targetNodes . push ( nodes . size ( ) - 1 ) ; <nl> + while ( ! nodesToMatch . empty ( ) ) <nl> + { <nl> + int nodeToMatch = nodesToMatch . front ( ) ; <nl> + int targetNodeId = targetNodes . front ( ) ; <nl> + nodesToMatch . pop ( ) ; <nl> + targetNodes . pop ( ) ; <nl> + <nl> + if ( std : : find ( matchedNodesIds . begin ( ) , matchedNodesIds . end ( ) , nodeToMatch ) ! = <nl> + matchedNodesIds . end ( ) ) <nl> + continue ; <nl> + <nl> + const Ptr < ImportNodeWrapper > node = net - > getNode ( nodeToMatch ) ; <nl> + if ( node - > getType ( ) ! = nodes [ targetNodeId ] ) <nl> + return false ; <nl> + <nl> + std : : vector < int > & inputNodes = inputs [ targetNodeId ] ; <nl> + if ( inputNodes . size ( ) ! = node - > getNumInputs ( ) ) <nl> + return false ; <nl> + <nl> + for ( int j = 0 ; j < inputNodes . size ( ) ; + + j ) <nl> + { <nl> + if ( nodes [ inputNodes [ j ] ] . empty ( ) ) / / Unknown input node type . <nl> + continue ; <nl> + nodeId = getInputNodeId ( net , node , j ) ; <nl> + const Ptr < ImportNodeWrapper > inpNode = net - > getNode ( nodeId ) ; <nl> + if ( inpNode - > getType ( ) ! = " Const " ) <nl> + { <nl> + nodesToMatch . push ( nodeId ) ; <nl> + targetNodes . push ( inputNodes [ j ] ) ; <nl> + } <nl> + else if ( nodes [ inputNodes [ j ] ] ! = " Const " ) <nl> + return false ; <nl> + } <nl> + matchedNodesIds . push_back ( nodeToMatch ) ; <nl> + targetNodesIds . push_back ( targetNodeId ) ; <nl> + } <nl> + <nl> + const int n = matchedNodesIds . size ( ) ; <nl> + std : : vector < std : : pair < int , int > > elements ( n ) ; <nl> + for ( int i = 0 ; i < n ; + + i ) <nl> + elements [ i ] = std : : make_pair ( matchedNodesIds [ i ] , targetNodesIds [ i ] ) ; <nl> + std : : sort ( elements . begin ( ) , elements . end ( ) ) ; <nl> + for ( int i = 0 ; i < n ; + + i ) <nl> + { <nl> + matchedNodesIds [ i ] = elements [ i ] . first ; <nl> + targetNodesIds [ i ] = elements [ i ] . second ; <nl> + } <nl> + return true ; <nl> + } <nl> + <nl> + void Subgraph : : replace ( const Ptr < ImportGraphWrapper > & net , const std : : vector < int > & matchedNodesIds , <nl> + const std : : vector < int > & targetNodesIds ) <nl> + { <nl> + / / Extract names of input nodes . <nl> + std : : vector < std : : string > inputsNames ( fusedNodeInputs . size ( ) ) ; <nl> + for ( int i = 0 ; i < fusedNodeInputs . size ( ) ; + + i ) <nl> + { <nl> + std : : string inpName ; <nl> + / / Find input node name looking at inputs of fused nodes . <nl> + for ( int j = 0 ; j < matchedNodesIds . size ( ) & & inpName . empty ( ) ; + + j ) <nl> + { <nl> + Ptr < ImportNodeWrapper > node = net - > getNode ( matchedNodesIds [ j ] ) ; <nl> + std : : vector < int > & inpIndices = inputs [ targetNodesIds [ j ] ] ; <nl> + <nl> + CV_Assert ( node - > getNumInputs ( ) = = inpIndices . size ( ) ) ; <nl> + for ( int k = 0 ; k < inpIndices . size ( ) ; + + k ) <nl> + { <nl> + if ( inpIndices [ k ] = = fusedNodeInputs [ i ] ) <nl> + { <nl> + inpName = node - > getInputName ( k ) ; <nl> + break ; <nl> + } <nl> + } <nl> + } <nl> + CV_Assert ( ! inpName . empty ( ) ) ; <nl> + inputsNames [ i ] = inpName ; <nl> + } <nl> + <nl> + / / Remove matched nodes except the last one . Indices in ascending order are expected . <nl> + Ptr < ImportNodeWrapper > node = net - > getNode ( matchedNodesIds . back ( ) ) ; <nl> + for ( int i = matchedNodesIds . 
size ( ) - 2 ; i > = 0 ; - - i ) <nl> + net - > removeNode ( matchedNodesIds [ i ] ) ; <nl> + <nl> + / / Modify the last node to be a fused one . <nl> + node - > setType ( fusedNodeOp ) ; <nl> + node - > setInputNames ( inputsNames ) ; <nl> + <nl> + std : : vector < Ptr < ImportNodeWrapper > > inputNodes ( inputsNames . size ( ) ) ; <nl> + for ( int i = 0 ; i < inputsNames . size ( ) ; + + i ) <nl> + { <nl> + inputNodes [ i ] = net - > getNode ( getInputNodeId ( net , node , i ) ) ; <nl> + } <nl> + finalize ( net , node , inputNodes ) ; <nl> + } <nl> + <nl> + void Subgraph : : finalize ( const Ptr < ImportGraphWrapper > & net , <nl> + const Ptr < ImportNodeWrapper > & fusedNode , <nl> + std : : vector < Ptr < ImportNodeWrapper > > & inputs ) { } <nl> + <nl> + void simplifySubgraphs ( const Ptr < ImportGraphWrapper > & net , <nl> + const std : : vector < Ptr < Subgraph > > & patterns ) <nl> + { <nl> + int numNodes = net - > getNumNodes ( ) ; <nl> + std : : vector < int > matchedNodesIds , targetNodesIds ; <nl> + for ( int i = 0 ; i < numNodes ; + + i ) <nl> + { <nl> + for ( int j = 0 ; j < patterns . size ( ) ; + + j ) <nl> + { <nl> + if ( patterns [ j ] - > match ( net , i , matchedNodesIds , targetNodesIds ) ) <nl> + { <nl> + patterns [ j ] - > replace ( net , matchedNodesIds , targetNodesIds ) ; <nl> + numNodes - = matchedNodesIds . size ( ) - 1 ; / / # matchedNodes removed and one added . <nl> + break ; <nl> + } <nl> + } <nl> + } <nl> + } <nl> + <nl> + } } / / namespace cv : : dnn <nl> new file mode 100644 <nl> index 00000000000 . . 8f3958ba52b <nl> mmm / dev / null <nl> ppp b / modules / dnn / src / graph_simplifier . hpp <nl> <nl> + / / This file is part of OpenCV project . <nl> + / / It is subject to the license terms in the LICENSE file found in the top - level directory <nl> + / / of this distribution and at http : / / opencv . org / license . html . <nl> + <nl> + / / Copyright ( C ) 2020 , Intel Corporation , all rights reserved . <nl> + / / Third party copyrights are property of their respective owners . <nl> + <nl> + # ifndef __OPENCV_DNN_GRAPH_SIMPLIFIER_HPP__ <nl> + # define __OPENCV_DNN_GRAPH_SIMPLIFIER_HPP__ <nl> + <nl> + # include < string > <nl> + <nl> + # include < opencv2 / core . hpp > <nl> + <nl> + namespace cv { namespace dnn { <nl> + <nl> + class ImportNodeWrapper <nl> + { <nl> + public : <nl> + virtual ~ ImportNodeWrapper ( ) { } ; <nl> + <nl> + virtual int getNumInputs ( ) const = 0 ; <nl> + <nl> + virtual std : : string getInputName ( int idx ) const = 0 ; <nl> + <nl> + virtual std : : string getType ( ) const = 0 ; <nl> + <nl> + virtual void setType ( const std : : string & type ) = 0 ; <nl> + <nl> + virtual void setInputNames ( const std : : vector < std : : string > & inputs ) = 0 ; <nl> + } ; <nl> + <nl> + class ImportGraphWrapper <nl> + { <nl> + public : <nl> + virtual ~ ImportGraphWrapper ( ) { } ; <nl> + <nl> + virtual Ptr < ImportNodeWrapper > getNode ( int idx ) const = 0 ; <nl> + <nl> + virtual int getNumNodes ( ) const = 0 ; <nl> + <nl> + virtual std : : string getNodeName ( int idx ) const = 0 ; <nl> + <nl> + virtual void removeNode ( int idx ) = 0 ; <nl> + } ; <nl> + <nl> + class Subgraph / / Interface to match and replace subgraphs . <nl> + { <nl> + public : <nl> + virtual ~ Subgraph ( ) ; <nl> + <nl> + / / Add a node to be matched in the origin graph . Specify ids of nodes that <nl> + / / are expected to be inputs . Returns id of a newly added node . 
<nl> + / / TODO : Replace inputs to std : : vector < int > in C + + 11 <nl> + int addNodeToMatch ( const std : : string & op , int input_0 = - 1 , int input_1 = - 1 , <nl> + int input_2 = - 1 , int input_3 = - 1 ) ; <nl> + <nl> + int addNodeToMatch ( const std : : string & op , const std : : vector < int > & inputs_ ) ; <nl> + <nl> + / / Specify resulting node . All the matched nodes in subgraph excluding <nl> + / / input nodes will be fused into this single node . <nl> + / / TODO : Replace inputs to std : : vector < int > in C + + 11 <nl> + void setFusedNode ( const std : : string & op , int input_0 = - 1 , int input_1 = - 1 , <nl> + int input_2 = - 1 , int input_3 = - 1 , int input_4 = - 1 , <nl> + int input_5 = - 1 ) ; <nl> + <nl> + void setFusedNode ( const std : : string & op , const std : : vector < int > & inputs_ ) ; <nl> + <nl> + static int getInputNodeId ( const Ptr < ImportGraphWrapper > & net , <nl> + const Ptr < ImportNodeWrapper > & node , <nl> + int inpId ) ; <nl> + <nl> + / / Match TensorFlow subgraph starting from < nodeId > with a set of nodes to be fused . <nl> + / / Const nodes are skipped during matching . Returns true if nodes are matched and can be fused . <nl> + virtual bool match ( const Ptr < ImportGraphWrapper > & net , int nodeId , <nl> + std : : vector < int > & matchedNodesIds , <nl> + std : : vector < int > & targetNodesIds ) ; <nl> + <nl> + / / Fuse matched subgraph . <nl> + void replace ( const Ptr < ImportGraphWrapper > & net , const std : : vector < int > & matchedNodesIds , <nl> + const std : : vector < int > & targetNodesIds ) ; <nl> + <nl> + virtual void finalize ( const Ptr < ImportGraphWrapper > & net , <nl> + const Ptr < ImportNodeWrapper > & fusedNode , <nl> + std : : vector < Ptr < ImportNodeWrapper > > & inputs ) ; <nl> + <nl> + private : <nl> + std : : vector < std : : string > nodes ; / / Nodes to be matched in the origin graph . <nl> + std : : vector < std : : vector < int > > inputs ; / / Connections of an every node to it ' s inputs . <nl> + <nl> + std : : string fusedNodeOp ; / / Operation name of resulting fused node . <nl> + std : : vector < int > fusedNodeInputs ; / / Inputs of fused node . <nl> + } ; <nl> + <nl> + void simplifySubgraphs ( const Ptr < ImportGraphWrapper > & net , <nl> + const std : : vector < Ptr < Subgraph > > & patterns ) ; <nl> + <nl> + } } / / namespace dnn , namespace cv <nl> + <nl> + # endif / / __OPENCV_DNN_GRAPH_SIMPLIFIER_HPP__ <nl> new file mode 100644 <nl> index 00000000000 . . f9f9194a22f <nl> mmm / dev / null <nl> ppp b / modules / dnn / src / onnx / onnx_graph_simplifier . cpp <nl> <nl> + / / This file is part of OpenCV project . <nl> + / / It is subject to the license terms in the LICENSE file found in the top - level directory <nl> + / / of this distribution and at http : / / opencv . org / license . html . <nl> + <nl> + / / Copyright ( C ) 2020 , Intel Corporation , all rights reserved . <nl> + / / Third party copyrights are property of their respective owners . <nl> + <nl> + # include " . . / precomp . hpp " <nl> + <nl> + # include " . . / graph_simplifier . hpp " <nl> + # include " onnx_graph_simplifier . hpp " <nl> + <nl> + # include < queue > <nl> + <nl> + namespace cv { namespace dnn { <nl> + CV__DNN_EXPERIMENTAL_NS_BEGIN <nl> + <nl> + / / This wrapper can behave differently for fake input nodes and real graph nodes . 
<nl> + class ONNXNodeWrapper : public ImportNodeWrapper <nl> + { <nl> + public : <nl> + ONNXNodeWrapper ( opencv_onnx : : NodeProto * _node = 0 ) : node ( _node ) { } <nl> + <nl> + virtual int getNumInputs ( ) const CV_OVERRIDE <nl> + { <nl> + return node ? node - > input_size ( ) : 0 ; <nl> + } <nl> + <nl> + virtual std : : string getInputName ( int idx ) const CV_OVERRIDE <nl> + { <nl> + CV_Assert_N ( node , idx < node - > input_size ( ) ) ; <nl> + return node - > input ( idx ) ; <nl> + } <nl> + <nl> + virtual std : : string getType ( ) const CV_OVERRIDE <nl> + { <nl> + return node ? node - > op_type ( ) : " " ; <nl> + } <nl> + <nl> + virtual void setType ( const std : : string & type ) CV_OVERRIDE <nl> + { <nl> + CV_Assert ( node ) ; <nl> + node - > set_op_type ( type ) ; <nl> + } <nl> + <nl> + virtual void setInputNames ( const std : : vector < std : : string > & inputs ) CV_OVERRIDE <nl> + { <nl> + CV_Assert ( node ) ; <nl> + node - > clear_input ( ) ; <nl> + for ( int i = 0 ; i < inputs . size ( ) ; + + i ) <nl> + node - > add_input ( inputs [ i ] ) ; <nl> + } <nl> + <nl> + opencv_onnx : : NodeProto * node ; <nl> + } ; <nl> + <nl> + / / ONNX graph ' s inputs are separate from nodes so we index them before the rest of nodes . <nl> + class ONNXGraphWrapper : public ImportGraphWrapper <nl> + { <nl> + public : <nl> + ONNXGraphWrapper ( opencv_onnx : : GraphProto & _net ) : net ( _net ) <nl> + { <nl> + numInputs = net . input_size ( ) ; <nl> + } <nl> + <nl> + virtual Ptr < ImportNodeWrapper > getNode ( int idx ) const CV_OVERRIDE <nl> + { <nl> + opencv_onnx : : NodeProto * node = 0 ; <nl> + if ( idx > = numInputs ) <nl> + node = net . mutable_node ( idx - numInputs ) ; <nl> + return makePtr < ONNXNodeWrapper > ( node ) ; <nl> + } <nl> + <nl> + virtual int getNumNodes ( ) const CV_OVERRIDE <nl> + { <nl> + return numInputs + net . node_size ( ) ; <nl> + } <nl> + <nl> + virtual std : : string getNodeName ( int idx ) const CV_OVERRIDE <nl> + { <nl> + if ( idx < numInputs ) <nl> + return net . input ( idx ) . name ( ) ; <nl> + else <nl> + return net . node ( idx - numInputs ) . output ( 0 ) ; <nl> + } <nl> + <nl> + virtual void removeNode ( int idx ) CV_OVERRIDE <nl> + { <nl> + CV_Assert ( idx > = numInputs ) ; <nl> + net . mutable_node ( ) - > DeleteSubrange ( idx - numInputs , 1 ) ; <nl> + } <nl> + <nl> + private : <nl> + int numInputs ; <nl> + opencv_onnx : : GraphProto & net ; <nl> + } ; <nl> + <nl> + class SoftMaxSubgraph : public Subgraph <nl> + { <nl> + public : <nl> + SoftMaxSubgraph ( ) <nl> + { <nl> + int input = addNodeToMatch ( " " ) ; <nl> + int inpExp = addNodeToMatch ( " Exp " , input ) ; <nl> + int sum = addNodeToMatch ( " ReduceSum " , inpExp ) ; <nl> + addNodeToMatch ( " Div " , inpExp , sum ) ; <nl> + setFusedNode ( " Softmax " , input ) ; <nl> + } <nl> + <nl> + virtual bool match ( const Ptr < ImportGraphWrapper > & net , int nodeId , <nl> + std : : vector < int > & matchedNodesIds , <nl> + std : : vector < int > & targetNodesIds ) CV_OVERRIDE <nl> + { <nl> + if ( Subgraph : : match ( net , nodeId , matchedNodesIds , targetNodesIds ) ) <nl> + { <nl> + Ptr < ImportNodeWrapper > sum = net - > getNode ( matchedNodesIds [ 1 ] ) ; <nl> + opencv_onnx : : NodeProto * node = sum . dynamicCast < ONNXNodeWrapper > ( ) - > node ; <nl> + <nl> + for ( int i = 0 ; i < node - > attribute_size ( ) ; i + + ) <nl> + { <nl> + opencv_onnx : : AttributeProto attr = node - > attribute ( i ) ; <nl> + if ( attr . name ( ) ! = " axes " ) <nl> + continue ; <nl> + if ( attr . ints_size ( ) ! 
= 1 ) <nl> + CV_Error ( Error : : StsNotImplemented , format ( " Unexpected number of axes : % d " , attr . ints_size ( ) ) ) ; <nl> + axis = attr . ints ( 0 ) ; <nl> + return true ; <nl> + } <nl> + CV_Error ( Error : : StsNotImplemented , " Missed axes attribute " ) ; <nl> + } <nl> + return false ; <nl> + } <nl> + <nl> + virtual void finalize ( const Ptr < ImportGraphWrapper > & , <nl> + const Ptr < ImportNodeWrapper > & fusedNode , <nl> + std : : vector < Ptr < ImportNodeWrapper > > & ) CV_OVERRIDE <nl> + { <nl> + opencv_onnx : : NodeProto * node = fusedNode . dynamicCast < ONNXNodeWrapper > ( ) - > node ; <nl> + opencv_onnx : : AttributeProto * attr = node - > add_attribute ( ) ; <nl> + attr - > set_name ( " axis " ) ; <nl> + attr - > set_i ( axis ) ; <nl> + } <nl> + <nl> + private : <nl> + int axis ; <nl> + } ; <nl> + <nl> + void simplifySubgraphs ( opencv_onnx : : GraphProto & net ) <nl> + { <nl> + std : : vector < Ptr < Subgraph > > subgraphs ; <nl> + subgraphs . push_back ( makePtr < SoftMaxSubgraph > ( ) ) ; <nl> + <nl> + simplifySubgraphs ( Ptr < ImportGraphWrapper > ( new ONNXGraphWrapper ( net ) ) , subgraphs ) ; <nl> + } <nl> + <nl> + CV__DNN_EXPERIMENTAL_NS_END <nl> + } } / / namespace cv : : dnn <nl> new file mode 100644 <nl> index 00000000000 . . 52b4e5ecc07 <nl> mmm / dev / null <nl> ppp b / modules / dnn / src / onnx / onnx_graph_simplifier . hpp <nl> <nl> + / / This file is part of OpenCV project . <nl> + / / It is subject to the license terms in the LICENSE file found in the top - level directory <nl> + / / of this distribution and at http : / / opencv . org / license . html . <nl> + <nl> + / / Copyright ( C ) 2020 , Intel Corporation , all rights reserved . <nl> + / / Third party copyrights are property of their respective owners . <nl> + <nl> + # ifndef __OPENCV_DNN_ONNX_SIMPLIFIER_HPP__ <nl> + # define __OPENCV_DNN_ONNX_SIMPLIFIER_HPP__ <nl> + <nl> + # include " . . / precomp . hpp " <nl> + <nl> + # if defined ( __GNUC__ ) & & __GNUC__ > = 5 <nl> + # pragma GCC diagnostic push <nl> + # pragma GCC diagnostic ignored " - Wsuggest - override " <nl> + # endif <nl> + # include " opencv - onnx . pb . h " <nl> + # if defined ( __GNUC__ ) & & __GNUC__ > = 5 <nl> + # pragma GCC diagnostic pop <nl> + # endif <nl> + <nl> + namespace cv { namespace dnn { <nl> + CV__DNN_EXPERIMENTAL_NS_BEGIN <nl> + <nl> + void simplifySubgraphs ( opencv_onnx : : GraphProto & net ) ; <nl> + <nl> + CV__DNN_EXPERIMENTAL_NS_END <nl> + } } / / namespace dnn , namespace cv <nl> + <nl> + # endif / / __OPENCV_DNN_ONNX_SIMPLIFIER_HPP__ <nl> mmm a / modules / dnn / src / onnx / onnx_importer . cpp <nl> ppp b / modules / dnn / src / onnx / onnx_importer . cpp <nl> <nl> # pragma GCC diagnostic pop <nl> # endif <nl> <nl> + # include " onnx_graph_simplifier . hpp " <nl> + <nl> namespace cv { <nl> namespace dnn { <nl> CV__DNN_EXPERIMENTAL_NS_BEGIN <nl> void ONNXImporter : : populateNet ( Net dstNet ) <nl> { <nl> CV_Assert ( model_proto . has_graph ( ) ) ; <nl> opencv_onnx : : GraphProto graph_proto = model_proto . graph ( ) ; <nl> + <nl> + simplifySubgraphs ( graph_proto ) ; <nl> + <nl> std : : map < std : : string , Mat > constBlobs = getGraphTensors ( graph_proto ) ; <nl> / / List of internal blobs shapes . <nl> std : : map < std : : string , MatShape > outShapes ; <nl> mmm a / modules / dnn / src / tensorflow / tf_graph_simplifier . cpp <nl> ppp b / modules / dnn / src / tensorflow / tf_graph_simplifier . cpp <nl> <nl> <nl> # ifdef HAVE_PROTOBUF <nl> <nl> + # include " . . / graph_simplifier . 
hpp " <nl> # include " tf_graph_simplifier . hpp " <nl> # include < queue > <nl> <nl> CV__DNN_EXPERIMENTAL_NS_BEGIN <nl> using : : google : : protobuf : : RepeatedField ; <nl> using : : google : : protobuf : : MapPair ; <nl> <nl> - class Subgraph / / Interface to match and replace TensorFlow subgraphs . <nl> + class TFNodeWrapper : public ImportNodeWrapper <nl> { <nl> public : <nl> - virtual ~ Subgraph ( ) { } <nl> + TFNodeWrapper ( tensorflow : : NodeDef * _node ) : node ( _node ) { } <nl> <nl> - / / Add a node to be matched in the origin graph . Specify ids of nodes that <nl> - / / are expected to be inputs . Returns id of a newly added node . <nl> - / / TODO : Replace inputs to std : : vector < int > in C + + 11 <nl> - int addNodeToMatch ( const std : : string & op , int input_0 = - 1 , int input_1 = - 1 , <nl> - int input_2 = - 1 , int input_3 = - 1 ) <nl> + virtual int getNumInputs ( ) const CV_OVERRIDE <nl> { <nl> - int nodeInputs [ ] = { input_0 , input_1 , input_2 , input_3 } ; <nl> - int numInputs = 0 ; <nl> - for ( int i = 0 ; i < 4 ; + + i ) <nl> - { <nl> - numInputs + = ( int ) ( nodeInputs [ i ] ! = - 1 ) ; <nl> - } <nl> - return addNodeToMatch ( op , std : : vector < int > ( & nodeInputs [ 0 ] , & nodeInputs [ 0 ] + numInputs ) ) ; <nl> + return node - > input_size ( ) ; <nl> } <nl> <nl> - int addNodeToMatch ( const std : : string & op , const std : : vector < int > & inputs_ ) <nl> + virtual std : : string getInputName ( int idx ) const CV_OVERRIDE <nl> { <nl> - for ( int i = 0 ; i < inputs_ . size ( ) ; + + i ) <nl> - { <nl> - CV_Assert ( inputs_ [ i ] < ( int ) nodes . size ( ) ) ; <nl> - } <nl> - nodes . push_back ( op ) ; <nl> - inputs . push_back ( inputs_ ) ; <nl> - return nodes . size ( ) - 1 ; <nl> + return node - > input ( idx ) ; <nl> } <nl> <nl> - / / Specify resulting node . All the matched nodes in subgraph excluding <nl> - / / input nodes will be fused into this single node . <nl> - / / TODO : Replace inputs to std : : vector < int > in C + + 11 <nl> - void setFusedNode ( const std : : string & op , int input_0 = - 1 , int input_1 = - 1 , <nl> - int input_2 = - 1 , int input_3 = - 1 , int input_4 = - 1 , <nl> - int input_5 = - 1 ) <nl> + virtual std : : string getType ( ) const CV_OVERRIDE <nl> { <nl> - int nodeInputs [ ] = { input_0 , input_1 , input_2 , input_3 , input_4 , input_5 } ; <nl> - int numInputs = 0 ; <nl> - for ( int i = 0 ; i < 6 ; + + i ) <nl> - { <nl> - CV_Assert ( nodeInputs [ i ] < ( int ) nodes . size ( ) ) ; <nl> - numInputs + = ( int ) ( nodeInputs [ i ] ! = - 1 ) ; <nl> - } <nl> - setFusedNode ( op , std : : vector < int > ( & nodeInputs [ 0 ] , & nodeInputs [ 0 ] + numInputs ) ) ; <nl> + return node - > op ( ) ; <nl> } <nl> <nl> - void setFusedNode ( const std : : string & op , const std : : vector < int > & inputs_ ) <nl> + virtual void setType ( const std : : string & type ) CV_OVERRIDE <nl> { <nl> - fusedNodeInputs = inputs_ ; <nl> - fusedNodeOp = op ; <nl> + node - > set_op ( type ) ; <nl> } <nl> <nl> - static int getInputNodeId ( const tensorflow : : GraphDef & net , <nl> - const tensorflow : : NodeDef & node , <nl> - int inpId ) <nl> + virtual void setInputNames ( const std : : vector < std : : string > & inputs ) CV_OVERRIDE <nl> { <nl> - CV_Assert ( inpId < node . input_size ( ) ) ; <nl> - std : : string name = node . input ( inpId ) ; <nl> - / / If operation produces several tensors , they are specified by index <nl> - / / after ' : ' character . In example , " input : 0 " . <nl> - name = name . substr ( 0 , name . 
rfind ( ' : ' ) ) ; <nl> - const int numNodes = net . node_size ( ) ; <nl> - for ( int i = 0 ; i < numNodes ; + + i ) <nl> - { <nl> - if ( net . node ( i ) . name ( ) = = name ) <nl> - return i ; <nl> - } <nl> - CV_Error ( Error : : StsParseError , " Input node with name " + name + " not found " ) ; <nl> + node - > clear_input ( ) ; <nl> + for ( int i = 0 ; i < inputs . size ( ) ; + + i ) <nl> + node - > add_input ( inputs [ i ] ) ; <nl> } <nl> <nl> - / / Match TensorFlow subgraph starting from < nodeId > with a set of nodes to be fused . <nl> - / / Const nodes are skipped during matching . Returns true if nodes are matched and can be fused . <nl> - virtual bool match ( const tensorflow : : GraphDef & net , int nodeId , <nl> - std : : vector < int > & matchedNodesIds , <nl> - std : : vector < int > & targetNodesIds ) <nl> - { <nl> - matchedNodesIds . clear ( ) ; <nl> - targetNodesIds . clear ( ) ; <nl> - <nl> - std : : queue < int > nodesToMatch ; <nl> - std : : queue < int > targetNodes ; <nl> - nodesToMatch . push ( nodeId ) ; <nl> - targetNodes . push ( nodes . size ( ) - 1 ) ; <nl> - while ( ! nodesToMatch . empty ( ) ) <nl> - { <nl> - int nodeToMatch = nodesToMatch . front ( ) ; <nl> - int targetNodeId = targetNodes . front ( ) ; <nl> - nodesToMatch . pop ( ) ; <nl> - targetNodes . pop ( ) ; <nl> - <nl> - if ( std : : find ( matchedNodesIds . begin ( ) , matchedNodesIds . end ( ) , nodeToMatch ) ! = <nl> - matchedNodesIds . end ( ) ) <nl> - continue ; <nl> - <nl> - const tensorflow : : NodeDef & node = net . node ( nodeToMatch ) ; <nl> - if ( node . op ( ) ! = nodes [ targetNodeId ] ) <nl> - return false ; <nl> - <nl> - std : : vector < int > & inputNodes = inputs [ targetNodeId ] ; <nl> - if ( inputNodes . size ( ) ! = node . input_size ( ) ) <nl> - return false ; <nl> + tensorflow : : NodeDef * node ; <nl> + } ; <nl> <nl> - for ( int j = 0 ; j < inputNodes . size ( ) ; + + j ) <nl> - { <nl> - if ( nodes [ inputNodes [ j ] ] . empty ( ) ) / / Unknown input node type . <nl> - continue ; <nl> - nodeId = getInputNodeId ( net , node , j ) ; <nl> - const tensorflow : : NodeDef & inpNode = net . node ( nodeId ) ; <nl> - if ( inpNode . op ( ) ! = " Const " ) <nl> - { <nl> - nodesToMatch . push ( nodeId ) ; <nl> - targetNodes . push ( inputNodes [ j ] ) ; <nl> - } <nl> - else if ( nodes [ inputNodes [ j ] ] ! = " Const " ) <nl> - return false ; <nl> - } <nl> - matchedNodesIds . push_back ( nodeToMatch ) ; <nl> - targetNodesIds . push_back ( targetNodeId ) ; <nl> - } <nl> + class TFGraphWrapper : public ImportGraphWrapper <nl> + { <nl> + public : <nl> + TFGraphWrapper ( tensorflow : : GraphDef & _net ) : net ( _net ) { } <nl> <nl> - const int n = matchedNodesIds . size ( ) ; <nl> - std : : vector < std : : pair < int , int > > elements ( n ) ; <nl> - for ( int i = 0 ; i < n ; + + i ) <nl> - elements [ i ] = std : : make_pair ( matchedNodesIds [ i ] , targetNodesIds [ i ] ) ; <nl> - std : : sort ( elements . begin ( ) , elements . end ( ) ) ; <nl> - for ( int i = 0 ; i < n ; + + i ) <nl> - { <nl> - matchedNodesIds [ i ] = elements [ i ] . first ; <nl> - targetNodesIds [ i ] = elements [ i ] . second ; <nl> - } <nl> - return true ; <nl> + virtual Ptr < ImportNodeWrapper > getNode ( int idx ) const CV_OVERRIDE <nl> + { <nl> + return makePtr < TFNodeWrapper > ( net . mutable_node ( idx ) ) ; <nl> } <nl> <nl> - / / Fuse matched subgraph . 
<nl> - void replace ( tensorflow : : GraphDef & net , const std : : vector < int > & matchedNodesIds , <nl> - const std : : vector < int > & targetNodesIds ) <nl> + virtual int getNumNodes ( ) const CV_OVERRIDE <nl> { <nl> - / / Extract names of input nodes . <nl> - std : : vector < std : : string > inputsNames ( fusedNodeInputs . size ( ) ) ; <nl> - for ( int i = 0 ; i < fusedNodeInputs . size ( ) ; + + i ) <nl> - { <nl> - std : : string inpName ; <nl> - / / Find input node name looking at inputs of fused nodes . <nl> - for ( int j = 0 ; j < matchedNodesIds . size ( ) & & inpName . empty ( ) ; + + j ) <nl> - { <nl> - const tensorflow : : NodeDef & node = net . node ( matchedNodesIds [ j ] ) ; <nl> - std : : vector < int > & inpIndices = inputs [ targetNodesIds [ j ] ] ; <nl> - <nl> - CV_Assert ( node . input_size ( ) = = inpIndices . size ( ) ) ; <nl> - for ( int k = 0 ; k < inpIndices . size ( ) ; + + k ) <nl> - { <nl> - if ( inpIndices [ k ] = = fusedNodeInputs [ i ] ) <nl> - { <nl> - inpName = node . input ( k ) ; <nl> - break ; <nl> - } <nl> - } <nl> - } <nl> - CV_Assert ( ! inpName . empty ( ) ) ; <nl> - inputsNames [ i ] = inpName ; <nl> - } <nl> - <nl> - / / Remove matched nodes except the last one . Indices in ascending order are expected . <nl> - tensorflow : : NodeDef * node = net . mutable_node ( matchedNodesIds . back ( ) ) ; <nl> - for ( int i = matchedNodesIds . size ( ) - 2 ; i > = 0 ; - - i ) <nl> - net . mutable_node ( ) - > DeleteSubrange ( matchedNodesIds [ i ] , 1 ) ; <nl> + return net . node_size ( ) ; <nl> + } <nl> <nl> - / / Modify the last node to be a fused one . <nl> - node - > set_op ( fusedNodeOp ) ; <nl> - node - > clear_input ( ) ; <nl> - for ( int i = 0 ; i < inputsNames . size ( ) ; + + i ) <nl> - { <nl> - node - > add_input ( inputsNames [ i ] ) ; <nl> - } <nl> + virtual std : : string getNodeName ( int idx ) const CV_OVERRIDE <nl> + { <nl> + return net . node ( idx ) . name ( ) ; <nl> + } <nl> <nl> - std : : vector < tensorflow : : NodeDef * > inputNodes ( inputsNames . size ( ) ) ; <nl> - for ( int i = 0 ; i < inputsNames . size ( ) ; + + i ) <nl> - { <nl> - inputNodes [ i ] = net . mutable_node ( getInputNodeId ( net , * node , i ) ) ; <nl> - } <nl> - finalize ( net , node , inputNodes ) ; <nl> + virtual void removeNode ( int idx ) CV_OVERRIDE <nl> + { <nl> + net . mutable_node ( ) - > DeleteSubrange ( idx , 1 ) ; <nl> } <nl> <nl> - virtual void finalize ( tensorflow : : GraphDef & , tensorflow : : NodeDef * , <nl> - std : : vector < tensorflow : : NodeDef * > & ) { } <nl> + tensorflow : : GraphDef & net ; <nl> + } ; <nl> <nl> - private : <nl> - std : : vector < std : : string > nodes ; / / Nodes to be matched in the origin graph . <nl> - std : : vector < std : : vector < int > > inputs ; / / Connections of an every node to it ' s inputs . <nl> + class TFSubgraph : public Subgraph <nl> + { <nl> + virtual void finalize ( const Ptr < ImportGraphWrapper > & netWrapper , <nl> + const Ptr < ImportNodeWrapper > & fusedNodeWrapper , <nl> + std : : vector < Ptr < ImportNodeWrapper > > & inputs ) CV_OVERRIDE <nl> + { <nl> + std : : vector < tensorflow : : NodeDef * > inputNodes ( inputs . size ( ) ) ; <nl> + for ( int i = 0 ; i < inputs . size ( ) ; + + i ) <nl> + inputNodes [ i ] = inputs [ i ] . dynamicCast < TFNodeWrapper > ( ) - > node ; <nl> + finalize ( netWrapper . dynamicCast < TFGraphWrapper > ( ) - > net , <nl> + fusedNodeWrapper . 
dynamicCast < TFNodeWrapper > ( ) - > node , inputNodes ) ; <nl> + } <nl> <nl> - std : : string fusedNodeOp ; / / Operation name of resulting fused node . <nl> - std : : vector < int > fusedNodeInputs ; / / Inputs of fused node . <nl> + virtual void finalize ( tensorflow : : GraphDef & , tensorflow : : NodeDef * fusedNode , <nl> + std : : vector < tensorflow : : NodeDef * > & inputNodes ) { } <nl> } ; <nl> <nl> - class BatchNormSubgraph : public Subgraph <nl> + class BatchNormSubgraph : public TFSubgraph <nl> { <nl> public : <nl> BatchNormSubgraph ( ) <nl> class BatchNormSubgraph : public Subgraph <nl> } <nl> } ; <nl> <nl> - class BatchNormNoGammaSubgraph : public Subgraph <nl> + class BatchNormNoGammaSubgraph : public TFSubgraph <nl> { <nl> public : <nl> BatchNormNoGammaSubgraph ( ) <nl> class ReLU6KerasSubgraph : public Subgraph <nl> setFusedNode ( " Relu6 " , input ) ; <nl> } <nl> <nl> - virtual bool match ( const tensorflow : : GraphDef & net , int nodeId , <nl> + virtual bool match ( const Ptr < ImportGraphWrapper > & net , int nodeId , <nl> std : : vector < int > & matchedNodesIds , <nl> std : : vector < int > & targetNodesIds ) CV_OVERRIDE <nl> { <nl> if ( ! Subgraph : : match ( net , nodeId , matchedNodesIds , targetNodesIds ) ) <nl> return false ; <nl> - Mat maxValue = getTensorContent ( net . node ( matchedNodesIds . front ( ) + 1 ) . attr ( ) . at ( " value " ) . tensor ( ) ) ; <nl> + tensorflow : : NodeDef * node = net - > getNode ( matchedNodesIds . front ( ) + 1 ) . dynamicCast < TFNodeWrapper > ( ) - > node ; <nl> + Mat maxValue = getTensorContent ( node - > attr ( ) . at ( " value " ) . tensor ( ) ) ; <nl> return maxValue . type ( ) = = CV_32FC1 & & maxValue . total ( ) = = 1 & & maxValue . at < float > ( 0 ) = = 6 ; <nl> } <nl> } ; <nl> <nl> / / Keras ' reshape stores output shape in separate Const nodes by one value . <nl> / / Need to merge them into a single Const node . <nl> - class ReshapeKerasSubgraph : public Subgraph <nl> + class ReshapeKerasSubgraph : public TFSubgraph <nl> { <nl> public : <nl> ReshapeKerasSubgraph ( int _numOutDims ) : numOutDims ( _numOutDims ) <nl> class ReshapeKerasSubgraph : public Subgraph <nl> setFusedNode ( " Reshape " , ids ) ; <nl> } <nl> <nl> - virtual bool match ( const tensorflow : : GraphDef & net , int nodeId , <nl> + virtual bool match ( const Ptr < ImportGraphWrapper > & net , int nodeId , <nl> std : : vector < int > & matchedNodesIds , <nl> std : : vector < int > & targetNodesIds ) CV_OVERRIDE <nl> { <nl> - const tensorflow : : NodeDef & node = net . node ( nodeId ) ; <nl> - if ( node . input_size ( ) = = 0 ) <nl> + Ptr < ImportNodeWrapper > node = net - > getNode ( nodeId ) ; <nl> + if ( node - > getNumInputs ( ) = = 0 ) <nl> return false ; <nl> <nl> - inpName = node . 
input ( 0 ) ; <nl> + inpName = node - > getInputName ( 0 ) ; <nl> return Subgraph : : match ( net , nodeId , matchedNodesIds , targetNodesIds ) ; <nl> } <nl> <nl> class L2NormalizeSubgraph : public Subgraph <nl> } <nl> } ; <nl> <nl> - class DeconvolutionValidKerasSubgraph : public Subgraph <nl> + class DeconvolutionValidKerasSubgraph : public TFSubgraph <nl> { <nl> public : <nl> DeconvolutionValidKerasSubgraph ( ) <nl> class DeconvolutionValidKerasSubgraph : public Subgraph <nl> } <nl> } ; <nl> <nl> - class DeconvolutionSameKerasSubgraph : public Subgraph <nl> + class DeconvolutionSameKerasSubgraph : public TFSubgraph <nl> { <nl> public : <nl> DeconvolutionSameKerasSubgraph ( ) <nl> class ResizeBilinearSubgraph : public Subgraph <nl> } ; <nl> <nl> / / In case of resizing by factor . <nl> - class UpsamplingKerasSubgraph : public Subgraph <nl> + class UpsamplingKerasSubgraph : public TFSubgraph <nl> { <nl> public : <nl> UpsamplingKerasSubgraph ( const std : : string & type ) <nl> class SoftMaxSlimV2Subgraph : public Subgraph <nl> } <nl> } ; <nl> <nl> - class KerasMVNSubgraph : public Subgraph <nl> + class KerasMVNSubgraph : public TFSubgraph <nl> { <nl> public : <nl> KerasMVNSubgraph ( ) <nl> void simplifySubgraphs ( tensorflow : : GraphDef & net ) <nl> subgraphs . push_back ( Ptr < Subgraph > ( new ReshapeAsShapeSubgraph ( ) ) ) ; <nl> subgraphs . push_back ( Ptr < Subgraph > ( new KerasMVNSubgraph ( ) ) ) ; <nl> <nl> - int numNodes = net . node_size ( ) ; <nl> - std : : vector < int > matchedNodesIds , targetNodesIds ; <nl> - for ( int i = 0 ; i < numNodes ; + + i ) <nl> - { <nl> - for ( int j = 0 ; j < subgraphs . size ( ) ; + + j ) <nl> - { <nl> - if ( subgraphs [ j ] - > match ( net , i , matchedNodesIds , targetNodesIds ) ) <nl> - { <nl> - subgraphs [ j ] - > replace ( net , matchedNodesIds , targetNodesIds ) ; <nl> - numNodes - = matchedNodesIds . size ( ) - 1 ; / / # matchedNodes removed and one added . <nl> - break ; <nl> - } <nl> - } <nl> - } <nl> + simplifySubgraphs ( Ptr < ImportGraphWrapper > ( new TFGraphWrapper ( net ) ) , subgraphs ) ; <nl> } <nl> <nl> void RemoveIdentityOps ( tensorflow : : GraphDef & net ) <nl> mmm a / modules / dnn / test / test_onnx_importer . cpp <nl> ppp b / modules / dnn / test / test_onnx_importer . cpp <nl> TEST_P ( Test_ONNX_layers , Softmax ) <nl> { <nl> testONNXModels ( " softmax " ) ; <nl> testONNXModels ( " log_softmax " , npy , 0 , 0 , false , false ) ; <nl> + testONNXModels ( " softmax_unfused " ) ; <nl> } <nl> <nl> TEST_P ( Test_ONNX_layers , Split_EltwiseMax ) <nl> | ONNX graphs simplifier | opencv/opencv | c1c84d2fd1b4be0724546cd4a65e351bc29c1652 | 2020-01-14T09:45:49Z |
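The opencv/opencv commit above generalizes the TensorFlow-only subgraph fuser into the backend-neutral Subgraph / ImportGraphWrapper interface and ships one ONNX pattern, SoftMaxSubgraph. Below is a minimal sketch of how a further ONNX pattern could be declared against that interface; the SwishSubgraph class, the "Swish" fused-op name, and the file placement are illustrative assumptions, while addNodeToMatch, setFusedNode, Subgraph and makePtr come from the diff itself.

    // Sketch only: collapses x * Sigmoid(x) into a single node, mirroring SoftMaxSubgraph.
    // Assumes it is compiled next to onnx_graph_simplifier.cpp inside modules/dnn/src.
    #include "../graph_simplifier.hpp"

    namespace cv { namespace dnn {

    class SwishSubgraph : public Subgraph
    {
    public:
        SwishSubgraph()
        {
            int input   = addNodeToMatch("");                // any producer of x (type unknown)
            int sigmoid = addNodeToMatch("Sigmoid", input);  // Sigmoid(x)
            addNodeToMatch("Mul", input, sigmoid);           // x * Sigmoid(x), root of the pattern (added last)
            setFusedNode("Swish", input);                    // replacement op name is hypothetical
        }
    };

    }}  // namespace cv::dnn

Registering it would then be a single extra line in simplifySubgraphs(opencv_onnx::GraphProto&), namely subgraphs.push_back(makePtr<SwishSubgraph>()), after which every occurrence of the pattern is rewritten before the importer walks the graph.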
mmm a / CMakeLists . txt <nl> ppp b / CMakeLists . txt <nl> target_sources ( leveldb <nl> " $ { PROJECT_SOURCE_DIR } / db / version_set . h " <nl> " $ { PROJECT_SOURCE_DIR } / db / write_batch_internal . h " <nl> " $ { PROJECT_SOURCE_DIR } / db / write_batch . cc " <nl> - " $ { PROJECT_SOURCE_DIR } / port / atomic_pointer . h " <nl> " $ { PROJECT_SOURCE_DIR } / port / port_stdcxx . h " <nl> " $ { PROJECT_SOURCE_DIR } / port / port . h " <nl> " $ { PROJECT_SOURCE_DIR } / port / thread_annotations . h " <nl> | Deleted dangling reference to deleted atomic_pointer . h . | google/leveldb | 9ce30510d482f5b2fa2965201453f0fc914f700c | 2019-03-11T20:41:25Z |
mmm a / lib / ClangImporter / ImportDecl . cpp <nl> ppp b / lib / ClangImporter / ImportDecl . cpp <nl> void ClangImporter : : Implementation : : finishedImportingEntity ( ) { <nl> <nl> void ClangImporter : : Implementation : : finishPendingActions ( ) { <nl> while ( ! RegisteredExternalDecls . empty ( ) ) { <nl> - Decl * D = RegisteredExternalDecls . front ( ) ; <nl> - RegisteredExternalDecls . pop ( ) ; <nl> + Decl * D = RegisteredExternalDecls . pop_back_val ( ) ; <nl> SwiftContext . addedExternalDecl ( D ) ; <nl> } <nl> } <nl> mmm a / lib / ClangImporter / ImporterImpl . h <nl> ppp b / lib / ClangImporter / ImporterImpl . h <nl> <nl> # include " llvm / ADT / APSInt . h " <nl> # include " llvm / ADT / DenseMap . h " <nl> # include " llvm / ADT / IntrusiveRefCntPtr . h " <nl> - # include < queue > <nl> # include < set > <nl> <nl> namespace clang { <nl> class ClangImporter : : Implementation : public LazyMemberLoader { <nl> Optional < bool > checkedFoundationModule ; <nl> <nl> / / / External Decls that we have imported but not passed to the ASTContext yet . <nl> - std : : queue < Decl * > RegisteredExternalDecls ; <nl> + SmallVector < Decl * , 4 > RegisteredExternalDecls ; <nl> <nl> unsigned NumCurrentImportingEntities = 0 ; <nl> <nl> class ClangImporter : : Implementation : public LazyMemberLoader { <nl> <nl> public : <nl> void registerExternalDecl ( Decl * D ) { <nl> - RegisteredExternalDecls . push ( D ) ; <nl> + RegisteredExternalDecls . push_back ( D ) ; <nl> } <nl> <nl> / / / \ brief Retrieve the Clang AST context . <nl> | [ ClangImporter ] When passing imported declarations to ASTContext , mimic the order that they would | apple/swift | ffbd199770472f11b046751576ee253f89e1d232 | 2014-02-09T18:27:39Z |
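The apple/swift commit above swaps the std::queue that buffered RegisteredExternalDecls for a SmallVector drained with pop_back_val(), so pending declarations are now handed to the ASTContext newest-first instead of oldest-first (the row's commit message is truncated, so the full rationale is not visible here). The standalone sketch below only illustrates that mechanical difference in draining order; it uses std::vector from the standard library as a stand-in for llvm::SmallVector.

    // Sketch: FIFO draining (old code path) versus LIFO draining (new code path).
    #include <iostream>
    #include <queue>
    #include <vector>

    int main() {
        // Old shape: std::queue, take from the front.
        std::queue<int> pending;
        for (int d : {1, 2, 3}) pending.push(d);
        std::cout << "queue drains:  ";
        while (!pending.empty()) { std::cout << pending.front() << ' '; pending.pop(); }
        std::cout << '\n';                       // prints 1 2 3

        // New shape: vector, take from the back (SmallVector::pop_back_val in the patch).
        std::vector<int> pending2 = {1, 2, 3};
        std::cout << "vector drains: ";
        while (!pending2.empty()) { std::cout << pending2.back() << ' '; pending2.pop_back(); }
        std::cout << '\n';                       // prints 3 2 1
    }

A SmallVector also avoids the heap-backed deque sitting behind std::queue for the common small case, which is presumably why the header drops the <queue> include entirely.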
mmm a / utils / gyb . py <nl> ppp b / utils / gyb . py <nl> def execute_template ( ast , line_directive = ' ' , * * local_bindings ) : <nl> . . . THIS SHOULD NOT APPEAR IN THE OUTPUT <nl> . . . ' ' ' ) <nl> > > > out = execute_template ( ast , line_directive = ' / / # sourceLocation ' , x = 1 ) <nl> + > > > out = out . replace ( os . path . abspath ( os . sep ) + ' dummy . file ' , " DUMMY - FILE " ) <nl> > > > print ( out , end = " " ) <nl> - / / # sourceLocation ( file : " / dummy . file " , line : 1 ) <nl> + / / # sourceLocation ( file : " DUMMY - FILE " , line : 1 ) <nl> Nothing <nl> - / / # sourceLocation ( file : " / dummy . file " , line : 4 ) <nl> + / / # sourceLocation ( file : " DUMMY - FILE " , line : 4 ) <nl> 0 <nl> - / / # sourceLocation ( file : " / dummy . file " , line : 4 ) <nl> + / / # sourceLocation ( file : " DUMMY - FILE " , line : 4 ) <nl> 1 <nl> - / / # sourceLocation ( file : " / dummy . file " , line : 4 ) <nl> + / / # sourceLocation ( file : " DUMMY - FILE " , line : 4 ) <nl> 2 <nl> <nl> > > > ast = parse_template ( ' / dummy . file ' , text = <nl> def execute_template ( ast , line_directive = ' ' , * * local_bindings ) : <nl> . . . $ { a } <nl> . . . ' ' ' ) <nl> > > > out = execute_template ( ast , line_directive = ' / / # sourceLocation ' , x = 1 ) <nl> + > > > out = out . replace ( os . path . abspath ( os . sep ) + ' dummy . file ' , " DUMMY - FILE " ) <nl> > > > print ( out , end = " " ) <nl> - / / # sourceLocation ( file : " / dummy . file " , line : 1 ) <nl> + / / # sourceLocation ( file : " DUMMY - FILE " , line : 1 ) <nl> Nothing <nl> - / / # sourceLocation ( file : " / dummy . file " , line : 6 ) <nl> + / / # sourceLocation ( file : " DUMMY - FILE " , line : 6 ) <nl> [ 0 , 1 , 2 ] <nl> " " " <nl> execution_context = ExecutionContext ( <nl> | Merge pull request from hughbe / gyb - windows - doctests | apple/swift | f751cb0355d6e428b0c547d368089a7dfd4bc331 | 2017-03-21T01:25:19Z |
mmm a / db / repl / rs_initiate . cpp <nl> ppp b / db / repl / rs_initiate . cpp <nl> namespace mongo { <nl> * / <nl> void checkAllMembersUpForConfigChange ( const ReplSetConfig & cfg , bool initial ) { <nl> int me = 0 ; <nl> - for ( vector < ReplSetConfig : : MemberCfg > : : const_iterator i = cfg . members . begin ( ) ; i ! = cfg . members . end ( ) ; i + + ) <nl> - if ( i - > h . isSelf ( ) ) <nl> + for ( vector < ReplSetConfig : : MemberCfg > : : const_iterator i = cfg . members . begin ( ) ; i ! = cfg . members . end ( ) ; i + + ) { <nl> + if ( i - > h . isSelf ( ) ) { <nl> me + + ; <nl> + if ( ! i - > potentiallyHot ( ) ) { <nl> + uasserted ( 13420 , " initiation and reconfiguration of a replica set must be sent to a node that can become primary " ) ; <nl> + } <nl> + } <nl> + } <nl> uassert ( 13278 , " bad config ? " , me < = 1 ) ; <nl> uassert ( 13279 , " can ' t find self in the replset config " , me = = 1 ) ; <nl> <nl> | must initiate at a primary | mongodb/mongo | d28fb4689d45ebb9fd3a6b2217be874a1878e5d6 | 2010-07-29T18:17:54Z |
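The mongodb/mongo commit above tightens checkAllMembersUpForConfigChange so that initiate/reconfig is rejected with error 13420 when the receiving node appears in the proposed config but can never become primary. A compact standalone restatement of that check is sketched below; MemberCfg, potentiallyHot() and uasserted() are simplified stand-ins for the real mongod types, with potentiallyHot() approximated as priority > 0.

    // Sketch with stand-in types; not mongod source.
    #include <iostream>
    #include <stdexcept>
    #include <string>
    #include <vector>

    struct MemberCfg {
        bool self = false;
        int priority = 1;                                  // priority 0: can never be elected
        bool isSelf() const { return self; }
        bool potentiallyHot() const { return priority > 0; }
    };

    static void uasserted(int code, const std::string& msg) {
        throw std::runtime_error(std::to_string(code) + " " + msg);
    }

    // Same shape as the patched loop: exactly one member may be "self",
    // and that member must be electable for initiate/reconfig to proceed.
    void checkSelfCanInitiate(const std::vector<MemberCfg>& members) {
        int me = 0;
        for (const MemberCfg& m : members) {
            if (m.isSelf()) {
                ++me;
                if (!m.potentiallyHot())
                    uasserted(13420, "initiation and reconfiguration of a replica set must "
                                     "be sent to a node that can become primary");
            }
        }
        if (me > 1)  uasserted(13278, "bad config?");
        if (me != 1) uasserted(13279, "can't find self in the replset config");
    }

    int main() {
        std::vector<MemberCfg> cfg(3);
        cfg[0].self = true;
        cfg[0].priority = 0;                               // self is not electable
        try { checkSelfCanInitiate(cfg); }
        catch (const std::exception& e) { std::cout << e.what() << '\n'; return 1; }
        return 0;
    }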
mmm a / js / common / modules / @ arangodb / general - graph . js <nl> ppp b / js / common / modules / @ arangodb / general - graph . js <nl> var registerCompatibilityFunctions = function ( ) { <nl> } , false ) ; <nl> } ; <nl> <nl> + var fixWeight = function ( options ) { <nl> + if ( ! options . hasOwnProperty ( " weightAttribute " ) & & options . hasOwnProperty ( " weight " ) ) { <nl> + options . weightAttribute = options . weight ; <nl> + } <nl> + } ; <nl> + <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / / @ brief transform a string into an array . <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> Graph . prototype . _shortestPath = function ( startVertexExample , endVertexExample , o <nl> query + = " ANY " ; <nl> } <nl> query + = ` SHORTEST_PATH start TO target GRAPH @ graphName ` ; <nl> - if ( options . hasOwnProperty ( " weight " ) & & options . hasOwnProperty ( " defaultWeight " ) ) { <nl> - query + = ` OPTIONS { weight : @ attribute , defaultWeight : @ default } <nl> + fixWeight ( options ) ; <nl> + if ( options . hasOwnProperty ( " weightAttribute " ) & & options . hasOwnProperty ( " defaultWeight " ) ) { <nl> + query + = ` OPTIONS { weightAttribute : @ attribute , defaultWeight : @ default } <nl> RETURN { <nl> v : v , <nl> e : e , <nl> d : IS_NULL ( e ) ? 0 : ( IS_NUMBER ( e [ @ attribute ] ) ? e [ @ attribute ] : @ default ) <nl> } ) ` ; <nl> - bindVars . attribute = options . weight ; <nl> + bindVars . attribute = options . weightAttribute ; <nl> bindVars . default = options . defaultWeight ; <nl> } else { <nl> query + = " RETURN { v : v , e : e , d : IS_NULL ( e ) ? 0 : 1 } ) " ; <nl> Graph . prototype . _distanceTo = function ( startVertexExample , endVertexExample , opt <nl> query + = " ANY " ; <nl> } <nl> query + = ` SHORTEST_PATH start TO target GRAPH @ graphName ` ; <nl> - if ( options . hasOwnProperty ( " weight " ) & & options . hasOwnProperty ( " defaultWeight " ) ) { <nl> - query + = ` OPTIONS { weight : @ attribute , defaultWeight : @ default } <nl> + fixWeight ( options ) ; <nl> + if ( options . hasOwnProperty ( " weightAttribute " ) & & options . hasOwnProperty ( " defaultWeight " ) ) { <nl> + query + = ` OPTIONS { weightAttribute : @ attribute , defaultWeight : @ default } <nl> FILTER e ! = null RETURN IS_NUMBER ( e [ @ attribute ] ) ? e [ @ attribute ] : @ default ) ` ; <nl> - bindVars . attribute = options . weight ; <nl> + bindVars . attribute = options . weightAttribute ; <nl> bindVars . default = options . defaultWeight ; <nl> } else { <nl> query + = " FILTER e ! = null RETURN 1 ) " ; <nl> Graph . prototype . _absoluteEccentricity = function ( vertexExample , options ) { <nl> query + = " ANY " ; <nl> } <nl> query + = " SHORTEST_PATH start TO target GRAPH @ graphName " ; <nl> - if ( options . hasOwnProperty ( " weight " ) & & options . hasOwnProperty ( " defaultWeight " ) ) { <nl> - query + = ` OPTIONS { weight : @ attribute , defaultWeight : @ default } <nl> + fixWeight ( options ) ; <nl> + if ( options . hasOwnProperty ( " weightAttribute " ) & & options . hasOwnProperty ( " defaultWeight " ) ) { <nl> + query + = ` OPTIONS { weightAttribute : @ attribute , defaultWeight : @ default } <nl> FILTER e ! = null RETURN IS_NUMBER ( e [ @ attribute ] ) ? e [ @ attribute ] : @ default ) ` ; <nl> - bindVars . 
attribute = options . weight ; <nl> + bindVars . attribute = options . weightAttribute ; <nl> bindVars . default = options . defaultWeight ; <nl> } else { <nl> query + = " FILTER e ! = null RETURN 1 ) " ; <nl> Graph . prototype . _farness = Graph . prototype . _absoluteCloseness = function ( vertexE <nl> query + = " ANY " ; <nl> } <nl> query + = " SHORTEST_PATH start TO target GRAPH @ graphName " ; <nl> - if ( options . hasOwnProperty ( " weight " ) & & options . hasOwnProperty ( " defaultWeight " ) ) { <nl> - query + = ` OPTIONS { weight : @ attribute , defaultWeight : @ default } <nl> + fixWeight ( options ) ; <nl> + if ( options . hasOwnProperty ( " weightAttribute " ) & & options . hasOwnProperty ( " defaultWeight " ) ) { <nl> + query + = ` OPTIONS { weightAttribute : @ attribute , defaultWeight : @ default } <nl> FILTER e ! = null RETURN IS_NUMBER ( e [ @ attribute ] ) ? e [ @ attribute ] : @ default ) ` ; <nl> - bindVars . attribute = options . weight ; <nl> + bindVars . attribute = options . weightAttribute ; <nl> bindVars . default = options . defaultWeight ; <nl> } else { <nl> query + = " FILTER e ! = null RETURN 1 ) " ; <nl> Graph . prototype . _absoluteBetweenness = function ( example , options ) { <nl> query + = " ANY " ; <nl> } <nl> query + = " SHORTEST_PATH start TO target GRAPH @ graphName " ; <nl> - if ( options . hasOwnProperty ( " weight " ) & & options . hasOwnProperty ( " defaultWeight " ) ) { <nl> - query + = ` OPTIONS { weight : @ attribute , defaultWeight : @ default } ` ; <nl> - bindVars . attribute = options . weight ; <nl> + fixWeight ( options ) ; <nl> + if ( options . hasOwnProperty ( " weightAttribute " ) & & options . hasOwnProperty ( " defaultWeight " ) ) { <nl> + query + = ` OPTIONS { weightAttribute : @ attribute , defaultWeight : @ default } ` ; <nl> + bindVars . attribute = options . weightAttribute ; <nl> bindVars . default = options . defaultWeight ; <nl> } <nl> query + = ` <nl> Graph . prototype . _radius = function ( options ) { <nl> query + = " ANY " ; <nl> } <nl> query + = " SHORTEST_PATH s TO t GRAPH @ graphName " ; <nl> - if ( options . hasOwnProperty ( " weight " ) & & options . hasOwnProperty ( " defaultWeight " ) ) { <nl> - query + = ` OPTIONS { weight : @ attribute , defaultWeight : @ default } <nl> + fixWeight ( options ) ; <nl> + if ( options . hasOwnProperty ( " weightAttribute " ) & & options . hasOwnProperty ( " defaultWeight " ) ) { <nl> + query + = ` OPTIONS { weightAttribute : @ attribute , defaultWeight : @ default } <nl> FILTER e ! = null RETURN IS_NUMBER ( e [ @ attribute ] ) ? e [ @ attribute ] : @ default ) ` ; <nl> - bindVars . attribute = options . weight ; <nl> + bindVars . attribute = options . weightAttribute ; <nl> bindVars . default = options . defaultWeight ; <nl> } else { <nl> query + = " FILTER e ! = null RETURN 1 ) " ; <nl> Graph . prototype . _diameter = function ( options ) { <nl> " graphName " : this . __name <nl> } ; <nl> query + = " SHORTEST_PATH s TO t GRAPH @ graphName " ; <nl> - if ( options . hasOwnProperty ( " weight " ) & & options . hasOwnProperty ( " defaultWeight " ) ) { <nl> - query + = ` OPTIONS { weight : @ attribute , defaultWeight : @ default } <nl> + fixWeight ( options ) ; <nl> + if ( options . hasOwnProperty ( " weightAttribute " ) & & options . hasOwnProperty ( " defaultWeight " ) ) { <nl> + query + = ` OPTIONS { weightAttribute : @ attribute , defaultWeight : @ default } <nl> FILTER e ! = null RETURN IS_NUMBER ( e [ @ attribute ] ) ? e [ @ attribute ] : @ default ) ) ` ; <nl> - bindVars . 
attribute = options . weight ; <nl> + bindVars . attribute = options . weightAttribute ; <nl> bindVars . default = options . defaultWeight ; <nl> } else { <nl> query + = " RETURN 1 ) ) - 1 " ; <nl> | Graph : fixed weight / weightAttribute name | arangodb/arangodb | 925f96d5c2754ff90ffaff05af25623692ba9a53 | 2016-06-16T07:21:25Z |
mmm a / src / builtins . h <nl> ppp b / src / builtins . h <nl> enum BuiltinExtraArguments { <nl> V ( SAR_STRONG , 1 ) \ <nl> V ( SHR , 1 ) \ <nl> V ( SHR_STRONG , 1 ) \ <nl> - V ( IN , 1 ) \ <nl> V ( INSTANCE_OF , 1 ) \ <nl> V ( CALL_NON_FUNCTION , 0 ) \ <nl> V ( CALL_NON_FUNCTION_AS_CONSTRUCTOR , 0 ) \ <nl> mmm a / src / compiler / js - generic - lowering . cc <nl> ppp b / src / compiler / js - generic - lowering . cc <nl> void JSGenericLowering : : LowerJSDeleteProperty ( Node * node ) { <nl> <nl> <nl> void JSGenericLowering : : LowerJSHasProperty ( Node * node ) { <nl> - ReplaceWithBuiltinCall ( node , Builtins : : IN , 2 ) ; <nl> + ReplaceWithRuntimeCall ( node , Runtime : : kHasProperty ) ; <nl> } <nl> <nl> <nl> mmm a / src / compiler / js - intrinsic - lowering . cc <nl> ppp b / src / compiler / js - intrinsic - lowering . cc <nl> Reduction JSIntrinsicLowering : : Reduce ( Node * node ) { <nl> return ReduceIsInstanceType ( node , JS_TYPED_ARRAY_TYPE ) ; <nl> case Runtime : : kInlineIsFunction : <nl> return ReduceIsInstanceType ( node , JS_FUNCTION_TYPE ) ; <nl> - case Runtime : : kInlineIsNonNegativeSmi : <nl> - return ReduceIsNonNegativeSmi ( node ) ; <nl> case Runtime : : kInlineIsRegExp : <nl> return ReduceIsInstanceType ( node , JS_REGEXP_TYPE ) ; <nl> case Runtime : : kInlineIsSmi : <nl> Reduction JSIntrinsicLowering : : ReduceIsInstanceType ( <nl> } <nl> <nl> <nl> - Reduction JSIntrinsicLowering : : ReduceIsNonNegativeSmi ( Node * node ) { <nl> - return Change ( node , simplified ( ) - > ObjectIsNonNegativeSmi ( ) ) ; <nl> - } <nl> - <nl> - <nl> Reduction JSIntrinsicLowering : : ReduceIsSmi ( Node * node ) { <nl> return Change ( node , simplified ( ) - > ObjectIsSmi ( ) ) ; <nl> } <nl> mmm a / src / compiler / js - intrinsic - lowering . h <nl> ppp b / src / compiler / js - intrinsic - lowering . h <nl> class JSIntrinsicLowering final : public AdvancedReducer { <nl> Reduction ReduceIncrementStatsCounter ( Node * node ) ; <nl> Reduction ReduceIsMinusZero ( Node * node ) ; <nl> Reduction ReduceIsInstanceType ( Node * node , InstanceType instance_type ) ; <nl> - Reduction ReduceIsNonNegativeSmi ( Node * node ) ; <nl> Reduction ReduceIsSmi ( Node * node ) ; <nl> Reduction ReduceJSValueGetValue ( Node * node ) ; <nl> Reduction ReduceMapGetInstanceType ( Node * node ) ; <nl> mmm a / src / compiler / typer . cc <nl> ppp b / src / compiler / typer . cc <nl> Bounds Typer : : Visitor : : TypeJSCallFunction ( Node * node ) { <nl> Bounds Typer : : Visitor : : TypeJSCallRuntime ( Node * node ) { <nl> switch ( CallRuntimeParametersOf ( node - > op ( ) ) . id ( ) ) { <nl> case Runtime : : kInlineIsSmi : <nl> - case Runtime : : kInlineIsNonNegativeSmi : <nl> case Runtime : : kInlineIsArray : <nl> case Runtime : : kInlineIsDate : <nl> case Runtime : : kInlineIsTypedArray : <nl> mmm a / src / full - codegen / arm / full - codegen - arm . cc <nl> ppp b / src / full - codegen / arm / full - codegen - arm . 
cc <nl> void FullCodeGenerator : : EmitIsSmi ( CallRuntime * expr ) { <nl> } <nl> <nl> <nl> - void FullCodeGenerator : : EmitIsNonNegativeSmi ( CallRuntime * expr ) { <nl> - ZoneList < Expression * > * args = expr - > arguments ( ) ; <nl> - DCHECK ( args - > length ( ) = = 1 ) ; <nl> - <nl> - VisitForAccumulatorValue ( args - > at ( 0 ) ) ; <nl> - <nl> - Label materialize_true , materialize_false ; <nl> - Label * if_true = NULL ; <nl> - Label * if_false = NULL ; <nl> - Label * fall_through = NULL ; <nl> - context ( ) - > PrepareTest ( & materialize_true , & materialize_false , <nl> - & if_true , & if_false , & fall_through ) ; <nl> - <nl> - PrepareForBailoutBeforeSplit ( expr , true , if_true , if_false ) ; <nl> - __ NonNegativeSmiTst ( r0 ) ; <nl> - Split ( eq , if_true , if_false , fall_through ) ; <nl> - <nl> - context ( ) - > Plug ( if_true , if_false ) ; <nl> - } <nl> - <nl> - <nl> void FullCodeGenerator : : EmitIsObject ( CallRuntime * expr ) { <nl> ZoneList < Expression * > * args = expr - > arguments ( ) ; <nl> DCHECK ( args - > length ( ) = = 1 ) ; <nl> void FullCodeGenerator : : VisitCompareOperation ( CompareOperation * expr ) { <nl> switch ( op ) { <nl> case Token : : IN : <nl> VisitForStackValue ( expr - > right ( ) ) ; <nl> - __ InvokeBuiltin ( Builtins : : IN , CALL_FUNCTION ) ; <nl> + __ CallRuntime ( Runtime : : kHasProperty , 2 ) ; <nl> PrepareForBailoutBeforeSplit ( expr , false , NULL , NULL ) ; <nl> __ LoadRoot ( ip , Heap : : kTrueValueRootIndex ) ; <nl> __ cmp ( r0 , ip ) ; <nl> mmm a / src / full - codegen / arm64 / full - codegen - arm64 . cc <nl> ppp b / src / full - codegen / arm64 / full - codegen - arm64 . cc <nl> void FullCodeGenerator : : EmitIsSmi ( CallRuntime * expr ) { <nl> } <nl> <nl> <nl> - void FullCodeGenerator : : EmitIsNonNegativeSmi ( CallRuntime * expr ) { <nl> - ZoneList < Expression * > * args = expr - > arguments ( ) ; <nl> - DCHECK ( args - > length ( ) = = 1 ) ; <nl> - <nl> - VisitForAccumulatorValue ( args - > at ( 0 ) ) ; <nl> - <nl> - Label materialize_true , materialize_false ; <nl> - Label * if_true = NULL ; <nl> - Label * if_false = NULL ; <nl> - Label * fall_through = NULL ; <nl> - context ( ) - > PrepareTest ( & materialize_true , & materialize_false , <nl> - & if_true , & if_false , & fall_through ) ; <nl> - <nl> - uint64_t sign_mask = V8_UINT64_C ( 1 ) < < ( kSmiShift + kSmiValueSize - 1 ) ; <nl> - <nl> - PrepareForBailoutBeforeSplit ( expr , true , if_true , if_false ) ; <nl> - __ TestAndSplit ( x0 , kSmiTagMask | sign_mask , if_true , if_false , fall_through ) ; <nl> - <nl> - context ( ) - > Plug ( if_true , if_false ) ; <nl> - } <nl> - <nl> - <nl> void FullCodeGenerator : : EmitIsObject ( CallRuntime * expr ) { <nl> ZoneList < Expression * > * args = expr - > arguments ( ) ; <nl> DCHECK ( args - > length ( ) = = 1 ) ; <nl> void FullCodeGenerator : : VisitCompareOperation ( CompareOperation * expr ) { <nl> switch ( op ) { <nl> case Token : : IN : <nl> VisitForStackValue ( expr - > right ( ) ) ; <nl> - __ InvokeBuiltin ( Builtins : : IN , CALL_FUNCTION ) ; <nl> + __ CallRuntime ( Runtime : : kHasProperty , 2 ) ; <nl> PrepareForBailoutBeforeSplit ( expr , false , NULL , NULL ) ; <nl> __ CompareRoot ( x0 , Heap : : kTrueValueRootIndex ) ; <nl> Split ( eq , if_true , if_false , fall_through ) ; <nl> mmm a / src / full - codegen / full - codegen . h <nl> ppp b / src / full - codegen / full - codegen . 
h <nl> class FullCodeGenerator : public AstVisitor { <nl> <nl> # define FOR_EACH_FULL_CODE_INTRINSIC ( F ) \ <nl> F ( IsSmi ) \ <nl> - F ( IsNonNegativeSmi ) \ <nl> F ( IsArray ) \ <nl> F ( IsTypedArray ) \ <nl> F ( IsRegExp ) \ <nl> mmm a / src / full - codegen / ia32 / full - codegen - ia32 . cc <nl> ppp b / src / full - codegen / ia32 / full - codegen - ia32 . cc <nl> void FullCodeGenerator : : EmitIsSmi ( CallRuntime * expr ) { <nl> } <nl> <nl> <nl> - void FullCodeGenerator : : EmitIsNonNegativeSmi ( CallRuntime * expr ) { <nl> - ZoneList < Expression * > * args = expr - > arguments ( ) ; <nl> - DCHECK ( args - > length ( ) = = 1 ) ; <nl> - <nl> - VisitForAccumulatorValue ( args - > at ( 0 ) ) ; <nl> - <nl> - Label materialize_true , materialize_false ; <nl> - Label * if_true = NULL ; <nl> - Label * if_false = NULL ; <nl> - Label * fall_through = NULL ; <nl> - context ( ) - > PrepareTest ( & materialize_true , & materialize_false , <nl> - & if_true , & if_false , & fall_through ) ; <nl> - <nl> - PrepareForBailoutBeforeSplit ( expr , true , if_true , if_false ) ; <nl> - __ test ( eax , Immediate ( kSmiTagMask | 0x80000000 ) ) ; <nl> - Split ( zero , if_true , if_false , fall_through ) ; <nl> - <nl> - context ( ) - > Plug ( if_true , if_false ) ; <nl> - } <nl> - <nl> - <nl> void FullCodeGenerator : : EmitIsObject ( CallRuntime * expr ) { <nl> ZoneList < Expression * > * args = expr - > arguments ( ) ; <nl> DCHECK ( args - > length ( ) = = 1 ) ; <nl> void FullCodeGenerator : : VisitCompareOperation ( CompareOperation * expr ) { <nl> switch ( op ) { <nl> case Token : : IN : <nl> VisitForStackValue ( expr - > right ( ) ) ; <nl> - __ InvokeBuiltin ( Builtins : : IN , CALL_FUNCTION ) ; <nl> + __ CallRuntime ( Runtime : : kHasProperty , 2 ) ; <nl> PrepareForBailoutBeforeSplit ( expr , false , NULL , NULL ) ; <nl> __ cmp ( eax , isolate ( ) - > factory ( ) - > true_value ( ) ) ; <nl> Split ( equal , if_true , if_false , fall_through ) ; <nl> mmm a / src / full - codegen / mips / full - codegen - mips . cc <nl> ppp b / src / full - codegen / mips / full - codegen - mips . 
cc <nl> void FullCodeGenerator : : EmitIsSmi ( CallRuntime * expr ) { <nl> } <nl> <nl> <nl> - void FullCodeGenerator : : EmitIsNonNegativeSmi ( CallRuntime * expr ) { <nl> - ZoneList < Expression * > * args = expr - > arguments ( ) ; <nl> - DCHECK ( args - > length ( ) = = 1 ) ; <nl> - <nl> - VisitForAccumulatorValue ( args - > at ( 0 ) ) ; <nl> - <nl> - Label materialize_true , materialize_false ; <nl> - Label * if_true = NULL ; <nl> - Label * if_false = NULL ; <nl> - Label * fall_through = NULL ; <nl> - context ( ) - > PrepareTest ( & materialize_true , & materialize_false , <nl> - & if_true , & if_false , & fall_through ) ; <nl> - <nl> - PrepareForBailoutBeforeSplit ( expr , true , if_true , if_false ) ; <nl> - __ NonNegativeSmiTst ( v0 , at ) ; <nl> - Split ( eq , at , Operand ( zero_reg ) , if_true , if_false , fall_through ) ; <nl> - <nl> - context ( ) - > Plug ( if_true , if_false ) ; <nl> - } <nl> - <nl> - <nl> void FullCodeGenerator : : EmitIsObject ( CallRuntime * expr ) { <nl> ZoneList < Expression * > * args = expr - > arguments ( ) ; <nl> DCHECK ( args - > length ( ) = = 1 ) ; <nl> void FullCodeGenerator : : VisitCompareOperation ( CompareOperation * expr ) { <nl> switch ( op ) { <nl> case Token : : IN : <nl> VisitForStackValue ( expr - > right ( ) ) ; <nl> - __ InvokeBuiltin ( Builtins : : IN , CALL_FUNCTION ) ; <nl> + __ CallRuntime ( Runtime : : kHasProperty , 2 ) ; <nl> PrepareForBailoutBeforeSplit ( expr , false , NULL , NULL ) ; <nl> __ LoadRoot ( t0 , Heap : : kTrueValueRootIndex ) ; <nl> Split ( eq , v0 , Operand ( t0 ) , if_true , if_false , fall_through ) ; <nl> mmm a / src / full - codegen / mips64 / full - codegen - mips64 . cc <nl> ppp b / src / full - codegen / mips64 / full - codegen - mips64 . cc <nl> void FullCodeGenerator : : EmitIsSmi ( CallRuntime * expr ) { <nl> } <nl> <nl> <nl> - void FullCodeGenerator : : EmitIsNonNegativeSmi ( CallRuntime * expr ) { <nl> - ZoneList < Expression * > * args = expr - > arguments ( ) ; <nl> - DCHECK ( args - > length ( ) = = 1 ) ; <nl> - <nl> - VisitForAccumulatorValue ( args - > at ( 0 ) ) ; <nl> - <nl> - Label materialize_true , materialize_false ; <nl> - Label * if_true = NULL ; <nl> - Label * if_false = NULL ; <nl> - Label * fall_through = NULL ; <nl> - context ( ) - > PrepareTest ( & materialize_true , & materialize_false , <nl> - & if_true , & if_false , & fall_through ) ; <nl> - <nl> - PrepareForBailoutBeforeSplit ( expr , true , if_true , if_false ) ; <nl> - __ NonNegativeSmiTst ( v0 , at ) ; <nl> - Split ( eq , at , Operand ( zero_reg ) , if_true , if_false , fall_through ) ; <nl> - <nl> - context ( ) - > Plug ( if_true , if_false ) ; <nl> - } <nl> - <nl> - <nl> void FullCodeGenerator : : EmitIsObject ( CallRuntime * expr ) { <nl> ZoneList < Expression * > * args = expr - > arguments ( ) ; <nl> DCHECK ( args - > length ( ) = = 1 ) ; <nl> void FullCodeGenerator : : VisitCompareOperation ( CompareOperation * expr ) { <nl> switch ( op ) { <nl> case Token : : IN : <nl> VisitForStackValue ( expr - > right ( ) ) ; <nl> - __ InvokeBuiltin ( Builtins : : IN , CALL_FUNCTION ) ; <nl> + __ CallRuntime ( Runtime : : kHasProperty , 2 ) ; <nl> PrepareForBailoutBeforeSplit ( expr , false , NULL , NULL ) ; <nl> __ LoadRoot ( a4 , Heap : : kTrueValueRootIndex ) ; <nl> Split ( eq , v0 , Operand ( a4 ) , if_true , if_false , fall_through ) ; <nl> mmm a / src / full - codegen / ppc / full - codegen - ppc . cc <nl> ppp b / src / full - codegen / ppc / full - codegen - ppc . 
cc <nl> void FullCodeGenerator : : EmitIsSmi ( CallRuntime * expr ) { <nl> } <nl> <nl> <nl> - void FullCodeGenerator : : EmitIsNonNegativeSmi ( CallRuntime * expr ) { <nl> - ZoneList < Expression * > * args = expr - > arguments ( ) ; <nl> - DCHECK ( args - > length ( ) = = 1 ) ; <nl> - <nl> - VisitForAccumulatorValue ( args - > at ( 0 ) ) ; <nl> - <nl> - Label materialize_true , materialize_false ; <nl> - Label * if_true = NULL ; <nl> - Label * if_false = NULL ; <nl> - Label * fall_through = NULL ; <nl> - context ( ) - > PrepareTest ( & materialize_true , & materialize_false , & if_true , <nl> - & if_false , & fall_through ) ; <nl> - <nl> - PrepareForBailoutBeforeSplit ( expr , true , if_true , if_false ) ; <nl> - __ TestIfPositiveSmi ( r3 , r0 ) ; <nl> - Split ( eq , if_true , if_false , fall_through , cr0 ) ; <nl> - <nl> - context ( ) - > Plug ( if_true , if_false ) ; <nl> - } <nl> - <nl> - <nl> void FullCodeGenerator : : EmitIsObject ( CallRuntime * expr ) { <nl> ZoneList < Expression * > * args = expr - > arguments ( ) ; <nl> DCHECK ( args - > length ( ) = = 1 ) ; <nl> void FullCodeGenerator : : VisitCompareOperation ( CompareOperation * expr ) { <nl> switch ( op ) { <nl> case Token : : IN : <nl> VisitForStackValue ( expr - > right ( ) ) ; <nl> - __ InvokeBuiltin ( Builtins : : IN , CALL_FUNCTION ) ; <nl> + __ CallRuntime ( Runtime : : kHasProperty , 2 ) ; <nl> PrepareForBailoutBeforeSplit ( expr , false , NULL , NULL ) ; <nl> __ LoadRoot ( ip , Heap : : kTrueValueRootIndex ) ; <nl> __ cmp ( r3 , ip ) ; <nl> mmm a / src / full - codegen / x64 / full - codegen - x64 . cc <nl> ppp b / src / full - codegen / x64 / full - codegen - x64 . cc <nl> void FullCodeGenerator : : EmitIsSmi ( CallRuntime * expr ) { <nl> } <nl> <nl> <nl> - void FullCodeGenerator : : EmitIsNonNegativeSmi ( CallRuntime * expr ) { <nl> - ZoneList < Expression * > * args = expr - > arguments ( ) ; <nl> - DCHECK ( args - > length ( ) = = 1 ) ; <nl> - <nl> - VisitForAccumulatorValue ( args - > at ( 0 ) ) ; <nl> - <nl> - Label materialize_true , materialize_false ; <nl> - Label * if_true = NULL ; <nl> - Label * if_false = NULL ; <nl> - Label * fall_through = NULL ; <nl> - context ( ) - > PrepareTest ( & materialize_true , & materialize_false , <nl> - & if_true , & if_false , & fall_through ) ; <nl> - <nl> - PrepareForBailoutBeforeSplit ( expr , true , if_true , if_false ) ; <nl> - Condition non_negative_smi = masm ( ) - > CheckNonNegativeSmi ( rax ) ; <nl> - Split ( non_negative_smi , if_true , if_false , fall_through ) ; <nl> - <nl> - context ( ) - > Plug ( if_true , if_false ) ; <nl> - } <nl> - <nl> - <nl> void FullCodeGenerator : : EmitIsObject ( CallRuntime * expr ) { <nl> ZoneList < Expression * > * args = expr - > arguments ( ) ; <nl> DCHECK ( args - > length ( ) = = 1 ) ; <nl> void FullCodeGenerator : : VisitCompareOperation ( CompareOperation * expr ) { <nl> switch ( op ) { <nl> case Token : : IN : <nl> VisitForStackValue ( expr - > right ( ) ) ; <nl> - __ InvokeBuiltin ( Builtins : : IN , CALL_FUNCTION ) ; <nl> + __ CallRuntime ( Runtime : : kHasProperty , 2 ) ; <nl> PrepareForBailoutBeforeSplit ( expr , false , NULL , NULL ) ; <nl> __ CompareRoot ( rax , Heap : : kTrueValueRootIndex ) ; <nl> Split ( equal , if_true , if_false , fall_through ) ; <nl> mmm a / src / full - codegen / x87 / full - codegen - x87 . cc <nl> ppp b / src / full - codegen / x87 / full - codegen - x87 . 
cc <nl> void FullCodeGenerator : : EmitIsSmi ( CallRuntime * expr ) { <nl> } <nl> <nl> <nl> - void FullCodeGenerator : : EmitIsNonNegativeSmi ( CallRuntime * expr ) { <nl> - ZoneList < Expression * > * args = expr - > arguments ( ) ; <nl> - DCHECK ( args - > length ( ) = = 1 ) ; <nl> - <nl> - VisitForAccumulatorValue ( args - > at ( 0 ) ) ; <nl> - <nl> - Label materialize_true , materialize_false ; <nl> - Label * if_true = NULL ; <nl> - Label * if_false = NULL ; <nl> - Label * fall_through = NULL ; <nl> - context ( ) - > PrepareTest ( & materialize_true , & materialize_false , <nl> - & if_true , & if_false , & fall_through ) ; <nl> - <nl> - PrepareForBailoutBeforeSplit ( expr , true , if_true , if_false ) ; <nl> - __ test ( eax , Immediate ( kSmiTagMask | 0x80000000 ) ) ; <nl> - Split ( zero , if_true , if_false , fall_through ) ; <nl> - <nl> - context ( ) - > Plug ( if_true , if_false ) ; <nl> - } <nl> - <nl> - <nl> void FullCodeGenerator : : EmitIsObject ( CallRuntime * expr ) { <nl> ZoneList < Expression * > * args = expr - > arguments ( ) ; <nl> DCHECK ( args - > length ( ) = = 1 ) ; <nl> void FullCodeGenerator : : VisitCompareOperation ( CompareOperation * expr ) { <nl> switch ( op ) { <nl> case Token : : IN : <nl> VisitForStackValue ( expr - > right ( ) ) ; <nl> - __ InvokeBuiltin ( Builtins : : IN , CALL_FUNCTION ) ; <nl> + __ CallRuntime ( Runtime : : kHasProperty , 2 ) ; <nl> PrepareForBailoutBeforeSplit ( expr , false , NULL , NULL ) ; <nl> __ cmp ( eax , isolate ( ) - > factory ( ) - > true_value ( ) ) ; <nl> Split ( equal , if_true , if_false , fall_through ) ; <nl> mmm a / src / hydrogen . cc <nl> ppp b / src / hydrogen . cc <nl> void HOptimizedGraphBuilder : : VisitCompareOperation ( CompareOperation * expr ) { <nl> return ast_context ( ) - > ReturnInstruction ( result , expr - > id ( ) ) ; <nl> <nl> } else if ( op = = Token : : IN ) { <nl> - HValue * function = AddLoadJSBuiltin ( Builtins : : IN ) ; <nl> Add < HPushArguments > ( left , right ) ; <nl> - / / TODO ( olivf ) InvokeFunction produces a check for the parameter count , <nl> - / / even though we are certain to pass the correct number of arguments here . <nl> - HInstruction * result = New < HInvokeFunction > ( function , 2 ) ; <nl> + HInstruction * result = <nl> + New < HCallRuntime > ( isolate ( ) - > factory ( ) - > empty_string ( ) , <nl> + Runtime : : FunctionForId ( Runtime : : kHasProperty ) , 2 ) ; <nl> return ast_context ( ) - > ReturnInstruction ( result , expr - > id ( ) ) ; <nl> } <nl> <nl> mmm a / src / runtime . js <nl> ppp b / src / runtime . js <nl> var SAR ; <nl> var SAR_STRONG ; <nl> var SHR ; <nl> var SHR_STRONG ; <nl> - var IN ; <nl> var INSTANCE_OF ; <nl> var CALL_NON_FUNCTION ; <nl> var CALL_NON_FUNCTION_AS_CONSTRUCTOR ; <nl> SHR_STRONG = function SHR_STRONG ( y ) { <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> * / <nl> <nl> - / / ECMA - 262 , section 11 . 8 . 7 , page 54 . <nl> - IN = function IN ( x ) { <nl> - if ( ! IS_SPEC_OBJECT ( x ) ) { <nl> - throw % MakeTypeError ( kInvalidInOperatorUse , this , x ) ; <nl> - } <nl> - if ( % _IsNonNegativeSmi ( this ) ) { <nl> - if ( IS_ARRAY ( x ) & & % _HasFastPackedElements ( x ) ) { <nl> - return this < x . length ; <nl> - } <nl> - return % HasElement ( x , this ) ; <nl> - } <nl> - return % HasProperty ( x , % $ toName ( this ) ) ; <nl> - } <nl> - <nl> - <nl> / / ECMA - 262 , section 11 . 8 . 6 , page 54 . 
To make the implementation more <nl> / / efficient , the return value should be zero if the ' this ' is an <nl> / / instance of F , and non - zero if not . This makes it possible to avoid <nl> mmm a / src / runtime / runtime - numbers . cc <nl> ppp b / src / runtime / runtime - numbers . cc <nl> RUNTIME_FUNCTION ( Runtime_IsSmi ) { <nl> } <nl> <nl> <nl> - RUNTIME_FUNCTION ( Runtime_IsNonNegativeSmi ) { <nl> - SealHandleScope shs ( isolate ) ; <nl> - DCHECK ( args . length ( ) = = 1 ) ; <nl> - CONVERT_ARG_CHECKED ( Object , obj , 0 ) ; <nl> - return isolate - > heap ( ) - > ToBoolean ( obj - > IsSmi ( ) & & <nl> - Smi : : cast ( obj ) - > value ( ) > = 0 ) ; <nl> - } <nl> - <nl> - <nl> RUNTIME_FUNCTION ( Runtime_GetRootNaN ) { <nl> SealHandleScope shs ( isolate ) ; <nl> DCHECK ( args . length ( ) = = 0 ) ; <nl> mmm a / src / runtime / runtime - object . cc <nl> ppp b / src / runtime / runtime - object . cc <nl> RUNTIME_FUNCTION ( Runtime_HasOwnProperty ) { <nl> } <nl> <nl> <nl> + / / ES6 section 12 . 9 . 3 , operator in . <nl> RUNTIME_FUNCTION ( Runtime_HasProperty ) { <nl> HandleScope scope ( isolate ) ; <nl> - DCHECK ( args . length ( ) = = 2 ) ; <nl> - CONVERT_ARG_HANDLE_CHECKED ( JSReceiver , receiver , 0 ) ; <nl> - CONVERT_ARG_HANDLE_CHECKED ( Name , key , 1 ) ; <nl> + DCHECK_EQ ( 2 , args . length ( ) ) ; <nl> + CONVERT_ARG_HANDLE_CHECKED ( Object , key , 0 ) ; <nl> + CONVERT_ARG_HANDLE_CHECKED ( Object , object , 1 ) ; <nl> <nl> - Maybe < bool > maybe = JSReceiver : : HasProperty ( receiver , key ) ; <nl> - if ( ! maybe . IsJust ( ) ) return isolate - > heap ( ) - > exception ( ) ; <nl> - return isolate - > heap ( ) - > ToBoolean ( maybe . FromJust ( ) ) ; <nl> - } <nl> + / / Check that { object } is actually a receiver . <nl> + if ( ! object - > IsJSReceiver ( ) ) { <nl> + THROW_NEW_ERROR_RETURN_FAILURE ( <nl> + isolate , <nl> + NewTypeError ( MessageTemplate : : kInvalidInOperatorUse , key , object ) ) ; <nl> + } <nl> + Handle < JSReceiver > receiver = Handle < JSReceiver > : : cast ( object ) ; <nl> <nl> + / / Check for fast element case . <nl> + uint32_t index = 0 ; <nl> + if ( key - > ToArrayIndex ( & index ) ) { <nl> + Maybe < bool > maybe = JSReceiver : : HasElement ( receiver , index ) ; <nl> + if ( ! maybe . IsJust ( ) ) return isolate - > heap ( ) - > exception ( ) ; <nl> + return isolate - > heap ( ) - > ToBoolean ( maybe . FromJust ( ) ) ; <nl> + } <nl> <nl> - RUNTIME_FUNCTION ( Runtime_HasElement ) { <nl> - HandleScope scope ( isolate ) ; <nl> - DCHECK ( args . length ( ) = = 2 ) ; <nl> - CONVERT_ARG_HANDLE_CHECKED ( JSReceiver , receiver , 0 ) ; <nl> - CONVERT_SMI_ARG_CHECKED ( index , 1 ) ; <nl> + / / Convert { key } to a Name first . <nl> + Handle < Name > name ; <nl> + ASSIGN_RETURN_FAILURE_ON_EXCEPTION ( isolate , name , <nl> + Runtime : : ToName ( isolate , key ) ) ; <nl> <nl> - Maybe < bool > maybe = JSReceiver : : HasElement ( receiver , index ) ; <nl> + / / Lookup property by { name } on { receiver } . <nl> + Maybe < bool > maybe = JSReceiver : : HasProperty ( receiver , name ) ; <nl> if ( ! maybe . IsJust ( ) ) return isolate - > heap ( ) - > exception ( ) ; <nl> return isolate - > heap ( ) - > ToBoolean ( maybe . FromJust ( ) ) ; <nl> } <nl> mmm a / src / runtime / runtime . h <nl> ppp b / src / runtime / runtime . 
h <nl> namespace internal { <nl> F ( SmiLexicographicCompare , 2 , 1 ) \ <nl> F ( MaxSmi , 0 , 1 ) \ <nl> F ( IsSmi , 1 , 1 ) \ <nl> - F ( IsNonNegativeSmi , 1 , 1 ) \ <nl> F ( GetRootNaN , 0 , 1 ) <nl> <nl> <nl> namespace internal { <nl> F ( DeleteProperty_Strict , 2 , 1 ) \ <nl> F ( HasOwnProperty , 2 , 1 ) \ <nl> F ( HasProperty , 2 , 1 ) \ <nl> - F ( HasElement , 2 , 1 ) \ <nl> F ( IsPropertyEnumerable , 2 , 1 ) \ <nl> F ( GetPropertyNamesFast , 1 , 1 ) \ <nl> F ( GetOwnPropertyNames , 2 , 1 ) \ <nl> mmm a / test / cctest / compiler / test - run - inlining . cc <nl> ppp b / test / cctest / compiler / test - run - inlining . cc <nl> TEST ( InlineIntrinsicIsSmi ) { <nl> } <nl> <nl> <nl> - TEST ( InlineIntrinsicIsNonNegativeSmi ) { <nl> - FunctionTester T ( <nl> - " ( function ( ) { " <nl> - " var x = 42 ; " <nl> - " function bar ( s , t ) { return % _IsNonNegativeSmi ( x ) ; } ; " <nl> - " return bar ; " <nl> - " } ) ( ) ; " , <nl> - kInlineFlags ) ; <nl> - <nl> - InstallAssertInlineCountHelper ( CcTest : : isolate ( ) ) ; <nl> - T . CheckCall ( T . true_value ( ) , T . Val ( 12 ) , T . Val ( 4 ) ) ; <nl> - } <nl> - <nl> - <nl> TEST ( InlineIntrinsicIsArray ) { <nl> FunctionTester T ( <nl> " ( function ( ) { " <nl> mmm a / test / cctest / compiler / test - run - intrinsics . cc <nl> ppp b / test / cctest / compiler / test - run - intrinsics . cc <nl> TEST ( IsMinusZero ) { <nl> } <nl> <nl> <nl> - TEST ( IsNonNegativeSmi ) { <nl> - FunctionTester T ( " ( function ( a ) { return % _IsNonNegativeSmi ( a ) ; } ) " , flags ) ; <nl> - <nl> - T . CheckTrue ( T . Val ( 1 ) ) ; <nl> - T . CheckFalse ( T . Val ( 1 . 1 ) ) ; <nl> - T . CheckFalse ( T . Val ( - 0 . 0 ) ) ; <nl> - T . CheckFalse ( T . Val ( - 2 ) ) ; <nl> - T . CheckFalse ( T . Val ( - 2 . 3 ) ) ; <nl> - T . CheckFalse ( T . undefined ( ) ) ; <nl> - } <nl> - <nl> - <nl> TEST ( IsObject ) { <nl> FunctionTester T ( " ( function ( a ) { return % _IsObject ( a ) ; } ) " , flags ) ; <nl> <nl> mmm a / test / cctest / test - api . cc <nl> ppp b / test / cctest / test - api . cc <nl> TEST ( AccessCheckThrows ) { <nl> CheckCorrectThrow ( " % DeleteProperty_Sloppy ( other , ' 1 ' ) " ) ; <nl> CheckCorrectThrow ( " % DeleteProperty_Strict ( other , ' 1 ' ) " ) ; <nl> CheckCorrectThrow ( " % HasOwnProperty ( other , ' x ' ) " ) ; <nl> - CheckCorrectThrow ( " % HasProperty ( other , ' x ' ) " ) ; <nl> - CheckCorrectThrow ( " % HasElement ( other , 1 ) " ) ; <nl> + CheckCorrectThrow ( " % HasProperty ( ' x ' , other ) " ) ; <nl> CheckCorrectThrow ( " % IsPropertyEnumerable ( other , ' x ' ) " ) ; <nl> / / PROPERTY_ATTRIBUTES_NONE = 0 <nl> CheckCorrectThrow ( " % DefineAccessorPropertyUnchecked ( " <nl> mmm a / test / unittests / compiler / js - intrinsic - lowering - unittest . cc <nl> ppp b / test / unittests / compiler / js - intrinsic - lowering - unittest . 
cc <nl> TEST_F ( JSIntrinsicLoweringTest , InlineIsSmi ) { <nl> } <nl> <nl> <nl> - / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> - / / % _IsNonNegativeSmi <nl> - <nl> - <nl> - TEST_F ( JSIntrinsicLoweringTest , InlineIsNonNegativeSmi ) { <nl> - Node * const input = Parameter ( 0 ) ; <nl> - Node * const context = Parameter ( 1 ) ; <nl> - Node * const effect = graph ( ) - > start ( ) ; <nl> - Node * const control = graph ( ) - > start ( ) ; <nl> - Reduction const r = Reduce ( graph ( ) - > NewNode ( <nl> - javascript ( ) - > CallRuntime ( Runtime : : kInlineIsNonNegativeSmi , 1 ) , input , <nl> - context , effect , control ) ) ; <nl> - ASSERT_TRUE ( r . Changed ( ) ) ; <nl> - EXPECT_THAT ( r . replacement ( ) , IsObjectIsNonNegativeSmi ( input ) ) ; <nl> - } <nl> - <nl> - <nl> / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> / / % _IsArray <nl> <nl> | [ runtime ] Remove useless IN builtin . | v8/v8 | 72d60a1e80e81e2e68ca402665e2acbc46c5e471 | 2015-08-13T12:39:21Z |
mmm a / js / apps / system / _admin / aardvark / APP / frontend / js / modules / org / arangodb / aql / explainer . js <nl> ppp b / js / apps / system / _admin / aardvark / APP / frontend / js / modules / org / arangodb / aql / explainer . js <nl> function processQuery ( query , explain ) { <nl> return keyword ( " REPLACE " ) + " " + variableName ( node . inDocVariable ) + " " + keyword ( " IN " ) + " " + collection ( node . collection ) ; <nl> case " UpsertNode " : <nl> modificationFlags = node . modificationFlags ; <nl> - return keyword ( " UPSERT " ) + " " + variableName ( node . inDocVariable ) + " " + keyword ( " INSERT " ) + " " + variableName ( node . insertVariable ) + " " + keyword ( node . isReplace ? " REPLACE " : " UPDATE " ) + variableName ( node . updateVariable ) + " " + keyword ( " IN " ) + " " + collection ( node . collection ) ; <nl> + return keyword ( " UPSERT " ) + " " + variableName ( node . inDocVariable ) + " " + keyword ( " INSERT " ) + " " + variableName ( node . insertVariable ) + " " + keyword ( node . isReplace ? " REPLACE " : " UPDATE " ) + " " + variableName ( node . updateVariable ) + " " + keyword ( " IN " ) + " " + collection ( node . collection ) ; <nl> case " RemoveNode " : <nl> modificationFlags = node . modificationFlags ; <nl> return keyword ( " REMOVE " ) + " " + variableName ( node . inVariable ) + " " + keyword ( " IN " ) + " " + collection ( node . collection ) ; <nl> mmm a / js / common / modules / org / arangodb / aql / explainer . js <nl> ppp b / js / common / modules / org / arangodb / aql / explainer . js <nl> function processQuery ( query , explain ) { <nl> return keyword ( " REPLACE " ) + " " + variableName ( node . inDocVariable ) + " " + keyword ( " IN " ) + " " + collection ( node . collection ) ; <nl> case " UpsertNode " : <nl> modificationFlags = node . modificationFlags ; <nl> - return keyword ( " UPSERT " ) + " " + variableName ( node . inDocVariable ) + " " + keyword ( " INSERT " ) + " " + variableName ( node . insertVariable ) + " " + keyword ( node . isReplace ? " REPLACE " : " UPDATE " ) + variableName ( node . updateVariable ) + " " + keyword ( " IN " ) + " " + collection ( node . collection ) ; <nl> + return keyword ( " UPSERT " ) + " " + variableName ( node . inDocVariable ) + " " + keyword ( " INSERT " ) + " " + variableName ( node . insertVariable ) + " " + keyword ( node . isReplace ? " REPLACE " : " UPDATE " ) + " " + variableName ( node . updateVariable ) + " " + keyword ( " IN " ) + " " + collection ( node . collection ) ; <nl> case " RemoveNode " : <nl> modificationFlags = node . modificationFlags ; <nl> return keyword ( " REMOVE " ) + " " + variableName ( node . inVariable ) + " " + keyword ( " IN " ) + " " + collection ( node . collection ) ; <nl> | added missing whitespace | arangodb/arangodb | 4b7d04de4704e3c9617578d775607dffafa90fa8 | 2015-08-19T23:46:21Z |
mmm a / src / mongo / util / options_parser / options_parser . cpp <nl> ppp b / src / mongo / util / options_parser / options_parser . cpp <nl> Status addYAMLNodesToEnvironment ( const YAML : : Node & root , <nl> if ( ! ret . isOK ( ) ) { <nl> return ret ; <nl> } <nl> - invariant ( option ) ; <nl> + <nl> + std : : string canonicalName ; <nl> + if ( option ) { <nl> + canonicalName = option - > _dottedName ; <nl> + } else { <nl> + / / Possible if using non - strict parsing . <nl> + canonicalName = dottedName ; <nl> + } <nl> <nl> Value dummyVal ; <nl> - if ( environment - > get ( option - > _dottedName , & dummyVal ) . isOK ( ) ) { <nl> + if ( environment - > get ( canonicalName , & dummyVal ) . isOK ( ) ) { <nl> StringBuilder sb ; <nl> - sb < < " Error parsing YAML config : duplicate key : " < < dottedName <nl> - < < " ( canonical key : " < < option - > _dottedName < < " ) " ; <nl> - return Status ( ErrorCodes : : BadValue , sb . str ( ) ) ; <nl> + sb < < " Error parsing YAML config : duplicate key : " < < dottedName ; <nl> + if ( dottedName ! = canonicalName ) { <nl> + sb < < " ( canonical key : " < < canonicalName < < " ) " ; <nl> + } <nl> + return { ErrorCodes : : BadValue , sb . str ( ) } ; <nl> } <nl> <nl> / / Only add the value if it is not empty . YAMLNodeToValue will set the <nl> / / optionValue to an empty Value if we should not set it in the Environment . <nl> if ( ! optionValue . isEmpty ( ) ) { <nl> - ret = environment - > set ( option - > _dottedName , optionValue ) ; <nl> + ret = environment - > set ( canonicalName , optionValue ) ; <nl> if ( ! ret . isOK ( ) ) { <nl> return ret ; <nl> } <nl> <nl> - ret = canonicalizeOption ( * option , environment ) ; <nl> - if ( ! ret . isOK ( ) ) { <nl> - return ret ; <nl> + if ( option ) { <nl> + ret = canonicalizeOption ( * option , environment ) ; <nl> + if ( ! ret . isOK ( ) ) { <nl> + return ret ; <nl> + } <nl> } <nl> } <nl> } <nl> | SERVER - 41347 Fix handling of non - strict arguments during option canonicalization | mongodb/mongo | e11c234484fe58681597ba74fd23fa76de734250 | 2019-05-29T16:45:16Z |
mmm a / utils / buildbot - script . sh <nl> ppp b / utils / buildbot - script . sh <nl> KNOWN_SETTINGS = ( <nl> skip - build - ios - device " " " set to skip building Swift stdlibs for iOS devices ( i . e . build simulators only ) " <nl> skip - build - ios - simulator " " " set to skip building Swift stdlibs for iOS simulators ( i . e . build devices only ) " <nl> skip - build - sourcekit " " " set to skip building SourceKit " <nl> - skip - build - xctest " " " set to skip building the XCTest overlay module " <nl> skip - test - swift " " " set to skip testing Swift " <nl> skip - test - ios " " " set to skip testing Swift stdlibs for iOS " <nl> skip - test - ios - device " " " set to skip testing Swift stdlibs for iOS devices ( i . e . test simulators only ) " <nl> LLVM_TARGETS_TO_BUILD = " X86 ; ARM " <nl> # macosx - x86_64 stdlib is part of the swift product itself <nl> if [ [ ! " $ SKIP_IOS " ] ] ; then <nl> IOS_SIMULATOR_PRODUCTS = ( swift_stdlib_ios_simulator_x86_64 swift_stdlib_ios_simulator_i386 ) <nl> - # FIXME swift_stdlib_ios_arm64 disabled pending rdar : / / 16641385 <nl> - IOS_DEVICE_PRODUCTS = ( swift_stdlib_ios_armv7 ) <nl> + IOS_DEVICE_PRODUCTS = ( swift_stdlib_ios_arm64 swift_stdlib_ios_armv7 ) <nl> LLVM_TARGETS_TO_BUILD = " X86 ; ARM ; ARM64 " <nl> if [ [ ! " $ SKIP_BUILD_IOS " ] ] ; then <nl> if [ [ ! " $ SKIP_BUILD_IOS_SIMULATOR " ] ] ; then <nl> fi <nl> # CMake options used for all targets , including LLVM / Clang <nl> COMMON_CMAKE_OPTIONS = ( <nl> " $ { CMAKE_COMPILER_OPTIONS [ @ ] } " <nl> + - DCMAKE_BUILD_TYPE = " $ BUILD_TYPE " <nl> - DLLVM_ENABLE_ASSERTIONS = " ON " <nl> ) <nl> <nl> if [ \ ! " $ SKIP_BUILD_LLVM " ] ; then <nl> # LLDB <nl> ( cd " $ { LLVM_BUILD_DIR } " & & <nl> " $ CMAKE " - G " $ { CMAKE_GENERATOR } " " $ { COMMON_CMAKE_OPTIONS [ @ ] } " \ <nl> - - DCMAKE_BUILD_TYPE = " RelWithDebInfo " \ <nl> - DCMAKE_CXX_FLAGS = " - stdlib = libc + + " \ <nl> - DCMAKE_EXE_LINKER_FLAGS = " - stdlib = libc + + " \ <nl> - DCMAKE_SHARED_LINKER_FLAGS = " - stdlib = libc + + " \ <nl> SWIFT_CMAKE_OPTIONS = ( <nl> - DCMAKE_CXX_FLAGS = " - isysroot $ { SYSROOT } " <nl> - DSWIFT_RUN_LONG_TESTS = " ON " <nl> - DLLVM_CONFIG = " $ { LLVM_BUILD_DIR } / bin / llvm - config " <nl> - - DCMAKE_BUILD_TYPE = " RelWithDebInfo " <nl> ) <nl> <nl> - if [ [ " $ SKIP_BUILD_XCTEST " ] ] ; then <nl> - SWIFT_CMAKE_OPTIONS = ( " $ { SWIFT_CMAKE_OPTIONS [ @ ] } " - DSWIFT_BUILD_XCTEST = OFF ) <nl> - fi <nl> - <nl> # set_ios_options options_var platform deployment_target internal_suffix arch <nl> function set_ios_options { <nl> local platform = $ 2 <nl> function set_ios_options { <nl> local sdkroot = $ ( xcrun - sdk $ { platform } $ { internal_suffix } - show - sdk - path ) <nl> <nl> local opts = ( <nl> - - DCMAKE_BUILD_TYPE = " Debug " <nl> - DCMAKE_TOOLCHAIN_FILE = " $ { SWIFT_SOURCE_DIR } / cmake / $ { platform } . cmake " <nl> - DCMAKE_SYSTEM_PROCESSOR = $ { arch } <nl> - DCMAKE_OSX_ARCHITECTURES = $ { arch } <nl> for product in " $ { SWIFT_TEST_PRODUCTS [ @ ] } " ; do <nl> " $ { build_cmd [ @ ] } " $ { BUILD_TARGET_FLAG } SwiftUnitTests <nl> fi <nl> <nl> - if [ [ $ { build_cmd [ @ ] } = = * ninja * ] ] ; then <nl> + if [ [ " $ { CMAKE_GENERATOR } " = = Ninja ] ] ; then <nl> # Ninja buffers command output to avoid scrambling the output <nl> # of parallel jobs , which is awesome . . . except that it <nl> # interferes with the progress meter when testing . 
Instead of <nl> # executing ninja directly , have it dump the commands it would <nl> # run , strip Ninja ' s progress prefix with sed , and tell the <nl> # shell to execute that . <nl> - echo " $ ( " $ { build_cmd [ @ ] } " - n - v check - $ { product } | sed - e ' s / [ ^ ] ] * ] / / ' ) " <nl> sh - c " $ ( " $ { build_cmd [ @ ] } " - n - v check - $ { product } | sed - e ' s / [ ^ ] ] * ] / / ' ) " <nl> else <nl> " $ { build_cmd [ @ ] } " $ { BUILD_TARGET_FLAG } check - $ { product } <nl> | Revert r16429 . | apple/swift | 23a3b577728e6f7a138012a878928bc5ab83d375 | 2014-04-17T01:18:33Z |
new file mode 100644 <nl> index 000000000000 . . 9696eac36b54 <nl> mmm / dev / null <nl> ppp b / spec / fixtures / pages / close . html <nl> <nl> + < html > <nl> + < link rel = " icon " type = " image / png " href = " / favicon . png " / > <nl> + < link rel = " icon " type = " image / png " href = " http : / / test . com / favicon . png " / > <nl> + < body > <nl> + < script type = " text / javascript " charset = " utf - 8 " > <nl> + window . close ( ) ; <nl> + < / script > <nl> + < / body > <nl> + < / html > <nl> mmm a / spec / webview - spec . coffee <nl> ppp b / spec / webview - spec . coffee <nl> describe ' < webview > tag ' , - > <nl> webview . src = " file : / / # { fixtures } / pages / a . html " <nl> document . body . appendChild webview <nl> <nl> + describe ' close event ' , - > <nl> + it ' should fire when interior page calls window . close ' , - > <nl> + webview . addEventListener ' close ' , - > <nl> + done ( ) <nl> + <nl> + webview . src = " file : / / # { fixtures } / pages / close . html " <nl> + document . body . appendChild webview <nl> + <nl> describe ' < webview > . reload ( ) ' , - > <nl> it ' should emit beforeunload handler ' , ( done ) - > <nl> listener = ( e ) - > <nl> describe ' < webview > tag ' , - > <nl> webview . removeEventListener ' ipc - message ' , listener <nl> done ( ) <nl> webview . addEventListener ' ipc - message ' , listener <nl> - webview . setAttribute ' nodeintegration ' , ' on ' <nl> + webview . setAttribute ' nodeintegration ' , ' on ' <nl> webview . src = " file : / / # { fixtures } / pages / history . html " <nl> document . body . appendChild webview <nl> | Add a test to verify the webview close event | electron/electron | ea63a04388ab4bc80564430f6ffea5519cfa6051 | 2015-07-08T21:34:44Z |
mmm a / build / deps / github_hashes / facebook / folly - rev . txt <nl> ppp b / build / deps / github_hashes / facebook / folly - rev . txt <nl> @ @ - 1 + 1 @ @ <nl> - Subproject commit 87229af6537ef8bb3fcf7d52c926a98a07ccd6a1 <nl> + Subproject commit 1755d86db51db2612e0eb835894a5a690788ccfa <nl> mmm a / build / deps / github_hashes / facebook / wangle - rev . txt <nl> ppp b / build / deps / github_hashes / facebook / wangle - rev . txt <nl> @ @ - 1 + 1 @ @ <nl> - Subproject commit a29d23f8b1a5992b3d8b66818147d6b45b095155 <nl> + Subproject commit 04d70d859eea6a57f341d799be494e70bd803438 <nl> | Updating submodules | facebook/watchman | 8ce1a168d6d09050ac2641742870d87541fadc6f | 2018-10-12T08:24:40Z |
mmm a / Marlin / src / MarlinCore . cpp <nl> ppp b / Marlin / src / MarlinCore . cpp <nl> bool wait_for_heatup = true ; <nl> bool wait_for_user ; / / = false ; <nl> # endif <nl> <nl> - # if HAS_AUTO_REPORTING | | ENABLED ( HOST_KEEPALIVE_FEATURE ) <nl> - bool suspend_auto_report ; / / = false <nl> - # endif <nl> - <nl> / / Inactivity shutdown <nl> millis_t max_inactive_time , / / = 0 <nl> stepper_inactive_time = ( DEFAULT_STEPPER_DEACTIVE_TIME ) * 1000UL ; <nl> void startOrResumeJob ( ) { <nl> * - Pulse FET_SAFETY_PIN if it exists <nl> * / <nl> <nl> - void manage_inactivity ( const bool ignore_stepper_queue / * = false * / ) { <nl> + inline void manage_inactivity ( const bool ignore_stepper_queue = false ) { <nl> <nl> # if HAS_FILAMENT_SENSOR <nl> runout . run ( ) ; <nl> void idle ( <nl> # endif <nl> <nl> # if HAS_AUTO_REPORTING <nl> - if ( ! suspend_auto_report ) { <nl> + if ( ! gcode . autoreport_paused ) { <nl> # if ENABLED ( AUTO_REPORT_TEMPERATURES ) <nl> thermalManager . auto_report_temperatures ( ) ; <nl> # endif <nl> mmm a / Marlin / src / MarlinCore . h <nl> ppp b / Marlin / src / MarlinCore . h <nl> void idle ( <nl> # endif <nl> ) ; <nl> <nl> - void manage_inactivity ( const bool ignore_stepper_queue = false ) ; <nl> - <nl> # if ENABLED ( EXPERIMENTAL_I2CBUS ) <nl> # include " feature / twibus . h " <nl> extern TWIBus i2c ; <nl> extern bool wait_for_heatup ; <nl> extern bool wait_for_user ; <nl> # endif <nl> <nl> - # if HAS_AUTO_REPORTING | | ENABLED ( HOST_KEEPALIVE_FEATURE ) <nl> - extern bool suspend_auto_report ; <nl> - # endif <nl> - <nl> / / Inactivity shutdown timer <nl> extern millis_t max_inactive_time , stepper_inactive_time ; <nl> <nl> mmm a / Marlin / src / core / utility . cpp <nl> ppp b / Marlin / src / core / utility . cpp <nl> void safe_delay ( millis_t ms ) { <nl> thermalManager . manage_heater ( ) ; / / This keeps us safe if too many small safe_delay ( ) calls are made <nl> } <nl> <nl> + / / A delay to provide brittle hosts time to receive bytes <nl> + # if ENABLED ( SERIAL_OVERRUN_PROTECTION ) <nl> + <nl> + # include " . . / gcode / gcode . h " / / for set_autoreport_paused <nl> + <nl> + void serial_delay ( const millis_t ms ) { <nl> + const bool was = gcode . set_autoreport_paused ( true ) ; <nl> + safe_delay ( ms ) ; <nl> + gcode . set_autoreport_paused ( was ) ; <nl> + } <nl> + # endif <nl> + <nl> # if ENABLED ( DEBUG_LEVELING_FEATURE ) <nl> <nl> # include " . . / module / probe . h " <nl> mmm a / Marlin / src / core / utility . h <nl> ppp b / Marlin / src / core / utility . h <nl> <nl> / / Delay that ensures heaters and watchdog are kept alive <nl> void safe_delay ( millis_t ms ) ; <nl> <nl> - / / A delay to provide brittle hosts time to receive bytes <nl> - inline void serial_delay ( const millis_t ms ) { <nl> - # if ENABLED ( SERIAL_OVERRUN_PROTECTION ) <nl> - safe_delay ( ms ) ; <nl> - # else <nl> - UNUSED ( ms ) ; <nl> - # endif <nl> - } <nl> + # if ENABLED ( SERIAL_OVERRUN_PROTECTION ) <nl> + void serial_delay ( const millis_t ms ) ; <nl> + # else <nl> + inline void serial_delay ( const millis_t ) { } <nl> + # endif <nl> <nl> # if GRID_MAX_POINTS_X & & GRID_MAX_POINTS_Y <nl> <nl> mmm a / Marlin / src / feature / bedlevel / ubl / ubl . cpp <nl> ppp b / Marlin / src / feature / bedlevel / ubl / ubl . cpp <nl> <nl> <nl> unified_bed_leveling ubl ; <nl> <nl> + # include " . . / . . / . . / MarlinCore . h " <nl> + <nl> # include " . . / . . / . . / module / configuration_store . h " <nl> # include " . . / . . / . . / module / planner . h " <nl> # include " . 
. / . . / . . / module / motion . h " <nl> <nl> * 4 : Compact Human - Readable <nl> * / <nl> void unified_bed_leveling : : display_map ( const int map_type ) { <nl> - # if HAS_AUTO_REPORTING | | ENABLED ( HOST_KEEPALIVE_FEATURE ) <nl> - suspend_auto_report = true ; <nl> - # endif <nl> + const bool was = gcode . set_autoreport_paused ( true ) ; <nl> <nl> constexpr uint8_t eachsp = 1 + 6 + 1 , / / [ - 3 . 567 ] <nl> twixt = eachsp * ( GRID_MAX_POINTS_X ) - 9 * 2 ; / / Leading 4sp , Coordinates 9sp each <nl> <nl> SERIAL_EOL ( ) ; <nl> } <nl> <nl> - # if HAS_AUTO_REPORTING | | ENABLED ( HOST_KEEPALIVE_FEATURE ) <nl> - suspend_auto_report = false ; <nl> - # endif <nl> + set_gcode . set_autoreport_paused ( was ) ; <nl> } <nl> <nl> bool unified_bed_leveling : : sanity_check ( ) { <nl> mmm a / Marlin / src / gcode / gcode . cpp <nl> ppp b / Marlin / src / gcode / gcode . cpp <nl> GcodeSuite gcode ; <nl> # include " . . / feature / cancel_object . h " <nl> # endif <nl> <nl> - # include " . . / MarlinCore . h " / / for idle ( ) and suspend_auto_report <nl> + # include " . . / MarlinCore . h " / / for idle ( ) <nl> <nl> millis_t GcodeSuite : : previous_move_ms ; <nl> <nl> uint8_t GcodeSuite : : axis_relative = ( <nl> | ( ar_init . e ? _BV ( REL_E ) : 0 ) <nl> ) ; <nl> <nl> + # if HAS_AUTO_REPORTING | | ENABLED ( HOST_KEEPALIVE_FEATURE ) <nl> + bool GcodeSuite : : autoreport_paused ; / / = false <nl> + # endif <nl> + <nl> # if ENABLED ( HOST_KEEPALIVE_FEATURE ) <nl> GcodeSuite : : MarlinBusyState GcodeSuite : : busy_state = NOT_BUSY ; <nl> uint8_t GcodeSuite : : host_keepalive_interval = DEFAULT_KEEPALIVE_INTERVAL ; <nl> void GcodeSuite : : process_subcommands_now ( char * gcode ) { <nl> void GcodeSuite : : host_keepalive ( ) { <nl> const millis_t ms = millis ( ) ; <nl> static millis_t next_busy_signal_ms = 0 ; <nl> - if ( ! suspend_auto_report & & host_keepalive_interval & & busy_state ! = NOT_BUSY ) { <nl> + if ( ! autoreport_paused & & host_keepalive_interval & & busy_state ! = NOT_BUSY ) { <nl> if ( PENDING ( ms , next_busy_signal_ms ) ) return ; <nl> switch ( busy_state ) { <nl> case IN_HANDLER : <nl> mmm a / Marlin / src / gcode / gcode . h <nl> ppp b / Marlin / src / gcode / gcode . h <nl> class GcodeSuite { <nl> process_subcommands_now_P ( G28_STR ) ; <nl> } <nl> <nl> + # if HAS_AUTO_REPORTING | | ENABLED ( HOST_KEEPALIVE_FEATURE ) <nl> + static bool autoreport_paused ; <nl> + static inline bool set_autoreport_paused ( const bool p ) { <nl> + const bool was = autoreport_paused ; <nl> + autoreport_paused = p ; <nl> + return was ; <nl> + } <nl> + # else <nl> + static constexpr bool autoreport_paused = false ; <nl> + static inline bool set_autoreport_paused ( const bool ) { return false ; } <nl> + # endif <nl> + <nl> # if ENABLED ( HOST_KEEPALIVE_FEATURE ) <nl> / * * <nl> * States for managing Marlin and host communication <nl> | Inline manage_inactivity , tweak autoreport_paused | MarlinFirmware/Marlin | a1f026f57ad7dbf4c50b135cf1dc0f9eacc06025 | 2020-02-21T02:36:50Z |
mmm a / src / parser . js <nl> ppp b / src / parser . js <nl> function parseParamTokens ( params ) { <nl> <nl> function parseGetElementPtr ( segment ) { <nl> segment = segment . slice ( 0 ) ; <nl> - if ( segment [ 1 ] . text = = = ' noalias ' ) { <nl> + while ( [ ' noalias ' , ' sret ' , ' nocapture ' , ' nest ' , ' zeroext ' , ' signext ' ] . indexOf ( segment [ 1 ] . text ) ! = - 1 ) { <nl> segment . splice ( 1 , 1 ) ; <nl> } <nl> + assertTrue ( [ ' inreg ' , ' byval ' ] . indexOf ( segment [ 1 ] . text ) = = - 1 ) ; <nl> var ret = { <nl> intertype : ' getelementptr ' , <nl> type : segment [ 0 ] , <nl> function intertyper ( data ) { <nl> selectItem : function ( item ) { return item . tokens & & item . tokens . length > = 3 & & item . indent = = = 0 & & item . tokens [ 1 ] . text = = ' = ' } , <nl> processItem : function ( item ) { <nl> if ( item . tokens [ 2 ] . text = = ' type ' ) { <nl> - dprint ( ' linenum : ' + item . lineNum + ' : ' + dump ( item ) ) ; <nl> + / / dprint ( ' type / const linenum : ' + item . lineNum + ' : ' + dump ( item ) ) ; <nl> var fields = [ ] ; <nl> if ( item . tokens [ 3 ] . text ! = ' opaque ' ) { <nl> if ( item . tokens [ 3 ] . type = = ' < ' ) / / type < { i8 } > XXX - check spec <nl> mmm a / src / utility . js <nl> ppp b / src / utility . js <nl> <nl> <nl> function dump ( item ) { <nl> try { <nl> - return JSON . stringify ( item ) ; <nl> + return JSON . stringify ( item ) . substr ( 0 , 200 ) ; <nl> } catch ( e ) { <nl> var ret = [ ] ; <nl> for ( var i in item ) { <nl> function dump ( item ) { <nl> ret . push ( i + ' : [ ? ] ' ) ; <nl> } <nl> } <nl> - return ret . join ( ' , ' ) ; <nl> + return ret . join ( ' , ' ) . substr ( 0 , 200 ) ; <nl> } <nl> } <nl> <nl> mmm a / tests / runner . py <nl> ppp b / tests / runner . py <nl> def do_test ( self , src , expected_output , args = [ ] , output_nicerizer = None , no_pytho <nl> if DEBUG : print output <nl> cwd = os . getcwd ( ) <nl> os . chdir ( path_from_root ( [ ' src ' ] ) ) <nl> - output = timeout_run ( Popen ( [ PARSER_ENGINE ] + PARSER_OPTS + [ JS_COMPILER ] , stdin = open ( filename + ' . o . llvm ' , ' r ' ) , stdout = open ( filename + ' . o . js ' , ' w ' ) , stderr = STDOUT ) , 20 , ' Parser ' ) <nl> + output = timeout_run ( Popen ( [ PARSER_ENGINE ] + PARSER_OPTS + [ JS_COMPILER ] , stdin = open ( filename + ' . o . llvm ' , ' r ' ) , stdout = open ( filename + ' . o . js ' , ' w ' ) , stderr = STDOUT ) , 200 , ' Parser ' ) <nl> os . chdir ( cwd ) <nl> # return <nl> if DEBUG : print output <nl> | fix for ' sret ' & other GEP features | emscripten-core/emscripten | c203d5c8e027a2cf0bea23540fc0edf93e4e37eb | 2010-08-30T00:36:10Z |
mmm a / test / extensions / transport_sockets / tls / BUILD <nl> ppp b / test / extensions / transport_sockets / tls / BUILD <nl> envoy_cc_test ( <nl> " / / source / common / network : utility_lib " , <nl> " / / source / common / stats : isolated_store_lib " , <nl> " / / source / common / stats : stats_lib " , <nl> - " / / source / extensions / filters / listener / tls_inspector : tls_inspector_lib " , <nl> " / / source / extensions / transport_sockets / tls : context_config_lib " , <nl> " / / source / extensions / transport_sockets / tls : context_lib " , <nl> " / / source / extensions / transport_sockets / tls : ssl_socket_lib " , <nl> mmm a / test / extensions / transport_sockets / tls / integration / BUILD <nl> ppp b / test / extensions / transport_sockets / tls / integration / BUILD <nl> envoy_cc_test ( <nl> " / / source / common / event : dispatcher_lib " , <nl> " / / source / common / network : connection_lib " , <nl> " / / source / common / network : utility_lib " , <nl> - " / / source / extensions / filters / listener / tls_inspector : config " , <nl> " / / source / extensions / transport_sockets / tls : config " , <nl> " / / source / extensions / transport_sockets / tls : context_config_lib " , <nl> " / / source / extensions / transport_sockets / tls : context_lib " , <nl> mmm a / test / extensions / transport_sockets / tls / ssl_socket_test . cc <nl> ppp b / test / extensions / transport_sockets / tls / ssl_socket_test . cc <nl> <nl> # include " common / network / transport_socket_options_impl . h " <nl> # include " common / network / utility . h " <nl> <nl> - # include " extensions / filters / listener / tls_inspector / tls_inspector . h " <nl> # include " extensions / transport_sockets / tls / context_config_impl . h " <nl> # include " extensions / transport_sockets / tls / context_impl . h " <nl> # include " extensions / transport_sockets / tls / private_key / private_key_manager_impl . 
h " <nl> mmm a / test / integration / BUILD <nl> ppp b / test / integration / BUILD <nl> envoy_cc_test ( <nl> " / / source / common / event : dispatcher_lib " , <nl> " / / source / common / network : connection_lib " , <nl> " / / source / common / network : utility_lib " , <nl> - " / / source / extensions / filters / listener / tls_inspector : config " , <nl> " / / source / extensions / transport_sockets / tls : config " , <nl> " / / source / extensions / transport_sockets / tls : context_config_lib " , <nl> " / / source / extensions / transport_sockets / tls : context_lib " , <nl> envoy_cc_test ( <nl> " / / source / common / event : dispatcher_lib " , <nl> " / / source / common / network : connection_lib " , <nl> " / / source / common / network : utility_lib " , <nl> - " / / source / extensions / filters / listener / tls_inspector : config " , <nl> " / / source / extensions / transport_sockets / tls : config " , <nl> " / / source / extensions / transport_sockets / tls : context_config_lib " , <nl> " / / source / extensions / transport_sockets / tls : context_lib " , <nl> envoy_cc_test ( <nl> deps = [ <nl> " : http_integration_lib " , <nl> " / / source / common / http : header_map_lib " , <nl> - " / / source / extensions / filters / listener / tls_inspector : config " , <nl> " / / source / extensions / transport_sockets / tls : config " , <nl> " / / test / mocks / server : server_mocks " , <nl> " / / test / test_common : utility_lib " , <nl> mmm a / test / server / BUILD <nl> ppp b / test / server / BUILD <nl> envoy_cc_test ( <nl> " / / source / common / network : socket_option_lib " , <nl> " / / source / common / network : utility_lib " , <nl> " / / source / common / protobuf " , <nl> - " / / source / extensions / filters / listener / original_dst : config " , <nl> - " / / source / extensions / filters / listener / tls_inspector : config " , <nl> " / / source / extensions / filters / network / http_connection_manager : config " , <nl> " / / source / extensions / transport_sockets / raw_buffer : config " , <nl> " / / source / extensions / transport_sockets / tls : config " , <nl> mmm a / test / server / filter_chain_manager_impl_test . cc <nl> ppp b / test / server / filter_chain_manager_impl_test . cc <nl> <nl> # include " server / filter_chain_manager_impl . h " <nl> # include " server / listener_manager_impl . h " <nl> <nl> - # include " extensions / filters / listener / original_dst / original_dst . h " <nl> # include " extensions / transport_sockets / tls / ssl_socket . h " <nl> <nl> # include " test / mocks / network / mocks . h " <nl> TEST_F ( FilterChainManagerImplTest , AddSingleFilterChain ) { <nl> EXPECT_NE ( filter_chain , nullptr ) ; <nl> } <nl> } / / namespace Server <nl> - } / / namespace Envoy <nl> \ No newline at end of file <nl> + } / / namespace Envoy <nl> | Cleanup : Remove unused dependencies ( ) | envoyproxy/envoy | e98ceeb358cd65d8fc45fc61858cb70e2a094b9b | 2019-12-13T19:18:05Z |
mmm a / validation - test / Python / bug - reducer . test - sh <nl> ppp b / validation - test / Python / bug - reducer . test - sh <nl> <nl> / / REQUIRES : OS = macosx <nl> / / REQUIRES : asserts <nl> / / REQUIRES : CMAKE_GENERATOR = Ninja <nl> + / / <nl> + / / TODO : Re - enable this . <nl> + / / REQUIRES : disabled <nl> | Merge pull request from gottesmm / pr - e2ce007bc9b1b068b1e5e826b43e6973f900347d | apple/swift | c3b98ef63ed2ba9de96a3b5631dfbfa06a170689 | 2019-08-14T18:41:12Z |
mmm a / . circleci / config . yml <nl> ppp b / . circleci / config . yml <nl> jobs : <nl> steps : <nl> # See Note [ Workspace for CircleCI scripts ] in job - specs - setup . yml <nl> - checkout <nl> + - attach_workspace : <nl> + at : / root / workspace <nl> - run : <nl> < < : * binary_checkout <nl> - run : <nl> mmm a / . circleci / verbatim - sources / job - specs / binary - job - specs . yml <nl> ppp b / . circleci / verbatim - sources / job - specs / binary - job - specs . yml <nl> <nl> steps : <nl> # See Note [ Workspace for CircleCI scripts ] in job - specs - setup . yml <nl> - checkout <nl> + - attach_workspace : <nl> + at : / root / workspace <nl> - run : <nl> < < : * binary_checkout <nl> - run : <nl> | Fix windows upload jobs ( ) | pytorch/pytorch | b44f02f8f58ca8bacb331cbe6d9fee5b36af6b52 | 2020-05-29T16:57:36Z |
mmm a / tensorflow / lite / core / shims / c / builtin_op_data . h <nl> ppp b / tensorflow / lite / core / shims / c / builtin_op_data . h <nl> WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . <nl> See the License for the specific language governing permissions and <nl> limitations under the License . <nl> = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = * / <nl> - # ifndef PARTY_TENSORFLOW_LITE_CORE_SHIMS_C_BUILTIN_OP_DATA_H_ <nl> - # define PARTY_TENSORFLOW_LITE_CORE_SHIMS_C_BUILTIN_OP_DATA_H_ <nl> + # ifndef TENSORFLOW_LITE_CORE_SHIMS_C_BUILTIN_OP_DATA_H_ <nl> + # define TENSORFLOW_LITE_CORE_SHIMS_C_BUILTIN_OP_DATA_H_ <nl> <nl> # include " tensorflow / lite / c / builtin_op_data . h " <nl> <nl> - # endif / / PARTY_TENSORFLOW_LITE_CORE_SHIMS_C_BUILTIN_OP_DATA_H_ <nl> + # endif / / TENSORFLOW_LITE_CORE_SHIMS_C_BUILTIN_OP_DATA_H_ <nl> mmm a / tensorflow / lite / core / shims / c / c_api . h <nl> ppp b / tensorflow / lite / core / shims / c / c_api . h <nl> WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . <nl> See the License for the specific language governing permissions and <nl> limitations under the License . <nl> = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = * / <nl> - # ifndef PARTY_TENSORFLOW_LITE_CORE_SHIMS_C_C_API_H_ <nl> - # define PARTY_TENSORFLOW_LITE_CORE_SHIMS_C_C_API_H_ <nl> + # ifndef TENSORFLOW_LITE_CORE_SHIMS_C_C_API_H_ <nl> + # define TENSORFLOW_LITE_CORE_SHIMS_C_C_API_H_ <nl> <nl> # include " tensorflow / lite / c / c_api . h " <nl> <nl> - # endif / / PARTY_TENSORFLOW_LITE_CORE_SHIMS_C_C_API_H_ <nl> + # endif / / TENSORFLOW_LITE_CORE_SHIMS_C_C_API_H_ <nl> mmm a / tensorflow / lite / core / shims / c / c_api_experimental . h <nl> ppp b / tensorflow / lite / core / shims / c / c_api_experimental . h <nl> WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . <nl> See the License for the specific language governing permissions and <nl> limitations under the License . <nl> = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = * / <nl> - # ifndef PARTY_TENSORFLOW_LITE_CORE_SHIMS_C_C_API_EXPERIMENTAL_H_ <nl> - # define PARTY_TENSORFLOW_LITE_CORE_SHIMS_C_C_API_EXPERIMENTAL_H_ <nl> + # ifndef TENSORFLOW_LITE_CORE_SHIMS_C_C_API_EXPERIMENTAL_H_ <nl> + # define TENSORFLOW_LITE_CORE_SHIMS_C_C_API_EXPERIMENTAL_H_ <nl> <nl> # include " tensorflow / lite / c / c_api_experimental . h " <nl> <nl> - # endif / / PARTY_TENSORFLOW_LITE_CORE_SHIMS_C_C_API_EXPERIMENTAL_H_ <nl> + # endif / / TENSORFLOW_LITE_CORE_SHIMS_C_C_API_EXPERIMENTAL_H_ <nl> mmm a / tensorflow / lite / core / shims / c / common . h <nl> ppp b / tensorflow / lite / core / shims / c / common . h <nl> WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . <nl> See the License for the specific language governing permissions and <nl> limitations under the License . 
<nl> = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = * / <nl> - # ifndef PARTY_TENSORFLOW_LITE_CORE_SHIMS_C_COMMON_H_ <nl> - # define PARTY_TENSORFLOW_LITE_CORE_SHIMS_C_COMMON_H_ <nl> + # ifndef TENSORFLOW_LITE_CORE_SHIMS_C_COMMON_H_ <nl> + # define TENSORFLOW_LITE_CORE_SHIMS_C_COMMON_H_ <nl> <nl> # include " tensorflow / lite / c / common . h " <nl> <nl> - # endif / / PARTY_TENSORFLOW_LITE_CORE_SHIMS_C_COMMON_H_ <nl> + # endif / / TENSORFLOW_LITE_CORE_SHIMS_C_COMMON_H_ <nl> | Fix header guards symbol names in the shim headers . | tensorflow/tensorflow | 9583f40df414f8ec9783546771700e1917adbaef | 2020-12-11T15:48:54Z |
mmm a / hphp / hack / src / typing / typing . ml <nl> ppp b / hphp / hack / src / typing / typing . ml <nl> and check_extend_abstract_typeconst ~ is_final p seq = <nl> <nl> and check_extend_abstract_const ~ is_final p seq = <nl> Sequence . iter seq begin fun ( x , cc ) - > <nl> - match cc . cc_type with <nl> - | r , _ when cc . cc_abstract & & not cc . cc_synthesized - > <nl> - Errors . implement_abstract ~ is_final p ( Reason . to_pos r ) " constant " x <nl> - | _ , <nl> - ( <nl> - Terr <nl> - | Tdynamic <nl> - | Tany <nl> - | Tmixed <nl> - | Tnonnull <nl> - | Tnothing <nl> - | Tarray ( _ , _ ) <nl> - | Tdarray ( _ , _ ) <nl> - | Tvarray _ <nl> - | Tvarray_or_darray _ <nl> - | Toption _ <nl> - | Tprim _ <nl> - | Tfun _ <nl> - | Tapply ( _ , _ ) <nl> - | Ttuple _ <nl> - | Tshape _ <nl> - | Taccess ( _ , _ ) <nl> - | Tthis <nl> - | Tgeneric _ <nl> - ) - > ( ) <nl> + if cc . cc_abstract & & not cc . cc_synthesized then <nl> + let cc_pos = Reason . to_pos ( fst cc . cc_type ) in <nl> + Errors . implement_abstract ~ is_final p cc_pos " constant " x <nl> end <nl> <nl> and typeconst_abstract_kind = function <nl> | Simplify Typing . check_extend_abstract_const | facebook/hhvm | 820fc784574c64123c7eeeb17fb943bf7d2282dd | 2019-04-16T19:57:28Z |
mmm a / UnitTests / Basics / PathEnumeratorTest . cpp <nl> ppp b / UnitTests / Basics / PathEnumeratorTest . cpp <nl> BOOST_FIXTURE_TEST_SUITE ( PathEnumeratorTest , PathEnumeratorSetup ) <nl> <nl> BOOST_AUTO_TEST_CASE ( test_fullPathEnumerator ) { <nl> int startVertex = 1 ; <nl> - PathEnumerator < int , int , int > it ( integerEdgeEnumerator , integerVertexEnumerator , startVertex ) ; <nl> + DepthFirstEnumerator < int , int , int > it ( integerEdgeEnumerator , integerVertexEnumerator , startVertex ) ; <nl> EnumeratedPath < int , int > path ; <nl> for ( int k = 1 ; k < 4 ; k + + ) { <nl> path = it . next ( ) ; <nl> | Adapted test of PathEnumerator to DepthFirstEnumerator . | arangodb/arangodb | b348c20961a8ae9ab7ce8117f2df85ab3f532f94 | 2016-06-10T13:45:46Z |
mmm a / googletest / include / gtest / gtest - typed - test . h <nl> ppp b / googletest / include / gtest / gtest - typed - test . h <nl> TYPED_TEST_SUITE ( FooTest , MyTypes ) ; <nl> / / Then , use TYPED_TEST ( ) instead of TEST_F ( ) to define as many typed <nl> / / tests for this test suite as you want . <nl> TYPED_TEST ( FooTest , DoesBlah ) { <nl> - / / Inside a test , refer to TypeParam to get the type parameter . <nl> - / / Since we are inside a derived class template , C + + requires use to <nl> - / / visit the members of FooTest via ' this ' . <nl> + / / Inside a test , refer to the special name TypeParam to get the type <nl> + / / parameter . Since we are inside a derived class template , C + + requires <nl> + / / us to visit the members of FooTest via ' this ' . <nl> TypeParam n = this - > value_ ; <nl> <nl> / / To visit static members of the fixture , add the TestFixture : : <nl> | adjust a comment to the similar section in advanced . md | google/googletest | 96c851d051c154257e6a0f263fdd9dc7e1e58203 | 2019-07-31T17:40:10Z |
mmm a / src / mongo / db / commands / mr . cpp <nl> ppp b / src / mongo / db / commands / mr . cpp <nl> namespace mongo { <nl> <nl> / / check to see if this is a new object we don ' t own yet <nl> / / because of a chunk migration <nl> - if ( chunkManager & & ! chunkManager - > belongsToMe ( o ) ) <nl> - continue ; <nl> + if ( chunkManager ) { <nl> + KeyPattern kp ( chunkManager - > getKeyPattern ( ) ) ; <nl> + if ( ! chunkManager - > keyBelongsToMe ( kp . extractSingleKey ( o ) ) ) { <nl> + continue ; <nl> + } <nl> + } <nl> <nl> / / do map <nl> if ( config . verbose ) mt . reset ( ) ; <nl> mmm a / src / mongo / db / dbhelpers . cpp <nl> ppp b / src / mongo / db / dbhelpers . cpp <nl> namespace mongo { <nl> verify ( shardingState . enabled ( ) ) ; <nl> <nl> / / In write lock , so will be the most up - to - date version <nl> - ShardChunkManagerPtr managerNow = shardingState . getShardChunkManager ( ns ) ; <nl> - <nl> - if ( ! managerNow | | managerNow - > belongsToMe ( obj ) ) { <nl> + / / TODO : This is not quite correct , we may be transferring docs in the same <nl> + / / range . Right now we ' re protected since we can ' t transfer docs in while we <nl> + / / delete . <nl> + ShardChunkManagerPtr managerNow = shardingState . getShardChunkManager ( ns ) ; <nl> + bool docIsOrphan = true ; <nl> + if ( managerNow ) { <nl> + KeyPattern kp ( managerNow - > getKeyPattern ( ) ) ; <nl> + docIsOrphan = ! managerNow - > keyBelongsToMe ( kp . extractSingleKey ( obj ) ) ; <nl> + } <nl> + else { <nl> + docIsOrphan = false ; <nl> + } <nl> <nl> - warning ( ) < < " aborting migration cleanup for chunk " <nl> - < < min < < " to " < < max <nl> - < < ( managerNow ? ( string ) " at document " + obj . toString ( ) : " " ) <nl> + if ( ! docIsOrphan ) { <nl> + warning ( ) < < " aborting migration cleanup for chunk " < < min < < " to " < < max <nl> + < < ( managerNow ? ( string ) " at document " + obj . toString ( ) : " " ) <nl> < < " , collection " < < ns < < " has changed " < < endl ; <nl> - <nl> break ; <nl> } <nl> } <nl> mmm a / src / mongo / db / ops / query . cpp <nl> ppp b / src / mongo / db / ops / query . cpp <nl> namespace mongo { <nl> <nl> / / This manager may be stale , but it ' s the state of chunking when the cursor was created . <nl> ShardChunkManagerPtr manager = cc - > getChunkManager ( ) ; <nl> + KeyPattern keyPattern ( manager ? manager - > getKeyPattern ( ) : BSONObj ( ) ) ; <nl> <nl> while ( 1 ) { <nl> if ( ! c - > ok ( ) ) { <nl> namespace mongo { <nl> / / in some cases ( clone collection ) there won ' t be a matcher <nl> if ( ! c - > currentMatches ( & details ) ) { <nl> } <nl> - else if ( manager & & ! manager - > belongsToMe ( cc ) ) { <nl> - LOG ( 2 ) < < " cursor skipping document in un - owned chunk : " < < c - > current ( ) < < endl ; <nl> + else if ( manager & & ! manager - > keyBelongsToMe ( cc - > extractKey ( keyPattern ) ) ) { <nl> + LOG ( 2 ) < < " cursor skipping document in un - owned chunk : " < < c - > current ( ) <nl> + < < endl ; <nl> } <nl> else { <nl> if ( c - > getsetdup ( c - > currLoc ( ) ) ) { <nl> namespace mongo { <nl> } <nl> / / TODO : should make this covered at some point <nl> resultDetails - > loadedRecord = true ; <nl> - if ( _chunkManager - > belongsToMe ( _cursor - > current ( ) ) ) { <nl> + KeyPattern kp ( _chunkManager - > getKeyPattern ( ) ) ; <nl> + if ( _chunkManager - > keyBelongsToMe ( kp . 
extractSingleKey ( _cursor - > current ( ) ) ) ) { <nl> return true ; <nl> } <nl> resultDetails - > chunkSkip = true ; <nl> namespace mongo { <nl> <nl> if ( shardingState . needShardChunkManager ( ns ) ) { <nl> ShardChunkManagerPtr m = shardingState . getShardChunkManager ( ns ) ; <nl> - if ( m & & ! m - > belongsToMe ( resObject ) ) { <nl> - / / I have something this _id <nl> - / / but it doesn ' t belong to me <nl> - / / so return nothing <nl> - resObject = BSONObj ( ) ; <nl> - found = false ; <nl> + if ( m ) { <nl> + KeyPattern kp ( m - > getKeyPattern ( ) ) ; <nl> + if ( ! m - > keyBelongsToMe ( kp . extractSingleKey ( resObject ) ) ) { <nl> + / / I have something this _id <nl> + / / but it doesn ' t belong to me <nl> + / / so return nothing <nl> + resObject = BSONObj ( ) ; <nl> + found = false ; <nl> + } <nl> } <nl> } <nl> <nl> mmm a / src / mongo / db / pipeline / document_source_cursor . cpp <nl> ppp b / src / mongo / db / pipeline / document_source_cursor . cpp <nl> namespace mongo { <nl> <nl> / / check to see if this is a new object we don ' t own yet <nl> / / because of a chunk migration <nl> - if ( chunkMgr ( ) & & ! chunkMgr ( ) - > belongsToMe ( next ) ) <nl> - continue ; <nl> + if ( chunkMgr ( ) ) { <nl> + KeyPattern kp ( chunkMgr ( ) - > getKeyPattern ( ) ) ; <nl> + if ( ! chunkMgr ( ) - > keyBelongsToMe ( kp . extractSingleKey ( next ) ) ) continue ; <nl> + } <nl> <nl> if ( ! _projection ) { <nl> pCurrent = Document ( next ) ; <nl> mmm a / src / mongo / dbtests / d_chunk_manager_tests . cpp <nl> ppp b / src / mongo / dbtests / d_chunk_manager_tests . cpp <nl> namespace { <nl> ShardChunkManager s ( collection , chunks ) ; <nl> <nl> BSONObj k1 = BSON ( " a " < < MINKEY ) ; <nl> - ASSERT ( s . belongsToMe ( k1 ) ) ; <nl> + ASSERT ( s . keyBelongsToMe ( k1 ) ) ; <nl> BSONObj k2 = BSON ( " a " < < MAXKEY ) ; <nl> - ASSERT ( ! s . belongsToMe ( k2 ) ) ; <nl> + ASSERT ( ! s . keyBelongsToMe ( k2 ) ) ; <nl> BSONObj k3 = BSON ( " a " < < 1 < < " b " < < 2 ) ; <nl> - ASSERT ( s . belongsToMe ( k3 ) ) ; <nl> + ASSERT ( s . keyBelongsToMe ( k3 ) ) ; <nl> } <nl> } ; <nl> <nl> namespace { <nl> ShardChunkManager s ( collection , chunks ) ; <nl> <nl> BSONObj k1 = BSON ( " a " < < MINKEY < < " b " < < MINKEY ) ; <nl> - ASSERT ( s . belongsToMe ( k1 ) ) ; <nl> + ASSERT ( s . keyBelongsToMe ( k1 ) ) ; <nl> BSONObj k2 = BSON ( " a " < < MAXKEY < < " b " < < MAXKEY ) ; <nl> - ASSERT ( ! s . belongsToMe ( k2 ) ) ; <nl> + ASSERT ( ! s . keyBelongsToMe ( k2 ) ) ; <nl> BSONObj k3 = BSON ( " a " < < MINKEY < < " b " < < 10 ) ; <nl> - ASSERT ( s . belongsToMe ( k3 ) ) ; <nl> + ASSERT ( s . keyBelongsToMe ( k3 ) ) ; <nl> BSONObj k4 = BSON ( " a " < < 10 < < " b " < < 20 ) ; <nl> - ASSERT ( s . belongsToMe ( k4 ) ) ; <nl> + ASSERT ( s . keyBelongsToMe ( k4 ) ) ; <nl> } <nl> } ; <nl> <nl> namespace { <nl> ShardChunkManager s ( collection , chunks ) ; <nl> <nl> BSONObj k1 = BSON ( " a " < < 5 ) ; <nl> - ASSERT ( s . belongsToMe ( k1 ) ) ; <nl> + ASSERT ( s . keyBelongsToMe ( k1 ) ) ; <nl> BSONObj k2 = BSON ( " a " < < 10 ) ; <nl> - ASSERT ( s . belongsToMe ( k2 ) ) ; <nl> + ASSERT ( s . keyBelongsToMe ( k2 ) ) ; <nl> BSONObj k3 = BSON ( " a " < < 25 ) ; <nl> - ASSERT ( ! s . belongsToMe ( k3 ) ) ; <nl> + ASSERT ( ! s . keyBelongsToMe ( k3 ) ) ; <nl> BSONObj k4 = BSON ( " a " < < 30 ) ; <nl> - ASSERT ( s . belongsToMe ( k4 ) ) ; <nl> + ASSERT ( s . keyBelongsToMe ( k4 ) ) ; <nl> BSONObj k5 = BSON ( " a " < < 40 ) ; <nl> - ASSERT ( s . belongsToMe ( k5 ) ) ; <nl> + ASSERT ( s . 
keyBelongsToMe ( k5 ) ) ; <nl> } <nl> } ; <nl> <nl> namespace { <nl> ShardChunkManagerPtr cloned ( s . clonePlus ( min , max , ChunkVersion ( 1 , 0 , OID ( ) ) / * TODO test version * / ) ) ; <nl> <nl> BSONObj k1 = BSON ( " a " < < 5 < < " b " < < 0 ) ; <nl> - ASSERT ( ! cloned - > belongsToMe ( k1 ) ) ; <nl> + ASSERT ( ! cloned - > keyBelongsToMe ( k1 ) ) ; <nl> BSONObj k2 = BSON ( " a " < < 20 < < " b " < < 0 ) ; <nl> - ASSERT ( cloned - > belongsToMe ( k2 ) ) ; <nl> + ASSERT ( cloned - > keyBelongsToMe ( k2 ) ) ; <nl> BSONObj k3 = BSON ( " a " < < 25 < < " b " < < 0 ) ; <nl> - ASSERT ( cloned - > belongsToMe ( k3 ) ) ; <nl> + ASSERT ( cloned - > keyBelongsToMe ( k3 ) ) ; <nl> BSONObj k4 = BSON ( " a " < < 30 < < " b " < < 0 ) ; <nl> - ASSERT ( ! cloned - > belongsToMe ( k4 ) ) ; <nl> + ASSERT ( ! cloned - > keyBelongsToMe ( k4 ) ) ; <nl> } <nl> } ; <nl> <nl> namespace { <nl> ShardChunkManagerPtr cloned ( s . cloneMinus ( min , max , ChunkVersion ( 1 , 0 , OID ( ) ) / * TODO test version * / ) ) ; <nl> <nl> BSONObj k1 = BSON ( " a " < < 5 < < " b " < < 0 ) ; <nl> - ASSERT ( ! cloned - > belongsToMe ( k1 ) ) ; <nl> + ASSERT ( ! cloned - > keyBelongsToMe ( k1 ) ) ; <nl> BSONObj k2 = BSON ( " a " < < 15 < < " b " < < 0 ) ; <nl> - ASSERT ( ! cloned - > belongsToMe ( k2 ) ) ; <nl> + ASSERT ( ! cloned - > keyBelongsToMe ( k2 ) ) ; <nl> BSONObj k3 = BSON ( " a " < < 30 < < " b " < < 0 ) ; <nl> - ASSERT ( cloned - > belongsToMe ( k3 ) ) ; <nl> + ASSERT ( cloned - > keyBelongsToMe ( k3 ) ) ; <nl> BSONObj k4 = BSON ( " a " < < 35 < < " b " < < 0 ) ; <nl> - ASSERT ( cloned - > belongsToMe ( k4 ) ) ; <nl> + ASSERT ( cloned - > keyBelongsToMe ( k4 ) ) ; <nl> BSONObj k5 = BSON ( " a " < < 40 < < " b " < < 0 ) ; <nl> - ASSERT ( ! cloned - > belongsToMe ( k5 ) ) ; <nl> + ASSERT ( ! cloned - > keyBelongsToMe ( k5 ) ) ; <nl> } <nl> } ; <nl> <nl> namespace { <nl> ASSERT_EQUALS ( cloned - > getVersion ( ) . toLong ( ) , version . toLong ( ) / * 1 | 101 * / ) ; <nl> ASSERT_EQUALS ( s . getNumChunks ( ) , 1u ) ; <nl> ASSERT_EQUALS ( cloned - > getNumChunks ( ) , 3u ) ; <nl> - ASSERT ( cloned - > belongsToMe ( min ) ) ; <nl> - ASSERT ( cloned - > belongsToMe ( split1 ) ) ; <nl> - ASSERT ( cloned - > belongsToMe ( split2 ) ) ; <nl> - ASSERT ( ! cloned - > belongsToMe ( max ) ) ; <nl> + ASSERT ( cloned - > keyBelongsToMe ( min ) ) ; <nl> + ASSERT ( cloned - > keyBelongsToMe ( split1 ) ) ; <nl> + ASSERT ( cloned - > keyBelongsToMe ( split2 ) ) ; <nl> + ASSERT ( ! cloned - > keyBelongsToMe ( max ) ) ; <nl> } <nl> } ; <nl> <nl> namespace { <nl> ASSERT_EQUALS ( empty - > getVersion ( ) . toLong ( ) , ChunkVersion ( 0 , 0 , OID ( ) ) . toLong ( ) ) ; <nl> ASSERT_EQUALS ( empty - > getNumChunks ( ) , 0u ) ; <nl> BSONObj k = BSON ( " a " < < 15 < < " b " < < 0 ) ; <nl> - ASSERT ( ! empty - > belongsToMe ( k ) ) ; <nl> + ASSERT ( ! empty - > keyBelongsToMe ( k ) ) ; <nl> <nl> / / we can add a chunk to an empty manager <nl> / / version should be provided <nl> namespace { <nl> ShardChunkManagerPtr cloned ( empty - > clonePlus ( min , max , nonZero ) ) ; <nl> ASSERT_EQUALS ( cloned - > getVersion ( ) . toLong ( ) , nonZero . toLong ( ) ) ; <nl> ASSERT_EQUALS ( cloned - > getNumChunks ( ) , 1u ) ; <nl> - ASSERT ( cloned - > belongsToMe ( k ) ) ; <nl> + ASSERT ( cloned - > keyBelongsToMe ( k ) ) ; <nl> } <nl> } ; <nl> <nl> mmm a / src / mongo / s / collection_manager . cpp <nl> ppp b / src / mongo / s / collection_manager . 
cpp <nl> namespace mongo { <nl> } <nl> <nl> auto_ptr < CollectionManager > manager ( new CollectionManager ) ; <nl> - manager - > _key = this - > _key ; <nl> - manager - > _key . getOwned ( ) ; <nl> + manager - > _keyPattern = this - > _keyPattern ; <nl> + manager - > _keyPattern . getOwned ( ) ; <nl> manager - > _chunksMap = this - > _chunksMap ; <nl> manager - > _chunksMap . erase ( chunk . getMin ( ) ) ; <nl> manager - > _maxShardVersion = newShardVersion ; <nl> namespace mongo { <nl> } <nl> <nl> auto_ptr < CollectionManager > manager ( new CollectionManager ) ; <nl> - manager - > _key = this - > _key ; <nl> - manager - > _key . getOwned ( ) ; <nl> + manager - > _keyPattern = this - > _keyPattern ; <nl> + manager - > _keyPattern . getOwned ( ) ; <nl> manager - > _chunksMap = this - > _chunksMap ; <nl> manager - > _chunksMap . insert ( make_pair ( chunk . getMin ( ) . getOwned ( ) , chunk . getMax ( ) . getOwned ( ) ) ) ; <nl> manager - > _maxShardVersion = newShardVersion ; <nl> namespace mongo { <nl> } <nl> <nl> auto_ptr < CollectionManager > manager ( new CollectionManager ) ; <nl> - manager - > _key = this - > _key ; <nl> - manager - > _key . getOwned ( ) ; <nl> + manager - > _keyPattern = this - > _keyPattern ; <nl> + manager - > _keyPattern . getOwned ( ) ; <nl> manager - > _chunksMap = this - > _chunksMap ; <nl> manager - > _maxShardVersion = newShardVersion ; / / will increment 2nd , 3rd , . . . chunks below <nl> <nl> namespace mongo { <nl> return manager . release ( ) ; <nl> } <nl> <nl> - bool CollectionManager : : belongsToMe ( const BSONObj & point ) const { <nl> + bool CollectionManager : : keyBelongsToMe ( const BSONObj & key ) const { <nl> / / For now , collections don ' t move . So if the collection is not sharded , assume <nl> - / / the documet ca be accessed . <nl> - if ( _key . isEmpty ( ) ) { <nl> + / / the document with the given key can be accessed . <nl> + if ( _keyPattern . isEmpty ( ) ) { <nl> return true ; <nl> } <nl> <nl> - if ( _rangesMap . size ( ) < = 0 ) { <nl> + if ( _rangesMap . size ( ) < = 0 ) { <nl> return false ; <nl> } <nl> <nl> - RangeMap : : const_iterator it = _rangesMap . upper_bound ( point ) ; <nl> - if ( it ! = _rangesMap . begin ( ) ) <nl> - it - - ; <nl> + RangeMap : : const_iterator it = _rangesMap . upper_bound ( key ) ; <nl> + if ( it ! = _rangesMap . begin ( ) ) it - - ; <nl> <nl> - bool good = rangeContains ( it - > first , it - > second , point ) ; <nl> + bool good = rangeContains ( it - > first , it - > second , key ) ; <nl> <nl> / / Logs if in debugging mode and the point doesn ' t belong here . <nl> - if ( dcompare ( ! good ) ) { <nl> - log ( ) < < " bad : " < < point < < " " <nl> - < < it - > first < < " " < < point . woCompare ( it - > first ) < < " " <nl> - < < point . woCompare ( it - > second ) < < endl ; <nl> + if ( dcompare ( ! good ) ) { <nl> + log ( ) < < " bad : " < < key < < " " < < it - > first < < " " < < key . woCompare ( it - > first ) < < " " <nl> + < < key . woCompare ( it - > second ) < < endl ; <nl> <nl> - for ( RangeMap : : const_iterator i = _rangesMap . begin ( ) ; i ! = _rangesMap . end ( ) ; + + i ) { <nl> + for ( RangeMap : : const_iterator i = _rangesMap . begin ( ) ; i ! = _rangesMap . end ( ) ; + + i ) { <nl> log ( ) < < " \ t " < < i - > first < < " \ t " < < i - > second < < " \ t " < < endl ; <nl> } <nl> } <nl> namespace mongo { <nl> <nl> string CollectionManager : : toString ( ) const { <nl> StringBuilder ss ; <nl> - ss < < " CollectionManager version : " < < _maxShardVersion . 
toString ( ) < < " key : " < < _key ; <nl> + ss < < " CollectionManager version : " < < _maxShardVersion . toString ( ) < < " key : " < < _keyPattern ; <nl> if ( _rangesMap . empty ( ) ) { <nl> return ss . str ( ) ; <nl> } <nl> mmm a / src / mongo / s / collection_manager . h <nl> ppp b / src / mongo / s / collection_manager . h <nl> namespace mongo { <nl> / / <nl> <nl> / * * <nl> - * Returns true the document ' doc ' belongs to this chunkset . Recall that documents of <nl> + * Returns true the document key ' key ' belongs to this chunkset . Recall that documents of <nl> * an in - flight chunk migration may be present and should not be considered part of the <nl> - * collection / chunkset yet . ' doc ' must contain the sharding key and , optionally , <nl> - * other attributes . <nl> + * collection / chunkset yet . Key must be the full shard key . <nl> * / <nl> - bool belongsToMe ( const BSONObj & doc ) const ; <nl> + bool keyBelongsToMe ( const BSONObj & key ) const ; <nl> <nl> / * * <nl> * Given the chunk ' s min key ( or empty doc ) in ' lookupKey ' , gets the boundaries of the <nl> namespace mongo { <nl> <nl> ChunkVersion getMaxShardVersion ( ) const { return _maxShardVersion ; } <nl> <nl> - BSONObj getKey ( ) const { return _key ; } <nl> + BSONObj getKeyPattern ( ) const { return _keyPattern ; } <nl> <nl> size_t getNumChunks ( ) const { return _chunksMap . size ( ) ; } <nl> <nl> namespace mongo { <nl> ChunkVersion _maxShardVersion ; <nl> <nl> / / key pattern for chunks under this range <nl> - BSONObj _key ; <nl> + BSONObj _keyPattern ; <nl> <nl> / / a map from a min key into the chunk ' s ( or range ' s ) max boundary <nl> typedef map < BSONObj , BSONObj , BSONObjCmp > RangeMap ; <nl> mmm a / src / mongo / s / collection_manager_test . cpp <nl> ppp b / src / mongo / s / collection_manager_test . cpp <nl> namespace { <nl> } ; <nl> <nl> TEST_F ( NoChunkFixture , BasicBelongsToMe ) { <nl> - ASSERT_FALSE ( getCollManager ( ) - > belongsToMe ( BSON ( " a " < < MINKEY ) ) ) ; <nl> - ASSERT_FALSE ( getCollManager ( ) - > belongsToMe ( BSON ( " a " < < 10 ) ) ) ; <nl> + ASSERT_FALSE ( getCollManager ( ) - > keyBelongsToMe ( BSON ( " a " < < MINKEY ) ) ) ; <nl> + ASSERT_FALSE ( getCollManager ( ) - > keyBelongsToMe ( BSON ( " a " < < 10 ) ) ) ; <nl> } <nl> <nl> TEST_F ( NoChunkFixture , CompoudKeyBelongsToMe ) { <nl> - ASSERT_FALSE ( getCollManager ( ) - > belongsToMe ( BSON ( " a " < < 1 < < " b " < < 2 ) ) ) ; <nl> + ASSERT_FALSE ( getCollManager ( ) - > keyBelongsToMe ( BSON ( " a " < < 1 < < " b " < < 2 ) ) ) ; <nl> } <nl> <nl> TEST_F ( NoChunkFixture , getNextFromEmpty ) { <nl> namespace { <nl> ASSERT_EQUALS ( 1u , cloned - > getNumChunks ( ) ) ; <nl> ASSERT_EQUALS ( cloned - > getMaxShardVersion ( ) . toLong ( ) , version . toLong ( ) ) ; <nl> ASSERT_EQUALS ( cloned - > getMaxCollVersion ( ) . toLong ( ) , version . 
toLong ( ) ) ; <nl> - ASSERT ( cloned - > belongsToMe ( BSON ( " a " < < 15 ) ) ) ; <nl> + ASSERT ( cloned - > keyBelongsToMe ( BSON ( " a " < < 15 ) ) ) ; <nl> } <nl> <nl> TEST_F ( NoChunkFixture , MustHaveVersionForFirstChunk ) { <nl> namespace { <nl> } ; <nl> <nl> TEST_F ( SingleChunkFixture , BasicBelongsToMe ) { <nl> - ASSERT ( getCollManager ( ) - > belongsToMe ( BSON ( " a " < < 10 ) ) ) ; <nl> - ASSERT ( getCollManager ( ) - > belongsToMe ( BSON ( " a " < < 15 ) ) ) ; <nl> - ASSERT ( getCollManager ( ) - > belongsToMe ( BSON ( " a " < < 19 ) ) ) ; <nl> + ASSERT ( getCollManager ( ) - > keyBelongsToMe ( BSON ( " a " < < 10 ) ) ) ; <nl> + ASSERT ( getCollManager ( ) - > keyBelongsToMe ( BSON ( " a " < < 15 ) ) ) ; <nl> + ASSERT ( getCollManager ( ) - > keyBelongsToMe ( BSON ( " a " < < 19 ) ) ) ; <nl> } <nl> <nl> TEST_F ( SingleChunkFixture , DoesntBelongsToMe ) { <nl> - ASSERT_FALSE ( getCollManager ( ) - > belongsToMe ( BSON ( " a " < < 0 ) ) ) ; <nl> - ASSERT_FALSE ( getCollManager ( ) - > belongsToMe ( BSON ( " a " < < 9 ) ) ) ; <nl> - ASSERT_FALSE ( getCollManager ( ) - > belongsToMe ( BSON ( " a " < < 20 ) ) ) ; <nl> - ASSERT_FALSE ( getCollManager ( ) - > belongsToMe ( BSON ( " a " < < 1234 ) ) ) ; <nl> - ASSERT_FALSE ( getCollManager ( ) - > belongsToMe ( BSON ( " a " < < MINKEY ) ) ) ; <nl> - ASSERT_FALSE ( getCollManager ( ) - > belongsToMe ( BSON ( " a " < < MAXKEY ) ) ) ; <nl> + ASSERT_FALSE ( getCollManager ( ) - > keyBelongsToMe ( BSON ( " a " < < 0 ) ) ) ; <nl> + ASSERT_FALSE ( getCollManager ( ) - > keyBelongsToMe ( BSON ( " a " < < 9 ) ) ) ; <nl> + ASSERT_FALSE ( getCollManager ( ) - > keyBelongsToMe ( BSON ( " a " < < 20 ) ) ) ; <nl> + ASSERT_FALSE ( getCollManager ( ) - > keyBelongsToMe ( BSON ( " a " < < 1234 ) ) ) ; <nl> + ASSERT_FALSE ( getCollManager ( ) - > keyBelongsToMe ( BSON ( " a " < < MINKEY ) ) ) ; <nl> + ASSERT_FALSE ( getCollManager ( ) - > keyBelongsToMe ( BSON ( " a " < < MAXKEY ) ) ) ; <nl> } <nl> <nl> TEST_F ( SingleChunkFixture , CompoudKeyBelongsToMe ) { <nl> - ASSERT ( getCollManager ( ) - > belongsToMe ( BSON ( " a " < < 15 < < " a " < < 14 ) ) ) ; <nl> + ASSERT ( getCollManager ( ) - > keyBelongsToMe ( BSON ( " a " < < 15 < < " a " < < 14 ) ) ) ; <nl> } <nl> <nl> TEST_F ( SingleChunkFixture , getNextFromEmpty ) { <nl> namespace { <nl> ASSERT_EQUALS ( cloned - > getMaxShardVersion ( ) . toLong ( ) , zeroVersion . toLong ( ) ) ; <nl> ASSERT_EQUALS ( cloned - > getMaxCollVersion ( ) . toLong ( ) , <nl> getCollManager ( ) - > getMaxCollVersion ( ) . toLong ( ) ) ; <nl> - ASSERT_FALSE ( cloned - > belongsToMe ( BSON ( " a " < < 15 ) ) ) ; <nl> + ASSERT_FALSE ( cloned - > keyBelongsToMe ( BSON ( " a " < < 15 ) ) ) ; <nl> } <nl> <nl> TEST_F ( SingleChunkFixture , LastChunkMinusCantHaveNonZeroVersion ) { <nl> namespace { <nl> / / if shard key is compound . 
<nl> <nl> TEST_F ( SingleChunkMinMaxCompoundKeyFixture , CompoudKeyBelongsToMe ) { <nl> - ASSERT ( getCollManager ( ) - > belongsToMe ( BSON ( " a " < < MINKEY < < " b " < < MINKEY ) ) ) ; <nl> - ASSERT_FALSE ( getCollManager ( ) - > belongsToMe ( BSON ( " a " < < MAXKEY < < " b " < < MAXKEY ) ) ) ; <nl> - ASSERT ( getCollManager ( ) - > belongsToMe ( BSON ( " a " < < MINKEY < < " b " < < 10 ) ) ) ; <nl> - ASSERT ( getCollManager ( ) - > belongsToMe ( BSON ( " a " < < 10 < < " b " < < 20 ) ) ) ; <nl> + ASSERT ( getCollManager ( ) - > keyBelongsToMe ( BSON ( " a " < < MINKEY < < " b " < < MINKEY ) ) ) ; <nl> + ASSERT_FALSE ( getCollManager ( ) - > keyBelongsToMe ( BSON ( " a " < < MAXKEY < < " b " < < MAXKEY ) ) ) ; <nl> + ASSERT ( getCollManager ( ) - > keyBelongsToMe ( BSON ( " a " < < MINKEY < < " b " < < 10 ) ) ) ; <nl> + ASSERT ( getCollManager ( ) - > keyBelongsToMe ( BSON ( " a " < < 10 < < " b " < < 20 ) ) ) ; <nl> } <nl> <nl> / * * <nl> namespace { <nl> <nl> / / TODO : test maxShardVersion , maxCollVersion <nl> <nl> - ASSERT_FALSE ( cloned - > belongsToMe ( BSON ( " a " < < 25 < < " b " < < 0 ) ) ) ; <nl> - ASSERT_FALSE ( cloned - > belongsToMe ( BSON ( " a " < < 29 < < " b " < < 0 ) ) ) ; <nl> - ASSERT ( cloned - > belongsToMe ( BSON ( " a " < < 30 < < " b " < < 0 ) ) ) ; <nl> - ASSERT ( cloned - > belongsToMe ( BSON ( " a " < < 45 < < " b " < < 0 ) ) ) ; <nl> - ASSERT ( cloned - > belongsToMe ( BSON ( " a " < < 49 < < " b " < < 0 ) ) ) ; <nl> - ASSERT_FALSE ( cloned - > belongsToMe ( BSON ( " a " < < 50 < < " b " < < 0 ) ) ) ; <nl> + ASSERT_FALSE ( cloned - > keyBelongsToMe ( BSON ( " a " < < 25 < < " b " < < 0 ) ) ) ; <nl> + ASSERT_FALSE ( cloned - > keyBelongsToMe ( BSON ( " a " < < 29 < < " b " < < 0 ) ) ) ; <nl> + ASSERT ( cloned - > keyBelongsToMe ( BSON ( " a " < < 30 < < " b " < < 0 ) ) ) ; <nl> + ASSERT ( cloned - > keyBelongsToMe ( BSON ( " a " < < 45 < < " b " < < 0 ) ) ) ; <nl> + ASSERT ( cloned - > keyBelongsToMe ( BSON ( " a " < < 49 < < " b " < < 0 ) ) ) ; <nl> + ASSERT_FALSE ( cloned - > keyBelongsToMe ( BSON ( " a " < < 50 < < " b " < < 0 ) ) ) ; <nl> } <nl> <nl> TEST_F ( TwoChunksWithGapCompoundKeyFixture , ClonePlusOverlappingRange ) { <nl> namespace { <nl> <nl> / / TODO : test maxShardVersion , maxCollVersion <nl> <nl> - ASSERT_FALSE ( cloned - > belongsToMe ( BSON ( " a " < < 5 < < " b " < < 0 ) ) ) ; <nl> - ASSERT_FALSE ( cloned - > belongsToMe ( BSON ( " a " < < 15 < < " b " < < 0 ) ) ) ; <nl> - ASSERT ( cloned - > belongsToMe ( BSON ( " a " < < 30 < < " b " < < 0 ) ) ) ; <nl> - ASSERT ( cloned - > belongsToMe ( BSON ( " a " < < 35 < < " b " < < 0 ) ) ) ; <nl> - ASSERT_FALSE ( cloned - > belongsToMe ( BSON ( " a " < < 40 < < " b " < < 0 ) ) ) ; <nl> + ASSERT_FALSE ( cloned - > keyBelongsToMe ( BSON ( " a " < < 5 < < " b " < < 0 ) ) ) ; <nl> + ASSERT_FALSE ( cloned - > keyBelongsToMe ( BSON ( " a " < < 15 < < " b " < < 0 ) ) ) ; <nl> + ASSERT ( cloned - > keyBelongsToMe ( BSON ( " a " < < 30 < < " b " < < 0 ) ) ) ; <nl> + ASSERT ( cloned - > keyBelongsToMe ( BSON ( " a " < < 35 < < " b " < < 0 ) ) ) ; <nl> + ASSERT_FALSE ( cloned - > keyBelongsToMe ( BSON ( " a " < < 40 < < " b " < < 0 ) ) ) ; <nl> } <nl> <nl> TEST_F ( TwoChunksWithGapCompoundKeyFixture , CloneMinusNonExisting ) { <nl> namespace { <nl> ASSERT_EQUALS ( cloned - > getMaxCollVersion ( ) . toLong ( ) , version . 
toLong ( ) ) ; <nl> ASSERT_EQUALS ( getCollManager ( ) - > getNumChunks ( ) , 2u ) ; <nl> ASSERT_EQUALS ( cloned - > getNumChunks ( ) , 4u ) ; <nl> - ASSERT ( cloned - > belongsToMe ( min ) ) ; <nl> - ASSERT ( cloned - > belongsToMe ( split1 ) ) ; <nl> - ASSERT ( cloned - > belongsToMe ( split2 ) ) ; <nl> - ASSERT ( ! cloned - > belongsToMe ( max ) ) ; <nl> + ASSERT ( cloned - > keyBelongsToMe ( min ) ) ; <nl> + ASSERT ( cloned - > keyBelongsToMe ( split1 ) ) ; <nl> + ASSERT ( cloned - > keyBelongsToMe ( split2 ) ) ; <nl> + ASSERT ( ! cloned - > keyBelongsToMe ( max ) ) ; <nl> } <nl> <nl> TEST_F ( TwoChunksWithGapCompoundKeyFixture , CloneSplitOutOfRangeSplitPoint ) { <nl> namespace { <nl> } ; <nl> <nl> TEST_F ( ThreeChunkWithRangeGapFixture , ShardOwnsDoc ) { <nl> - ASSERT ( getCollManager ( ) - > belongsToMe ( BSON ( " a " < < 5 ) ) ) ; <nl> - ASSERT ( getCollManager ( ) - > belongsToMe ( BSON ( " a " < < 10 ) ) ) ; <nl> - ASSERT ( getCollManager ( ) - > belongsToMe ( BSON ( " a " < < 30 ) ) ) ; <nl> - ASSERT ( getCollManager ( ) - > belongsToMe ( BSON ( " a " < < 40 ) ) ) ; <nl> + ASSERT ( getCollManager ( ) - > keyBelongsToMe ( BSON ( " a " < < 5 ) ) ) ; <nl> + ASSERT ( getCollManager ( ) - > keyBelongsToMe ( BSON ( " a " < < 10 ) ) ) ; <nl> + ASSERT ( getCollManager ( ) - > keyBelongsToMe ( BSON ( " a " < < 30 ) ) ) ; <nl> + ASSERT ( getCollManager ( ) - > keyBelongsToMe ( BSON ( " a " < < 40 ) ) ) ; <nl> } <nl> <nl> TEST_F ( ThreeChunkWithRangeGapFixture , ShardDoesntOwnDoc ) { <nl> - ASSERT_FALSE ( getCollManager ( ) - > belongsToMe ( BSON ( " a " < < 25 ) ) ) ; <nl> - ASSERT_FALSE ( getCollManager ( ) - > belongsToMe ( BSON ( " a " < < MAXKEY ) ) ) ; <nl> + ASSERT_FALSE ( getCollManager ( ) - > keyBelongsToMe ( BSON ( " a " < < 25 ) ) ) ; <nl> + ASSERT_FALSE ( getCollManager ( ) - > keyBelongsToMe ( BSON ( " a " < < MAXKEY ) ) ) ; <nl> } <nl> <nl> TEST_F ( ThreeChunkWithRangeGapFixture , GetNextFromEmpty ) { <nl> mmm a / src / mongo / s / d_chunk_manager . cpp <nl> ppp b / src / mongo / s / d_chunk_manager . cpp <nl> namespace mongo { <nl> BSONElement e = collectionDoc [ " key " ] ; <nl> uassert ( 13542 , str : : stream ( ) < < " collection doesn ' t have a key : " < < collectionDoc , ! e . eoo ( ) & & e . isABSONObj ( ) ) ; <nl> <nl> - _key = e . Obj ( ) . getOwned ( ) ; <nl> + _keyPattern = e . Obj ( ) . getOwned ( ) ; <nl> } <nl> <nl> void ShardChunkManager : : _fillChunks ( DBClientCursorInterface * cursor ) { <nl> namespace mongo { <nl> _rangesMap . insert ( make_pair ( min , max ) ) ; <nl> } <nl> <nl> - static bool contains ( const BSONObj & min , const BSONObj & max , const BSONObj & point ) { <nl> - return point . woCompare ( min ) > = 0 & & point . woCompare ( max ) < 0 ; <nl> + static bool contains ( const BSONObj & min , const BSONObj & max , const BSONObj & key ) { <nl> + return key . woCompare ( min ) > = 0 & & key . woCompare ( max ) < 0 ; <nl> } <nl> <nl> - bool ShardChunkManager : : belongsToMe ( ClientCursor * cc ) const { <nl> - verify ( cc ) ; <nl> - if ( _rangesMap . size ( ) = = 0 ) <nl> - return false ; <nl> - <nl> - KeyPattern pat ( _key ) ; <nl> - return _belongsToMe ( cc - > extractKey ( pat ) ) ; <nl> - } <nl> + bool ShardChunkManager : : keyBelongsToMe ( const BSONObj & key ) const { <nl> <nl> - bool ShardChunkManager : : belongsToMe ( const BSONObj & doc ) const { <nl> - if ( _rangesMap . size ( ) = = 0 ) <nl> - return false ; <nl> - <nl> - KeyPattern pat ( _key ) ; <nl> - return _belongsToMe ( pat . 
extractSingleKey ( doc ) ) ; <nl> - } <nl> + if ( _rangesMap . size ( ) = = 0 ) return false ; <nl> <nl> - bool ShardChunkManager : : _belongsToMe ( const BSONObj & point ) const { <nl> - RangeMap : : const_iterator it = _rangesMap . upper_bound ( point ) ; <nl> + RangeMap : : const_iterator it = _rangesMap . upper_bound ( key ) ; <nl> if ( it ! = _rangesMap . begin ( ) ) <nl> it - - ; <nl> <nl> - bool good = contains ( it - > first , it - > second , point ) ; <nl> + bool good = contains ( it - > first , it - > second , key ) ; <nl> <nl> # if 0 <nl> if ( ! good ) { <nl> namespace mongo { <nl> _assertChunkExists ( min , max ) ; <nl> <nl> auto_ptr < ShardChunkManager > p ( new ShardChunkManager ) ; <nl> - p - > _key = this - > _key ; <nl> + p - > _keyPattern = this - > _keyPattern ; <nl> <nl> if ( _chunksMap . size ( ) = = 1 ) { <nl> / / if left with no chunks , just reset version <nl> namespace mongo { <nl> <nl> auto_ptr < ShardChunkManager > p ( new ShardChunkManager ) ; <nl> <nl> - p - > _key = this - > _key ; <nl> + p - > _keyPattern = this - > _keyPattern ; <nl> p - > _chunksMap = this - > _chunksMap ; <nl> p - > _chunksMap . insert ( make_pair ( min . getOwned ( ) , max . getOwned ( ) ) ) ; <nl> p - > _version = version ; <nl> namespace mongo { <nl> <nl> auto_ptr < ShardChunkManager > p ( new ShardChunkManager ) ; <nl> <nl> - p - > _key = this - > _key ; <nl> + p - > _keyPattern = this - > _keyPattern ; <nl> p - > _chunksMap = this - > _chunksMap ; <nl> p - > _version = version ; / / will increment second , third , . . . chunks below <nl> <nl> namespace mongo { <nl> <nl> string ShardChunkManager : : toString ( ) const { <nl> StringBuilder ss ; <nl> - ss < < " ShardChunkManager version : " < < _version . toString ( ) < < " key : " < < _key ; <nl> + ss < < " ShardChunkManager version : " < < _version . toString ( ) < < " keyPattern : " < < _keyPattern ; <nl> bool first = true ; <nl> for ( RangeMap : : const_iterator i = _rangesMap . begin ( ) ; i ! = _rangesMap . end ( ) ; + + i ) { <nl> if ( first ) first = false ; <nl> mmm a / src / mongo / s / d_chunk_manager . h <nl> ppp b / src / mongo / s / d_chunk_manager . h <nl> namespace mongo { <nl> const ChunkVersion & version ) ; <nl> <nl> / * * <nl> - * Checks whether a document belongs to this shard . <nl> + * Checks whether a document key belongs to the collection on this shard . <nl> * <nl> - * @ param obj document containing sharding keys ( and , optionally , other attributes ) <nl> - * @ return true if shards hold the object <nl> - * / <nl> - bool belongsToMe ( const BSONObj & doc ) const ; <nl> - <nl> - / * * <nl> - * Checks whether the document currently pointed to by this cursor belongs to this shard . <nl> - * This version of the function will use a covered index if there is one in the cursor . <nl> + * Note that ! keyBelongsToMe ( ) does not necessarily imply the document is orphaned - it <nl> + * might be part of a migration . <nl> * <nl> - * @ param cc cursor pointing to an object <nl> - * @ return true if shards hold the object <nl> + * @ param key the full shard key <nl> * / <nl> - bool belongsToMe ( ClientCursor * cc ) const ; <nl> + bool keyBelongsToMe ( const BSONObj & key ) const ; <nl> <nl> / * * <nl> * Given a chunk ' s min key ( or empty doc ) , gets the boundary of the chunk following that one ( the first ) . <nl> namespace mongo { <nl> <nl> ChunkVersion getVersion ( ) const { return _version ; } <nl> ChunkVersion getCollVersion ( ) const { return _collVersion ; } <nl> - BSONObj getKey ( ) const { return _key . 
getOwned ( ) ; } <nl> + BSONObj getKeyPattern ( ) const { return _keyPattern . getOwned ( ) ; } <nl> unsigned getNumChunks ( ) const { return _chunksMap . size ( ) ; } <nl> <nl> string toString ( ) const ; <nl> private : <nl> void _init ( const string & configServer , const string & ns , const string & shardName , ShardChunkManagerPtr oldManager = ShardChunkManagerPtr ( ) ) ; <nl> <nl> - / * * <nl> - * @ same as belongsToMe but point is the extracted shard key <nl> - * / <nl> - bool _belongsToMe ( const BSONObj & point ) const ; <nl> - <nl> ChunkVersion _collVersion ; <nl> / / highest ChunkVersion for which this ShardChunkManager ' s information is accurate <nl> ChunkVersion _version ; <nl> <nl> / / key pattern for chunks under this range <nl> - BSONObj _key ; <nl> + BSONObj _keyPattern ; <nl> <nl> / / a map from a min key into the chunk ' s ( or range ' s ) max boundary <nl> typedef map < BSONObj , BSONObj , BSONObjCmp > RangeMap ; <nl> mmm a / src / mongo / s / d_migrate . cpp <nl> ppp b / src / mongo / s / d_migrate . cpp <nl> namespace mongo { <nl> <nl> ShardChunkManagerPtr chunkManager = shardingState . getShardChunkManager ( ns ) ; <nl> verify ( chunkManager ! = NULL ) ; <nl> - BSONObj shardKeyPattern = chunkManager - > getKey ( ) ; <nl> + BSONObj shardKeyPattern = chunkManager - > getKeyPattern ( ) ; <nl> if ( shardKeyPattern . isEmpty ( ) ) { <nl> errmsg = " no shard key found " ; <nl> return false ; <nl> mmm a / src / mongo / s / d_state . cpp <nl> ppp b / src / mongo / s / d_state . cpp <nl> namespace mongo { <nl> ShardChunkManagerPtr p ( ShardChunkManager : : make ( c , ns , _shardName , currManager ) ) ; <nl> <nl> / / Handle the case where the collection isn ' t sharded more gracefully <nl> - if ( p - > getKey ( ) . isEmpty ( ) ) { <nl> + if ( p - > getKeyPattern ( ) . isEmpty ( ) ) { <nl> version = ConfigVersion ( 0 , OID ( ) ) ; <nl> / / There was an error getting any data for this collection , return false <nl> return false ; <nl> mmm a / src / mongo / s / metadata_loader . cpp <nl> ppp b / src / mongo / s / metadata_loader . cpp <nl> namespace mongo { <nl> <nl> if ( ! collDoc . getKeyPattern ( ) . isEmpty ( ) ) { <nl> <nl> - manager - > _key = collDoc . getKeyPattern ( ) ; <nl> + manager - > _keyPattern = collDoc . getKeyPattern ( ) ; <nl> <nl> if ( ! initChunks ( collDoc , ns , shard , oldManager , manager , errMsg ) ) { <nl> return false ; <nl> namespace mongo { <nl> < < ns < < " is unsharded on this shard " < < endl ; <nl> } <nl> <nl> - manager - > _key = BSONObj ( ) ; <nl> + manager - > _keyPattern = BSONObj ( ) ; <nl> manager - > _maxShardVersion = ChunkVersion ( 1 , 0 , collDoc . getEpoch ( ) ) ; <nl> manager - > _maxCollVersion = manager - > _maxShardVersion ; <nl> } <nl> mmm a / src / mongo / s / metadata_loader_test . cpp <nl> ppp b / src / mongo / s / metadata_loader_test . cpp <nl> namespace { <nl> " shard0000 " , <nl> NULL , / * no old manager * / <nl> NULL / * no need for errMsg * / ) ; <nl> - ASSERT_TRUE ( manager - > getKey ( ) . equal ( BSON ( " a " < < 1 ) ) ) ; <nl> + ASSERT_TRUE ( manager - > getKeyPattern ( ) . equal ( BSON ( " a " < < 1 ) ) ) ; <nl> } <nl> <nl> TEST_F ( ConfigServerFixture , SingleChunkGetMaxCollVersion ) { <nl> | SERVER - 8598 push key expression evaluation out of ShardChunkManager | mongodb/mongo | 4cb7655e3da3466265ace5d6cf44c55b931f3d15 | 2013-06-04T20:44:06Z |
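The MongoDB change above pushes shard-key extraction out of ShardChunkManager, so keyBelongsToMe() only ever receives a pre-extracted full shard key and does a plain range lookup. Below is a minimal sketch of that lookup, with the BSON key machinery replaced by plain ints so it compiles on its own; the stand-in types are assumptions for illustration, not the real MongoDB classes.

    // Mirrors ShardChunkManager::keyBelongsToMe(): find the candidate range
    // with upper_bound, step back one entry, then test [min, max) containment.
    #include <map>
    #include <cassert>

    using RangeMap = std::map<int, int>;  // min key -> max key (half-open range)

    bool keyBelongsToMe(const RangeMap& ranges, int key) {
        if (ranges.empty()) return false;
        auto it = ranges.upper_bound(key);
        if (it != ranges.begin()) --it;
        return key >= it->first && key < it->second;
    }

    int main() {
        RangeMap ranges{{10, 20}, {30, 40}};   // two chunks: [10,20) and [30,40)
        assert(keyBelongsToMe(ranges, 15));
        assert(!keyBelongsToMe(ranges, 25));   // falls in the gap between chunks
        assert(!keyBelongsToMe(ranges, 40));   // max bound is exclusive
        return 0;
    }

The caller is now responsible for extracting the key (e.g. via the key pattern) before asking the manager, which is exactly what the commit message describes.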
mmm a / src / ic / accessor - assembler . cc <nl> ppp b / src / ic / accessor - assembler . cc <nl> void AccessorAssembler : : GenericPropertyLoad ( Node * receiver , Node * receiver_map , <nl> Variable var_value ( this , MachineRepresentation : : kTagged ) ; <nl> <nl> / / Receivers requiring non - standard accesses ( interceptors , access <nl> - / / checks , string wrappers , proxies ) are handled in the runtime . <nl> - / / We special - case strings here , to support loading < Symbol . split > etc . <nl> - Variable var_receiver ( this , MachineRepresentation : : kTagged ) ; <nl> - Variable var_receiver_map ( this , MachineRepresentation : : kTagged ) ; <nl> - Variable var_instance_type ( this , MachineRepresentation : : kWord32 ) ; <nl> - var_receiver . Bind ( receiver ) ; <nl> - var_receiver_map . Bind ( receiver_map ) ; <nl> - var_instance_type . Bind ( instance_type ) ; <nl> - Label normal_receiver ( this ) ; <nl> - GotoIf ( Int32GreaterThan ( instance_type , <nl> - Int32Constant ( LAST_SPECIAL_RECEIVER_TYPE ) ) , <nl> - & normal_receiver ) ; <nl> - GotoIf ( Int32GreaterThanOrEqual ( instance_type , <nl> - Int32Constant ( FIRST_NONSTRING_TYPE ) ) , <nl> + / / checks , strings and string wrappers , proxies ) are handled in the runtime . <nl> + GotoIf ( Int32LessThanOrEqual ( instance_type , <nl> + Int32Constant ( LAST_SPECIAL_RECEIVER_TYPE ) ) , <nl> slow ) ; <nl> - CSA_ASSERT ( this , WordEqual ( LoadMapConstructorFunctionIndex ( receiver_map ) , <nl> - IntPtrConstant ( Context : : STRING_FUNCTION_INDEX ) ) ) ; <nl> - Node * native_context = LoadNativeContext ( p - > context ) ; <nl> - Node * constructor_function = <nl> - LoadContextElement ( native_context , Context : : STRING_FUNCTION_INDEX ) ; <nl> - Node * initial_map = LoadObjectField ( constructor_function , <nl> - JSFunction : : kPrototypeOrInitialMapOffset ) ; <nl> - var_receiver . Bind ( LoadMapPrototype ( initial_map ) ) ; <nl> - var_receiver_map . Bind ( LoadMap ( var_receiver . value ( ) ) ) ; <nl> - var_instance_type . Bind ( LoadMapInstanceType ( var_receiver_map . value ( ) ) ) ; <nl> - Goto ( & normal_receiver ) ; <nl> - <nl> - Bind ( & normal_receiver ) ; <nl> - receiver = var_receiver . value ( ) ; <nl> - receiver_map = var_receiver_map . value ( ) ; <nl> - instance_type = var_instance_type . value ( ) ; <nl> + <nl> / / Check if the receiver has fast or slow properties . <nl> Node * properties = LoadProperties ( receiver ) ; <nl> Node * properties_map = LoadMap ( properties ) ; <nl> void AccessorAssembler : : GenericPropertyLoad ( Node * receiver , Node * receiver_map , <nl> / / See also TryLookupProperty ( ) which has the same limitation . <nl> const int32_t kMaxLinear = 210 ; <nl> Label stub_cache ( this ) ; <nl> - Node * bitfield3 = LoadMapBitField3 ( var_receiver_map . value ( ) ) ; <nl> + Node * bitfield3 = LoadMapBitField3 ( receiver_map ) ; <nl> Node * nof = DecodeWordFromWord32 < Map : : NumberOfOwnDescriptorsBits > ( bitfield3 ) ; <nl> GotoIf ( UintPtrLessThan ( IntPtrConstant ( kMaxLinear ) , nof ) , & stub_cache ) ; <nl> - Node * descriptors = LoadMapDescriptors ( var_receiver_map . 
value ( ) ) ; <nl> + Node * descriptors = LoadMapDescriptors ( receiver_map ) ; <nl> Variable var_name_index ( this , MachineType : : PointerRepresentation ( ) ) ; <nl> Label if_descriptor_found ( this ) ; <nl> DescriptorLookupLinear ( key , descriptors , nof , & if_descriptor_found , <nl> void AccessorAssembler : : GenericPropertyLoad ( Node * receiver , Node * receiver_map , <nl> <nl> Bind ( & if_descriptor_found ) ; <nl> { <nl> - LoadPropertyFromFastObject ( receiver , var_receiver_map . value ( ) , descriptors , <nl> + LoadPropertyFromFastObject ( receiver , receiver_map , descriptors , <nl> var_name_index . value ( ) , & var_details , <nl> & var_value ) ; <nl> Goto ( & if_found_on_receiver ) ; <nl> void AccessorAssembler : : GenericPropertyLoad ( Node * receiver , Node * receiver_map , <nl> Bind ( & if_found_on_receiver ) ; <nl> { <nl> Node * value = CallGetterIfAccessor ( var_value . value ( ) , var_details . value ( ) , <nl> - p - > context , var_receiver . value ( ) , slow ) ; <nl> + p - > context , receiver , slow ) ; <nl> IncrementCounter ( isolate ( ) - > counters ( ) - > ic_keyed_load_generic_symbol ( ) , 1 ) ; <nl> Return ( value ) ; <nl> } <nl> void AccessorAssembler : : GenericPropertyLoad ( Node * receiver , Node * receiver_map , <nl> Variable * merged_variables [ ] = { & var_holder_map , & var_holder_instance_type } ; <nl> Label loop ( this , arraysize ( merged_variables ) , merged_variables ) ; <nl> <nl> - var_holder_map . Bind ( var_receiver_map . value ( ) ) ; <nl> - var_holder_instance_type . Bind ( var_instance_type . value ( ) ) ; <nl> + var_holder_map . Bind ( receiver_map ) ; <nl> + var_holder_instance_type . Bind ( instance_type ) ; <nl> / / Private symbols must not be looked up on the prototype chain . <nl> GotoIf ( IsPrivateSymbol ( key ) , & return_undefined ) ; <nl> Goto ( & loop ) ; <nl> void AccessorAssembler : : GenericPropertyLoad ( Node * receiver , Node * receiver_map , <nl> var_holder_map . Bind ( proto_map ) ; <nl> var_holder_instance_type . Bind ( proto_instance_type ) ; <nl> Label next_proto ( this ) , return_value ( this , & var_value ) , goto_slow ( this ) ; <nl> - TryGetOwnProperty ( p - > context , var_receiver . value ( ) , proto , proto_map , <nl> + TryGetOwnProperty ( p - > context , receiver , proto , proto_map , <nl> proto_instance_type , key , & return_value , & var_value , <nl> & next_proto , & goto_slow ) ; <nl> <nl> | Revert " [ stubs ] KeyedLoadGeneric : support loading properties from strings " | v8/v8 | a5298c6f525cb90c46e8f16966e162f38117c012 | 2017-02-07T01:05:42Z |
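After the revert above, GenericPropertyLoad keeps a single gate: any receiver whose instance type is at or below LAST_SPECIAL_RECEIVER_TYPE (strings and string wrappers included) falls through to the runtime instead of being redirected to the String prototype. A stand-alone sketch of that gate follows; the numeric constants are illustrative placeholders, not V8's real instance-type codes.

    #include <cassert>

    constexpr int kLastSpecialReceiverType = 100;  // assumption: placeholder value
    constexpr int kStringInstanceType      = 40;   // assumption: placeholder value
    constexpr int kPlainObjectInstanceType = 200;  // assumption: placeholder value

    enum class LoadPath { Generic, RuntimeSlow };

    // Mirrors the single comparison kept in GenericPropertyLoad().
    LoadPath choosePath(int instanceType) {
        if (instanceType <= kLastSpecialReceiverType)
            return LoadPath::RuntimeSlow;   // strings, proxies, access checks, ...
        return LoadPath::Generic;           // ordinary fast/slow-property objects
    }

    int main() {
        assert(choosePath(kStringInstanceType) == LoadPath::RuntimeSlow);
        assert(choosePath(kPlainObjectInstanceType) == LoadPath::Generic);
        return 0;
    }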
mmm a / ChangeLog <nl> ppp b / ChangeLog <nl> <nl> + 2010 - 01 - 10 : Version 3 . 0 . 7 <nl> + <nl> + Stopped calling inherited setters when creating object literals <nl> + ( issue 1015 ) . <nl> + <nl> + Changed interpretation of malformed \ c ? escapes in RegExp to match <nl> + JSC . <nl> + <nl> + Enhanced the command - line debugger interface and fixed some minor <nl> + bugs in the debugger . <nl> + <nl> + Performance improvements on the IA32 platform . <nl> + <nl> + <nl> 2010 - 01 - 05 : Version 3 . 0 . 6 <nl> <nl> - Allowed getters and setters on JSArray elements ( Issue 900 ) . <nl> + Allowed getters and setters on JSArray elements ( issue 900 ) . <nl> <nl> - Stopped JSON objects from hitting inherited setters ( part of <nl> - Issue 1015 ) . <nl> + Stopped JSON objects from hitting inherited setters ( part of <nl> + issue 1015 ) . <nl> <nl> Allowed numbers and strings as names of getters / setters in object <nl> - initializer ( Issue 820 ) . <nl> + initializer ( issue 820 ) . <nl> <nl> Added use_system_v8 option to gyp ( off by default ) , to make it easier <nl> for Linux distributions to ship with system - provided V8 library . <nl> <nl> - Exported external array data accessors ( Issue 1016 ) . <nl> + Exported external array data accessors ( issue 1016 ) . <nl> <nl> Added labelled thread names to help with debugging ( on Linux ) . <nl> <nl> <nl> Allowed forcing the use of a simulator from the build script <nl> independently of the host architecture . <nl> <nl> - Fixed FreeBSD port ( Issue 912 ) . <nl> + Fixed FreeBSD port ( issue 912 ) . <nl> <nl> Made windows - tick - processor respect D8_PATH . <nl> <nl> mmm a / src / version . cc <nl> ppp b / src / version . cc <nl> <nl> / / cannot be changed without changing the SCons build script . <nl> # define MAJOR_VERSION 3 <nl> # define MINOR_VERSION 0 <nl> - # define BUILD_NUMBER 7 <nl> + # define BUILD_NUMBER 8 <nl> # define PATCH_LEVEL 0 <nl> # define CANDIDATE_VERSION true <nl> <nl> | Prepare push to trunk . Now working on version 3 . 0 . 8 . | v8/v8 | 3d4395bab43f012cb4696e4dd6f93b7dd9f6bac0 | 2011-01-10T08:05:34Z |
mmm a / dbms / src / Interpreters / InterpreterSelectQuery . cpp <nl> ppp b / dbms / src / Interpreters / InterpreterSelectQuery . cpp <nl> void InterpreterSelectQuery : : basicInit ( BlockInputStreamPtr input_ , const NamesAn <nl> if ( query . table & & typeid_cast < ASTSelectQuery * > ( & * query . table ) ) <nl> { <nl> if ( table_column_names . empty ( ) ) <nl> - context . setColumns ( InterpreterSelectQuery ( query . table , context , to_stage , subquery_depth ) . getSampleBlock ( ) . getColumnsList ( ) ) ; <nl> + { <nl> + / / / Оптимизация : мы считаем , что запрос содержит только один SELECT , даже если это может быть <nl> + / / / в самом деле цепочка UNION ALL . Первый запрос достаточен для определения нужных столбцов . <nl> + context . setColumns ( InterpreterSelectQuery ( query . table , context , to_stage , subquery_depth , nullptr , false ) . getSampleBlock ( ) . getColumnsList ( ) ) ; <nl> + } <nl> } <nl> else <nl> { <nl> void InterpreterSelectQuery : : basicInit ( BlockInputStreamPtr input_ , const NamesAn <nl> if ( input_ ) <nl> streams . push_back ( input_ ) ; <nl> <nl> - / / / Создаем цепочку запросов SELECT и проверяем , что результаты всех запросов SELECT cовместимые . <nl> - / / / NOTE Мы можем безопасно применить static_cast вместо typeid_cast , <nl> - / / / потому что знаем , что в цепочке UNION ALL имеются только деревья типа SELECT . <nl> - InterpreterSelectQuery * interpreter = this ; <nl> - Block first = interpreter - > getSampleBlock ( ) ; <nl> - for ( ASTPtr tree = query . next_union_all ; ! tree . isNull ( ) ; tree = ( static_cast < ASTSelectQuery & > ( * tree ) ) . next_union_all ) <nl> + if ( isFirstSelectInsideUnionAll ( ) ) <nl> { <nl> - interpreter - > next_select_in_union_all . reset ( new InterpreterSelectQuery ( tree , context , to_stage , subquery_depth , nullptr , false ) ) ; <nl> - interpreter = interpreter - > next_select_in_union_all . get ( ) ; <nl> - Block current = interpreter - > getSampleBlock ( ) ; <nl> - if ( ! blocksHaveEqualStructure ( first , current ) ) <nl> - throw Exception ( " Result structures mismatch in the SELECT queries of the UNION ALL chain . Found result structure : \ n \ n " + current . dumpStructure ( ) <nl> - + " \ n \ nwhile expecting : \ n \ n " + first . dumpStructure ( ) + " \ n \ ninstead " , <nl> - ErrorCodes : : UNION_ALL_RESULT_STRUCTURES_MISMATCH ) ; <nl> + / / / Создаем цепочку запросов SELECT и проверяем , что результаты всех запросов SELECT cовместимые . <nl> + / / / NOTE Мы можем безопасно применить static_cast вместо typeid_cast , <nl> + / / / потому что знаем , что в цепочке UNION ALL имеются только деревья типа SELECT . <nl> + InterpreterSelectQuery * interpreter = this ; <nl> + Block first = interpreter - > getSampleBlock ( ) ; <nl> + for ( ASTPtr tree = query . next_union_all ; ! tree . isNull ( ) ; tree = ( static_cast < ASTSelectQuery & > ( * tree ) ) . next_union_all ) <nl> + { <nl> + interpreter - > next_select_in_union_all . reset ( new InterpreterSelectQuery ( tree , context , to_stage , subquery_depth , nullptr , false ) ) ; <nl> + interpreter = interpreter - > next_select_in_union_all . get ( ) ; <nl> + Block current = interpreter - > getSampleBlock ( ) ; <nl> + if ( ! blocksHaveEqualStructure ( first , current ) ) <nl> + throw Exception ( " Result structures mismatch in the SELECT queries of the UNION ALL chain . Found result structure : \ n \ n " + current . dumpStructure ( ) <nl> + + " \ n \ nwhile expecting : \ n \ n " + first . 
dumpStructure ( ) + " \ n \ ninstead " , <nl> + ErrorCodes : : UNION_ALL_RESULT_STRUCTURES_MISMATCH ) ; <nl> + } <nl> } <nl> } <nl> <nl> bool InterpreterSelectQuery : : hasAsterisk ( ) const <nl> if ( query . hasAsterisk ( ) ) <nl> return true ; <nl> <nl> - for ( IAST * tree = query . next_union_all . get ( ) ; tree ! = nullptr ; tree = static_cast < ASTSelectQuery * > ( tree ) - > next_union_all . get ( ) ) <nl> - { <nl> - const auto & next_query = static_cast < ASTSelectQuery & > ( * tree ) ; <nl> - if ( next_query . hasAsterisk ( ) ) <nl> - return true ; <nl> - } <nl> + if ( isFirstSelectInsideUnionAll ( ) ) <nl> + for ( IAST * tree = query . next_union_all . get ( ) ; tree ! = nullptr ; tree = static_cast < ASTSelectQuery * > ( tree ) - > next_union_all . get ( ) ) <nl> + { <nl> + const auto & next_query = static_cast < ASTSelectQuery & > ( * tree ) ; <nl> + if ( next_query . hasAsterisk ( ) ) <nl> + return true ; <nl> + } <nl> + <nl> return false ; <nl> } <nl> <nl> void InterpreterSelectQuery : : renameColumns ( ) <nl> { <nl> - for ( IAST * tree = query . next_union_all . get ( ) ; tree ! = nullptr ; tree = static_cast < ASTSelectQuery * > ( tree ) - > next_union_all . get ( ) ) <nl> - { <nl> - auto & ast = static_cast < ASTSelectQuery & > ( * tree ) ; <nl> - ast . renameColumns ( query ) ; <nl> - } <nl> + if ( isFirstSelectInsideUnionAll ( ) ) <nl> + for ( IAST * tree = query . next_union_all . get ( ) ; tree ! = nullptr ; tree = static_cast < ASTSelectQuery * > ( tree ) - > next_union_all . get ( ) ) <nl> + { <nl> + auto & ast = static_cast < ASTSelectQuery & > ( * tree ) ; <nl> + ast . renameColumns ( query ) ; <nl> + } <nl> } <nl> <nl> void InterpreterSelectQuery : : rewriteExpressionList ( const Names & required_column_names ) <nl> void InterpreterSelectQuery : : rewriteExpressionList ( const Names & required_column <nl> if ( query . distinct ) <nl> return ; <nl> <nl> - for ( IAST * tree = query . next_union_all . get ( ) ; tree ! = nullptr ; tree = static_cast < ASTSelectQuery * > ( tree ) - > next_union_all . get ( ) ) <nl> - { <nl> - auto & next_query = static_cast < ASTSelectQuery & > ( * tree ) ; <nl> - if ( next_query . distinct ) <nl> - return ; <nl> - } <nl> + if ( isFirstSelectInsideUnionAll ( ) ) <nl> + for ( IAST * tree = query . next_union_all . get ( ) ; tree ! = nullptr ; tree = static_cast < ASTSelectQuery * > ( tree ) - > next_union_all . get ( ) ) <nl> + { <nl> + auto & next_query = static_cast < ASTSelectQuery & > ( * tree ) ; <nl> + if ( next_query . distinct ) <nl> + return ; <nl> + } <nl> <nl> query . rewriteSelectExpressionList ( required_column_names ) ; <nl> + <nl> + if ( isFirstSelectInsideUnionAll ( ) ) <nl> for ( IAST * tree = query . next_union_all . get ( ) ; tree ! = nullptr ; tree = static_cast < ASTSelectQuery * > ( tree ) - > next_union_all . get ( ) ) <nl> - { <nl> - auto & next_query = static_cast < ASTSelectQuery & > ( * tree ) ; <nl> - next_query . rewriteSelectExpressionList ( required_column_names ) ; <nl> - } <nl> + { <nl> + auto & next_query = static_cast < ASTSelectQuery & > ( * tree ) ; <nl> + next_query . rewriteSelectExpressionList ( required_column_names ) ; <nl> + } <nl> } <nl> <nl> bool InterpreterSelectQuery : : isFirstSelectInsideUnionAll ( ) const <nl> | dbms : Server : more optimizations . [ # METR - 14099 ] | ClickHouse/ClickHouse | d52d26dcb54710a01df42636a8ea4115b602a21b | 2014-12-25T15:27:03Z |
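The ClickHouse change above wraps every walk of the UNION ALL chain in an isFirstSelectInsideUnionAll() guard, so only the head interpreter ever touches next_union_all. Here is a compile-on-its-own sketch of the guarded walk; the AST is reduced to a stand-in struct, and the field names are assumptions kept close to the diff.

    #include <memory>
    #include <stdexcept>
    #include <string>

    struct SelectQuery {
        std::string resultStructure;                  // stand-in for the sample block
        std::shared_ptr<SelectQuery> next_union_all;  // next SELECT in the chain
    };

    // Only the first SELECT of a UNION ALL chain owns the whole chain; the other
    // interpreters must not walk it again (that is what the new guard prevents).
    void checkUnionAllChain(const SelectQuery& query, bool isFirstSelectInsideUnionAll) {
        if (!isFirstSelectInsideUnionAll)
            return;
        const std::string& expected = query.resultStructure;
        for (auto tree = query.next_union_all; tree; tree = tree->next_union_all) {
            if (tree->resultStructure != expected)
                throw std::runtime_error("Result structures mismatch in the UNION ALL chain");
        }
    }

    int main() {
        auto second = std::make_shared<SelectQuery>(SelectQuery{"a UInt32", nullptr});
        SelectQuery first{"a UInt32", second};
        checkUnionAllChain(first, /*isFirstSelectInsideUnionAll=*/true);    // walks the chain
        checkUnionAllChain(*second, /*isFirstSelectInsideUnionAll=*/false); // skipped
        return 0;
    }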
mmm a / python / pyphantomjs / webpage . py <nl> ppp b / python / pyphantomjs / webpage . py <nl> def mainFrame ( self ) : <nl> return self . m_mainFrame <nl> <nl> def renderImage ( self ) : <nl> - frameRect = QRect ( QPoint ( 0 , 0 ) , self . m_mainFrame . contentsSize ( ) ) <nl> + viewportSize = self . m_webPage . viewportSize ( ) <nl> + frameRect = QRect ( QPoint ( 0 , 0 ) , viewportSize ) <nl> if not self . m_clipRect . isEmpty ( ) : <nl> frameRect = self . m_clipRect <nl> <nl> - viewportSize = self . m_webPage . viewportSize ( ) <nl> - self . m_webPage . setViewportSize ( self . m_mainFrame . contentsSize ( ) ) <nl> + if self . m_webPage . pageScroll : <nl> + self . m_webPage . mainFrame ( ) . \ <nl> + setScrollPosition ( QPoint ( self . m_webPage . pageScroll . x ( ) , <nl> + self . m_webPage . pageScroll . y ( ) ) ) <nl> <nl> image = QImage ( frameRect . size ( ) , QImage . Format_ARGB32 ) <nl> image . fill ( qRgba ( 255 , 255 , 255 , 0 ) ) <nl> def viewportSize ( self , size ) : <nl> <nl> self . m_webPage . setViewportSize ( QSize ( sizes [ ' width ' ] , sizes [ ' height ' ] ) ) <nl> <nl> + @ pyqtProperty ( ' QVariantMap ' ) <nl> + def pageScroll ( self ) : <nl> + scroll = self . m_webPage . pageScroll <nl> + result = { <nl> + ' left ' : scroll . x ( ) , <nl> + ' top ' : scroll . y ( ) <nl> + } <nl> + return result <nl> + <nl> + @ pageScroll . setter <nl> + def pageScroll ( self , size ) : <nl> + names = ( ' left ' , ' top ' ) <nl> + for item in names : <nl> + try : <nl> + globals ( ) [ item ] = int ( size [ item ] ) <nl> + if globals ( ) [ item ] < 0 : <nl> + globals ( ) [ item ] = 0 <nl> + except KeyError : <nl> + globals ( ) [ item ] = getattr ( self . m_webPage . pageScroll ( ) , item ) ( ) <nl> + self . m_webPage . pageScroll = QPoint ( left , top ) <nl> + <nl> do_action ( ' WebPage ' ) <nl> + <nl> + <nl> | Added pageScroll property to webPage for javascript and changed renderImage code to render webpage considering scroll | ariya/phantomjs | 983518d91388d02a0b3799358f24fa0468dd1977 | 2011-07-08T08:59:10Z |
mmm a / xbmc / epg / EpgContainer . cpp <nl> ppp b / xbmc / epg / EpgContainer . cpp <nl> void CEpgContainer : : LoadFromDB ( void ) <nl> unsigned int iCounter ( 0 ) ; <nl> if ( m_database . IsOpen ( ) ) <nl> { <nl> - ShowProgressDialog ( false ) ; <nl> + { <nl> + / * unlock m_critSection before calling ShowProgressDialog ( ) - <nl> + this is not legal , but works around a deadlock bug ( because <nl> + ShowProgressDialog ( ) calls functions which lock <nl> + g_graphicsContext ) ; note that ShowProgressDialog ( ) is <nl> + sometimes called with m_critSection locked and sometimes <nl> + without ; this is a major bug that must be addressed <nl> + eventually * / <nl> + CSingleExit exit ( m_critSection ) ; <nl> + ShowProgressDialog ( false ) ; <nl> + } <nl> <nl> m_database . DeleteOldEpgEntries ( ) ; <nl> m_database . Get ( * this ) ; <nl> mmm a / xbmc / guilib / GUIWindowManager . cpp <nl> ppp b / xbmc / guilib / GUIWindowManager . cpp <nl> CGUIWindow * CGUIWindowManager : : GetWindow ( int id ) const <nl> CGUIWindow * window ; <nl> if ( id = = 0 | | id = = WINDOW_INVALID ) <nl> return NULL ; <nl> + <nl> + CSingleLock lock ( g_graphicsContext ) ; <nl> + <nl> window = m_idCache . Get ( id ) ; <nl> if ( window ) <nl> return window ; <nl> <nl> - CSingleLock lock ( g_graphicsContext ) ; <nl> WindowMap : : const_iterator it = m_mapWindows . find ( id ) ; <nl> if ( it ! = m_mapWindows . end ( ) ) <nl> window = ( * it ) . second ; <nl> mmm a / xbmc / pvr / PVRGUIInfo . cpp <nl> ppp b / xbmc / pvr / PVRGUIInfo . cpp <nl> void CPVRGUIInfo : : Notify ( const Observable & obs , const ObservableMessage msg ) <nl> <nl> void CPVRGUIInfo : : ShowPlayerInfo ( int iTimeout ) <nl> { <nl> - CSingleLock lock ( m_critSection ) ; <nl> + { <nl> + CSingleLock lock ( m_critSection ) ; <nl> <nl> - if ( iTimeout > 0 ) <nl> - m_ToggleShowInfo . Set ( iTimeout * 1000 ) ; <nl> + if ( iTimeout > 0 ) <nl> + m_ToggleShowInfo . Set ( iTimeout * 1000 ) ; <nl> + } <nl> <nl> g_infoManager . SetShowInfo ( true ) ; <nl> } <nl> void CPVRGUIInfo : : ToggleShowInfo ( void ) <nl> if ( m_ToggleShowInfo . IsTimePast ( ) ) <nl> { <nl> m_ToggleShowInfo . SetInfinite ( ) ; <nl> + <nl> + / * release our lock while calling into global objects ( which have <nl> + their own locks ) to avoid deadlocks * / <nl> + lock . Leave ( ) ; <nl> + <nl> g_infoManager . SetShowInfo ( false ) ; <nl> g_PVRManager . UpdateCurrentChannel ( ) ; <nl> } <nl> else if ( ! g_infoManager . GetShowInfo ( ) ) / / channel infos ( no longer ) displayed ? <nl> { <nl> + / * release our lock while calling into global objects ( which have <nl> + their own locks ) to avoid deadlocks * / <nl> + lock . Leave ( ) ; <nl> + <nl> g_PVRManager . UpdateCurrentChannel ( ) ; <nl> } <nl> } <nl> mmm a / xbmc / pvr / PVRManager . cpp <nl> ppp b / xbmc / pvr / PVRManager . cpp <nl> using namespace KODI : : MESSAGING ; <nl> <nl> using KODI : : MESSAGING : : HELPERS : : DialogResponse ; <nl> <nl> - int CPVRManager : : m_pvrWindowIds [ 12 ] = { <nl> + const int CPVRManager : : m_pvrWindowIds [ 12 ] = { <nl> WINDOW_TV_CHANNELS , <nl> WINDOW_TV_GUIDE , <nl> WINDOW_TV_RECORDINGS , <nl> bool CPVRManager : : UpgradeOutdatedAddons ( void ) <nl> auto outdatedAddons = m_outdatedAddons ; <nl> / / stop threads and unload <nl> SetState ( ManagerStateInterrupted ) ; <nl> - g_EpgContainer . Stop ( ) ; <nl> + <nl> + { <nl> + CSingleExit exit ( m_critSection ) ; <nl> + g_EpgContainer . 
Stop ( ) ; <nl> + } <nl> + <nl> m_guiInfo - > Stop ( ) ; <nl> m_addons - > Stop ( ) ; <nl> Cleanup ( ) ; <nl> bool CPVRManager : : UpgradeOutdatedAddons ( void ) <nl> if ( IsInitialising ( ) ) <nl> { <nl> SetState ( ManagerStateStarted ) ; <nl> - g_EpgContainer . Start ( true ) ; <nl> + <nl> + { <nl> + CSingleExit exit ( m_critSection ) ; <nl> + g_EpgContainer . Start ( true ) ; <nl> + } <nl> <nl> CLog : : Log ( LOGDEBUG , " PVRManager - % s - restarted " , __FUNCTION__ ) ; <nl> return true ; <nl> void CPVRManager : : Cleanup ( void ) <nl> m_pendingUpdates . clear ( ) ; <nl> <nl> / * unregister application action listener * / <nl> - g_application . UnregisterActionListener ( & CPVRActionListener : : GetInstance ( ) ) ; <nl> + { <nl> + CSingleExit exit ( m_critSection ) ; <nl> + g_application . UnregisterActionListener ( & CPVRActionListener : : GetInstance ( ) ) ; <nl> + } <nl> <nl> HideProgressDialog ( ) ; <nl> <nl> void CPVRManager : : Start ( bool bAsync / * = false * / ) <nl> m_database - > Open ( ) ; <nl> <nl> / * register application action listener * / <nl> - g_application . RegisterActionListener ( & CPVRActionListener : : GetInstance ( ) ) ; <nl> + { <nl> + CSingleExit exit ( m_critSection ) ; <nl> + g_application . RegisterActionListener ( & CPVRActionListener : : GetInstance ( ) ) ; <nl> + } <nl> <nl> / * create the supervisor thread to do all background activities * / <nl> StartUpdateThreads ( ) ; <nl> bool CPVRManager : : Load ( void ) <nl> / * reset observer for pvr windows * / <nl> for ( std : : size_t i = 0 ; i ! = ARRAY_SIZE ( m_pvrWindowIds ) ; i + + ) <nl> { <nl> + CSingleExit exit ( m_critSection ) ; <nl> CGUIWindowPVRBase * pWindow = ( CGUIWindowPVRBase * ) g_windowManager . GetWindow ( m_pvrWindowIds [ i ] ) ; <nl> if ( pWindow ) <nl> pWindow - > ResetObservers ( ) ; <nl> mmm a / xbmc / pvr / PVRManager . h <nl> ppp b / xbmc / pvr / PVRManager . h <nl> namespace PVR <nl> ManagerState m_managerState ; <nl> CStopWatch * m_parentalTimer ; <nl> std : : vector < std : : string > m_outdatedAddons ; <nl> - static int m_pvrWindowIds [ 12 ] ; <nl> + static const int m_pvrWindowIds [ 12 ] ; <nl> } ; <nl> <nl> class CPVREpgsCreateJob : public CJob <nl> | Merge pull request from FernetMenta / 8635 | xbmc/xbmc | bdaaba9b979bddd99525b21f5fba2bca71bb3035 | 2015-12-23T21:22:22Z |
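Most hunks in the Kodi commit above follow one pattern: temporarily drop the object's own critical section (via CSingleExit) before calling into global objects that take their own locks, which breaks the lock-order deadlocks the comments describe. A self-contained sketch of that pattern, using a small ScopedExit class as a stand-in for CSingleExit:

    #include <mutex>

    class ScopedExit {                       // stand-in for Kodi's CSingleExit
    public:
        explicit ScopedExit(std::mutex& m) : m_mutex(m) { m_mutex.unlock(); }
        ~ScopedExit() { m_mutex.lock(); }
    private:
        std::mutex& m_mutex;
    };

    std::mutex g_otherComponentLock;
    void callIntoOtherComponent() { std::lock_guard<std::mutex> lock(g_otherComponentLock); }

    std::mutex m_critSection;

    void doWork() {
        std::lock_guard<std::mutex> guard(m_critSection);
        // ... work that needs m_critSection ...
        {
            // Release m_critSection while calling out, so the other component can
            // never end up waiting on us while we wait on it (lock-order deadlock).
            ScopedExit exit(m_critSection);
            callIntoOtherComponent();
        }   // m_critSection is re-acquired here
        // ... more work under m_critSection ...
    }

    int main() { doWork(); return 0; }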
mmm a / src / heap / collection - barrier . cc <nl> ppp b / src / heap / collection - barrier . cc <nl> void CollectionBarrier : : ResumeThreadsAwaitingCollection ( ) { <nl> <nl> void CollectionBarrier : : ShutdownRequested ( ) { <nl> base : : MutexGuard guard ( & mutex_ ) ; <nl> + time_to_collection_scope_ . reset ( ) ; <nl> state_ . store ( RequestState : : kShutdown ) ; <nl> cond_ . NotifyAll ( ) ; <nl> } <nl> | [ heap ] Reset time_to_collection_scope_ on TearDown | v8/v8 | 41b5c8d0526334fa8d62e14b1bfa1a1e887f44ff | 2020-10-14T15:22:39Z |
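The one-line V8 fix above makes ShutdownRequested() discard the pending time-to-collection scope before flipping state and waking waiters, so no timer outlives the barrier. A simplified, self-contained sketch of that shutdown path; the types are stand-ins, not the real heap classes.

    #include <condition_variable>
    #include <mutex>
    #include <optional>

    struct TimeToCollectionScope { /* would report elapsed time on destruction */ };

    class CollectionBarrier {
    public:
        void ShutdownRequested() {
            std::lock_guard<std::mutex> guard(mutex_);
            time_to_collection_scope_.reset();  // stop timing, nothing left to report
            shutdown_ = true;
            cond_.notify_all();                 // release threads awaiting collection
        }
        void AwaitCollection() {
            std::unique_lock<std::mutex> lock(mutex_);
            cond_.wait(lock, [this] { return shutdown_; });
        }
    private:
        std::mutex mutex_;
        std::condition_variable cond_;
        std::optional<TimeToCollectionScope> time_to_collection_scope_;
        bool shutdown_ = false;
    };

    int main() {
        CollectionBarrier barrier;
        barrier.ShutdownRequested();
        barrier.AwaitCollection();  // returns immediately once shutdown was requested
        return 0;
    }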
mmm a / src / smtp . cpp <nl> ppp b / src / smtp . cpp <nl> <nl> # include < QNetworkInterface > <nl> # include < QCryptographicHash > <nl> <nl> + namespace { <nl> const short DEFAULT_PORT = 25 ; <nl> const short DEFAULT_PORT_SSL = 465 ; <nl> <nl> QByteArray hmacMD5 ( QByteArray key , const QByteArray & msg ) <nl> return QCryptographicHash : : hash ( total , QCryptographicHash : : Md5 ) ; <nl> } <nl> <nl> + QByteArray determineLocalAddress ( ) <nl> + { <nl> + QByteArray address = " 127 . 0 . 0 . 1 " ; <nl> + foreach ( const QHostAddress & addr , QNetworkInterface : : allAddresses ( ) ) { <nl> + if ( addr = = QHostAddress : : LocalHost | | addr = = QHostAddress : : LocalHostIPv6 ) <nl> + continue ; <nl> + address = addr . toString ( ) . toLatin1 ( ) ; <nl> + break ; <nl> + } <nl> + <nl> + return address ; <nl> + } <nl> + } / / namespace <nl> + <nl> Smtp : : Smtp ( QObject * parent ) : QObject ( parent ) , <nl> state ( Init ) , use_ssl ( false ) { <nl> # ifndef QT_NO_OPENSSL <nl> QByteArray Smtp : : encode_mime_header ( const QString & key , const QString & value , QT <nl> <nl> void Smtp : : ehlo ( ) <nl> { <nl> - QByteArray address = " 127 . 0 . 0 . 1 " ; <nl> - foreach ( const QHostAddress & addr , QNetworkInterface : : allAddresses ( ) ) <nl> - { <nl> - if ( addr = = QHostAddress : : LocalHost | | addr = = QHostAddress : : LocalHostIPv6 ) <nl> - continue ; <nl> - address = addr . toString ( ) . toLatin1 ( ) ; <nl> - break ; <nl> - } <nl> - / / Send EHLO <nl> - socket - > write ( " ehlo " + address + " \ r \ n " ) ; <nl> + QByteArray address = determineLocalAddress ( ) ; <nl> + socket - > write ( " ehlo " + address + " \ r \ n " ) ; <nl> socket - > flush ( ) ; <nl> state = EhloSent ; <nl> } <nl> <nl> + void Smtp : : helo ( ) <nl> + { <nl> + QByteArray address = determineLocalAddress ( ) ; <nl> + socket - > write ( " helo " + address + " \ r \ n " ) ; <nl> + socket - > flush ( ) ; <nl> + state = HeloSent ; <nl> + } <nl> + <nl> void Smtp : : parseEhloResponse ( const QByteArray & code , bool continued , const QString & line ) <nl> { <nl> if ( code ! = " 250 " ) { <nl> void Smtp : : parseEhloResponse ( const QByteArray & code , bool continued , const QStri <nl> if ( state = = EhloSent ) { <nl> / / try to send HELO instead of EHLO <nl> qDebug ( ) < < " EHLO failed , trying HELO instead . . . " ; <nl> - socket - > write ( " helo \ r \ n " ) ; <nl> - socket - > flush ( ) ; <nl> - state = HeloSent ; <nl> + helo ( ) ; <nl> } else { <nl> / / Both EHLO and HELO failed , chances are this is NOT <nl> / / a SMTP server <nl> mmm a / src / smtp . h <nl> ppp b / src / smtp . h <nl> private slots : <nl> private : <nl> QByteArray encode_mime_header ( const QString & key , const QString & value , QTextCodec * latin1 , const QByteArray & prefix = QByteArray ( ) ) ; <nl> void ehlo ( ) ; <nl> + void helo ( ) ; <nl> void parseEhloResponse ( const QByteArray & code , bool continued , const QString & line ) ; <nl> void authenticate ( ) ; <nl> void startTLS ( ) ; <nl> | add host address parameter to helo smtp request | qbittorrent/qBittorrent | a7ad34418f389f1c8f540f107977672e57c62bb3 | 2014-08-25T10:26:30Z |
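The qBittorrent commit above factors local-address discovery into determineLocalAddress() and adds a helo() that reuses it, so the HELO fallback now greets with a real non-loopback address instead of an empty one. Below is a Qt-free sketch of the same refactor, with the interface enumeration replaced by a hard-coded address list (an assumption for illustration only):

    #include <iostream>
    #include <string>
    #include <vector>

    std::string determineLocalAddress(const std::vector<std::string>& allAddresses) {
        std::string address = "127.0.0.1";          // fallback, as in the original
        for (const auto& addr : allAddresses) {
            if (addr == "127.0.0.1" || addr == "::1")
                continue;                           // skip loopback entries
            address = addr;
            break;
        }
        return address;
    }

    std::string buildHelo(const std::vector<std::string>& allAddresses) {
        return "helo " + determineLocalAddress(allAddresses) + "\r\n";
    }

    int main() {
        std::vector<std::string> addresses{"127.0.0.1", "192.168.1.10"};
        std::cout << buildHelo(addresses);   // prints: helo 192.168.1.10
        return 0;
    }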
mmm a / lib / SIL / Parser / ParseSIL . cpp <nl> ppp b / lib / SIL / Parser / ParseSIL . cpp <nl> namespace { <nl> / / / A callback to be invoked every time a type was deserialized . <nl> std : : function < void ( Type ) > ParsedTypeCallback ; <nl> <nl> - bool performTypeLocChecking ( TypeLoc & T , bool IsSILType , <nl> + Type performTypeLocChecking ( TypeRepr * TyR , bool IsSILType , <nl> GenericEnvironment * GenericEnv = nullptr ) ; <nl> <nl> void convertRequirements ( SILFunction * F , ArrayRef < RequirementRepr > From , <nl> void SILParser : : convertRequirements ( SILFunction * F , <nl> IdentTypeReprLookup PerformLookup ( P ) ; <nl> / / Use parser lexical scopes to resolve references <nl> / / to the generic parameters . <nl> - auto ResolveToInterfaceType = [ & ] ( TypeLoc Ty ) - > Type { <nl> - Ty . getTypeRepr ( ) - > walk ( PerformLookup ) ; <nl> - performTypeLocChecking ( Ty , / * IsSIL * / false ) ; <nl> - assert ( Ty . getType ( ) ) ; <nl> - return Ty . getType ( ) - > mapTypeOutOfContext ( ) ; <nl> + auto ResolveToInterfaceType = [ & ] ( TypeRepr * Ty ) - > Type { <nl> + Ty - > walk ( PerformLookup ) ; <nl> + return performTypeLocChecking ( Ty , / * IsSIL * / false ) - > mapTypeOutOfContext ( ) ; <nl> } ; <nl> <nl> for ( auto & Req : From ) { <nl> if ( Req . getKind ( ) = = RequirementReprKind : : SameType ) { <nl> - auto FirstType = ResolveToInterfaceType ( Req . getFirstTypeLoc ( ) ) ; <nl> - auto SecondType = ResolveToInterfaceType ( Req . getSecondTypeLoc ( ) ) ; <nl> + auto FirstType = ResolveToInterfaceType ( Req . getFirstTypeRepr ( ) ) ; <nl> + auto SecondType = ResolveToInterfaceType ( Req . getSecondTypeRepr ( ) ) ; <nl> Requirement ConvertedRequirement ( RequirementKind : : SameType , FirstType , <nl> SecondType ) ; <nl> To . push_back ( ConvertedRequirement ) ; <nl> void SILParser : : convertRequirements ( SILFunction * F , <nl> } <nl> <nl> if ( Req . getKind ( ) = = RequirementReprKind : : TypeConstraint ) { <nl> - auto Subject = ResolveToInterfaceType ( Req . getSubjectLoc ( ) ) ; <nl> - auto Constraint = ResolveToInterfaceType ( Req . getConstraintLoc ( ) ) ; <nl> + auto Subject = ResolveToInterfaceType ( Req . getSubjectRepr ( ) ) ; <nl> + auto Constraint = ResolveToInterfaceType ( Req . getConstraintRepr ( ) ) ; <nl> Requirement ConvertedRequirement ( RequirementKind : : Conformance , Subject , <nl> Constraint ) ; <nl> To . push_back ( ConvertedRequirement ) ; <nl> void SILParser : : convertRequirements ( SILFunction * F , <nl> } <nl> <nl> if ( Req . getKind ( ) = = RequirementReprKind : : LayoutConstraint ) { <nl> - auto Subject = ResolveToInterfaceType ( Req . getSubjectLoc ( ) ) ; <nl> + auto Subject = ResolveToInterfaceType ( Req . getSubjectRepr ( ) ) ; <nl> Requirement ConvertedRequirement ( RequirementKind : : Layout , Subject , <nl> Req . getLayoutConstraint ( ) ) ; <nl> To . push_back ( ConvertedRequirement ) ; <nl> static bool parseDeclSILOptional ( bool * isTransparent , <nl> return false ; <nl> } <nl> <nl> - bool SILParser : : performTypeLocChecking ( TypeLoc & T , bool IsSILType , <nl> + Type SILParser : : performTypeLocChecking ( TypeRepr * T , bool IsSILType , <nl> GenericEnvironment * GenericEnv ) { <nl> if ( GenericEnv = = nullptr ) <nl> GenericEnv = ContextGenericEnv ; <nl> <nl> - return swift : : performTypeLocChecking ( P . Context , T , <nl> + TypeLoc loc ( T ) ; <nl> + ( void ) swift : : performTypeLocChecking ( P . Context , loc , <nl> / * isSILMode = * / true , IsSILType , <nl> GenericEnv , & P . SF ) ; <nl> + return loc . 
getType ( ) ; <nl> } <nl> <nl> / / / Find the top - level ValueDecl or Module given a name . <nl> static ValueDecl * lookupMember ( Parser & P , Type Ty , DeclBaseName Name , <nl> bool SILParser : : parseASTType ( CanType & result , GenericEnvironment * env ) { <nl> ParserResult < TypeRepr > parsedType = P . parseType ( ) ; <nl> if ( parsedType . isNull ( ) ) return true ; <nl> - TypeLoc loc = parsedType . get ( ) ; <nl> - if ( performTypeLocChecking ( loc , / * IsSILType = * / false , env ) ) <nl> + auto resolvedType = performTypeLocChecking ( parsedType . get ( ) , / * IsSILType = * / false , env ) ; <nl> + if ( resolvedType - > hasError ( ) ) <nl> return true ; <nl> <nl> if ( env ) <nl> - result = loc . getType ( ) - > mapTypeOutOfContext ( ) - > getCanonicalType ( ) ; <nl> + result = resolvedType - > mapTypeOutOfContext ( ) - > getCanonicalType ( ) ; <nl> else <nl> - result = loc . getType ( ) - > getCanonicalType ( ) ; <nl> + result = resolvedType - > getCanonicalType ( ) ; <nl> <nl> / / Invoke the callback on the parsed type . <nl> - ParsedTypeCallback ( loc . getType ( ) ) ; <nl> + ParsedTypeCallback ( resolvedType ) ; <nl> return false ; <nl> } <nl> <nl> bool SILParser : : parseSILType ( SILType & Result , <nl> ParsedGenericEnv = env ; <nl> <nl> / / Apply attributes to the type . <nl> - TypeLoc Ty = P . applyAttributeToType ( TyR . get ( ) , attrs , specifier , specifierLoc ) ; <nl> - <nl> - if ( performTypeLocChecking ( Ty , / * IsSILType = * / true , OuterGenericEnv ) ) <nl> + auto * attrRepr = P . applyAttributeToType ( TyR . get ( ) , attrs , specifier , specifierLoc ) ; <nl> + auto Ty = performTypeLocChecking ( attrRepr , / * IsSILType = * / true , OuterGenericEnv ) ; <nl> + if ( Ty - > hasError ( ) ) <nl> return true ; <nl> <nl> - Result = SILType : : getPrimitiveType ( Ty . getType ( ) - > getCanonicalType ( ) , <nl> + Result = SILType : : getPrimitiveType ( Ty - > getCanonicalType ( ) , <nl> category ) ; <nl> <nl> / / Invoke the callback on the parsed type . <nl> - ParsedTypeCallback ( Ty . getType ( ) ) ; <nl> + ParsedTypeCallback ( Ty ) ; <nl> <nl> return false ; <nl> } <nl> bool SILParser : : parseSILBBArgsAtBranch ( SmallVector < SILValue , 6 > & Args , <nl> / / / <nl> / / / FIXME : This is a hack to work around the lack of a DeclContext for <nl> / / / witness tables . <nl> - static void bindProtocolSelfInTypeRepr ( TypeLoc & TL , ProtocolDecl * proto ) { <nl> - if ( auto typeRepr = TL . getTypeRepr ( ) ) { <nl> - / / AST walker to update ' Self ' references . <nl> - class BindProtocolSelf : public ASTWalker { <nl> - ProtocolDecl * proto ; <nl> - GenericTypeParamDecl * selfParam ; <nl> - Identifier selfId ; <nl> - <nl> - public : <nl> - BindProtocolSelf ( ProtocolDecl * proto ) <nl> - : proto ( proto ) , <nl> - selfParam ( proto - > getProtocolSelfType ( ) - > getDecl ( ) ) , <nl> - selfId ( proto - > getASTContext ( ) . Id_Self ) { <nl> - } <nl> + static void bindProtocolSelfInTypeRepr ( TypeRepr * typeRepr , ProtocolDecl * proto ) { <nl> + assert ( typeRepr ) ; <nl> <nl> - virtual bool walkToTypeReprPre ( TypeRepr * T ) override { <nl> - if ( auto ident = dyn_cast < IdentTypeRepr > ( T ) ) { <nl> - auto firstComponent = ident - > getComponentRange ( ) . front ( ) ; <nl> - if ( firstComponent - > getNameRef ( ) . isSimpleName ( selfId ) ) <nl> - firstComponent - > setValue ( selfParam , proto ) ; <nl> - } <nl> + / / AST walker to update ' Self ' references . 
<nl> + class BindProtocolSelf : public ASTWalker { <nl> + ProtocolDecl * proto ; <nl> + GenericTypeParamDecl * selfParam ; <nl> + Identifier selfId ; <nl> <nl> - return true ; <nl> + public : <nl> + BindProtocolSelf ( ProtocolDecl * proto ) <nl> + : proto ( proto ) , <nl> + selfParam ( proto - > getProtocolSelfType ( ) - > getDecl ( ) ) , <nl> + selfId ( proto - > getASTContext ( ) . Id_Self ) { <nl> + } <nl> + <nl> + virtual bool walkToTypeReprPre ( TypeRepr * T ) override { <nl> + if ( auto ident = dyn_cast < IdentTypeRepr > ( T ) ) { <nl> + auto firstComponent = ident - > getComponentRange ( ) . front ( ) ; <nl> + if ( firstComponent - > getNameRef ( ) . isSimpleName ( selfId ) ) <nl> + firstComponent - > setValue ( selfParam , proto ) ; <nl> } <nl> - } ; <nl> <nl> - typeRepr - > walk ( BindProtocolSelf ( proto ) ) ; <nl> - } <nl> + return true ; <nl> + } <nl> + } ; <nl> + <nl> + typeRepr - > walk ( BindProtocolSelf ( proto ) ) ; <nl> } <nl> <nl> / / / Parse the substitution list for an apply instruction or <nl> bool SILParser : : parseSubstitutions ( SmallVectorImpl < ParsedSubstitution > & parsed , <nl> ParserResult < TypeRepr > TyR = P . parseType ( ) ; <nl> if ( TyR . isNull ( ) ) <nl> return true ; <nl> - TypeLoc Ty = TyR . get ( ) ; <nl> if ( defaultForProto ) <nl> - bindProtocolSelfInTypeRepr ( Ty , defaultForProto ) ; <nl> - if ( performTypeLocChecking ( Ty , / * IsSILType = * / false , GenericEnv ) ) <nl> + bindProtocolSelfInTypeRepr ( TyR . get ( ) , defaultForProto ) ; <nl> + <nl> + auto Ty = performTypeLocChecking ( TyR . get ( ) , / * IsSILType = * / false , GenericEnv ) ; <nl> + if ( Ty - > hasError ( ) ) <nl> return true ; <nl> - parsed . push_back ( { Loc , Ty . getType ( ) } ) ; <nl> + parsed . push_back ( { Loc , Ty } ) ; <nl> } while ( P . consumeIf ( tok : : comma ) ) ; <nl> <nl> / / Consume the closing ' > ' . <nl> bool SILParser : : parseSILDeclRef ( SILDeclRef & Member , bool FnTypeRequired ) { <nl> GenericsScope . reset ( ) ; <nl> if ( TyR . isNull ( ) ) <nl> return true ; <nl> - TypeLoc Ty = TyR . get ( ) ; <nl> <nl> / / The type can be polymorphic . <nl> GenericEnvironment * genericEnv = nullptr ; <nl> if ( auto fnType = dyn_cast < FunctionTypeRepr > ( TyR . get ( ) ) ) { <nl> if ( auto generics = fnType - > getGenericParams ( ) ) { <nl> - assert ( ! Ty . wasValidated ( ) & & Ty . getType ( ) . isNull ( ) ) ; <nl> - <nl> genericEnv = handleSILGenericParams ( generics , & P . SF ) ; <nl> fnType - > setGenericEnvironment ( genericEnv ) ; <nl> } <nl> if ( auto generics = fnType - > getPatternGenericParams ( ) ) { <nl> - assert ( ! Ty . wasValidated ( ) & & Ty . getType ( ) . isNull ( ) ) ; <nl> - <nl> genericEnv = handleSILGenericParams ( generics , & P . SF ) ; <nl> fnType - > setPatternGenericEnvironment ( genericEnv ) ; <nl> } <nl> } <nl> <nl> - if ( performTypeLocChecking ( Ty , / * IsSILType = * / false , genericEnv ) ) <nl> + auto Ty = performTypeLocChecking ( TyR . get ( ) , / * IsSILType = * / false , genericEnv ) ; <nl> + if ( Ty - > hasError ( ) ) <nl> return true ; <nl> <nl> / / Pick the ValueDecl that has the right type . <nl> ValueDecl * TheDecl = nullptr ; <nl> - auto declTy = Ty . getType ( ) - > getCanonicalType ( ) ; <nl> + auto declTy = Ty - > getCanonicalType ( ) ; <nl> for ( unsigned I = 0 , E = values . size ( ) ; I < E ; + + I ) { <nl> auto * decl = values [ I ] ; <nl> <nl> ProtocolConformanceRef SILParser : : parseProtocolConformanceHelper ( <nl> ParserResult < TypeRepr > TyR = P . parseType ( ) ; <nl> if ( TyR . 
isNull ( ) ) <nl> return ProtocolConformanceRef ( ) ; <nl> - TypeLoc Ty = TyR . get ( ) ; <nl> if ( defaultForProto ) { <nl> - bindProtocolSelfInTypeRepr ( Ty , defaultForProto ) ; <nl> + bindProtocolSelfInTypeRepr ( TyR . get ( ) , defaultForProto ) ; <nl> } <nl> <nl> - if ( performTypeLocChecking ( Ty , / * IsSILType = * / false , witnessEnv ) ) <nl> + auto ConformingTy = performTypeLocChecking ( TyR . get ( ) , / * IsSILType = * / false , witnessEnv ) ; <nl> + if ( ConformingTy - > hasError ( ) ) <nl> return ProtocolConformanceRef ( ) ; <nl> - auto ConformingTy = Ty . getType ( ) ; <nl> <nl> if ( P . parseToken ( tok : : colon , diag : : expected_sil_witness_colon ) ) <nl> return ProtocolConformanceRef ( ) ; <nl> static bool parseSILVTableEntry ( <nl> return true ; <nl> TypeLoc Ty = TyR . get ( ) ; <nl> if ( isDefaultWitnessTable ) <nl> - bindProtocolSelfInTypeRepr ( Ty , proto ) ; <nl> + bindProtocolSelfInTypeRepr ( TyR . get ( ) , proto ) ; <nl> if ( swift : : performTypeLocChecking ( P . Context , Ty , <nl> / * isSILMode = * / false , <nl> / * isSILType = * / false , <nl> static bool parseSILVTableEntry ( <nl> return true ; <nl> TypeLoc Ty = TyR . get ( ) ; <nl> if ( isDefaultWitnessTable ) <nl> - bindProtocolSelfInTypeRepr ( Ty , proto ) ; <nl> + bindProtocolSelfInTypeRepr ( TyR . get ( ) , proto ) ; <nl> if ( swift : : performTypeLocChecking ( P . Context , Ty , <nl> / * isSILMode = * / false , <nl> / * isSILType = * / false , <nl> | [ NFC ] Wean SILParser off of TypeLocs | apple/swift | 8c3f154258837606e3af658bac516bab0852d823 | 2020-06-11T23:02:17Z |
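The Swift commit above changes performTypeLocChecking() from mutating a TypeLoc in place and returning a bool to taking the TypeRepr directly and returning the resolved Type, so callers now branch on hasError() of the result. A small stand-alone sketch of that call-pattern change, using toy stand-ins rather than the real Swift AST types:

    #include <cassert>
    #include <string>

    struct Type {
        std::string name;
        bool hasError() const { return name == "<error>"; }
    };
    struct TypeRepr { std::string spelling; };

    // Old shape (for contrast):  bool check(TypeLoc& inOut);
    // New shape: resolve and hand the result back.
    Type performTypeLocChecking(const TypeRepr* repr) {
        if (repr == nullptr || repr->spelling.empty())
            return Type{"<error>"};
        return Type{repr->spelling};   // pretend resolution succeeded
    }

    bool parseASTType(const TypeRepr* parsed, Type& result) {
        Type resolved = performTypeLocChecking(parsed);
        if (resolved.hasError())       // callers now branch on the returned type
            return true;               // "true" still means "parse error" here
        result = resolved;
        return false;
    }

    int main() {
        TypeRepr repr{"Builtin.Int64"};
        Type out;
        assert(!parseASTType(&repr, out) && out.name == "Builtin.Int64");
        assert(parseASTType(nullptr, out));   // error path
        return 0;
    }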
mmm a / samples / Cpp / TestCpp / Classes / Box2DTestBed / Box2dView . cpp <nl> ppp b / samples / Cpp / TestCpp / Classes / Box2DTestBed / Box2dView . cpp <nl> <nl> # include " GLES - Render . h " <nl> # include " Test . h " <nl> # include " renderer / CCRenderer . h " <nl> - # include " renderer / CCCustomCommand . h " <nl> <nl> # define kAccelerometerFrequency 30 <nl> # define FRAMES_BETWEEN_PRESSES_FOR_DOUBLE_CLICK 10 <nl> void Box2DView : : draw ( ) <nl> { <nl> Layer : : draw ( ) ; <nl> <nl> - CustomCommand * cmd = CustomCommand : : getCommandPool ( ) . generateCommand ( ) ; <nl> - cmd - > init ( 0 , _vertexZ ) ; <nl> - cmd - > func = CC_CALLBACK_0 ( Box2DView : : onDraw , this ) ; <nl> - Director : : getInstance ( ) - > getRenderer ( ) - > addCommand ( cmd ) ; <nl> + _customCmd . init ( 0 , _vertexZ ) ; <nl> + _customCmd . func = CC_CALLBACK_0 ( Box2DView : : onDraw , this ) ; <nl> + Director : : getInstance ( ) - > getRenderer ( ) - > addCommand ( & _customCmd ) ; <nl> } <nl> <nl> void Box2DView : : onDraw ( ) <nl> mmm a / samples / Cpp / TestCpp / Classes / Box2DTestBed / Box2dView . h <nl> ppp b / samples / Cpp / TestCpp / Classes / Box2DTestBed / Box2dView . h <nl> <nl> <nl> / / # include " cocos2d . h " <nl> # include " . . / testBasic . h " <nl> + # include " renderer / CCCustomCommand . h " <nl> <nl> class MenuLayer : public Layer <nl> { <nl> class Box2DView : public Layer <nl> <nl> static Box2DView * viewWithEntryID ( int entryId ) ; <nl> protected : <nl> + CustomCommand _customCmd ; <nl> void onDraw ( ) ; <nl> } ; <nl> <nl> mmm a / samples / Cpp / TestCpp / Classes / ClippingNodeTest / ClippingNodeTest . cpp <nl> ppp b / samples / Cpp / TestCpp / Classes / ClippingNodeTest / ClippingNodeTest . cpp <nl> <nl> # include " ClippingNodeTest . h " <nl> # include " . . / testResource . h " <nl> # include " renderer / CCRenderer . h " <nl> - # include " renderer / CCCustomCommand . h " <nl> <nl> enum { <nl> kTagTitleLabel = 1 , <nl> void RawStencilBufferTest : : draw ( ) <nl> auto planeSize = winPoint * ( 1 . 0 / _planeCount ) ; <nl> <nl> Renderer * renderer = Director : : getInstance ( ) - > getRenderer ( ) ; <nl> + size_t neededCmdSize = _planeCount * 2 + 2 ; <nl> + if ( _renderCmds . size ( ) ! = neededCmdSize ) <nl> + { <nl> + _renderCmds . resize ( neededCmdSize ) ; <nl> + } <nl> + <nl> + auto iter = _renderCmds . begin ( ) ; <nl> <nl> - CustomCommand * cmd = CustomCommand : : getCommandPool ( ) . generateCommand ( ) ; <nl> - cmd - > init ( 0 , _vertexZ ) ; <nl> - cmd - > func = CC_CALLBACK_0 ( RawStencilBufferTest : : onEnableStencil , this ) ; <nl> - renderer - > addCommand ( cmd ) ; <nl> + iter - > init ( 0 , _vertexZ ) ; <nl> + iter - > func = CC_CALLBACK_0 ( RawStencilBufferTest : : onEnableStencil , this ) ; <nl> + renderer - > addCommand ( & ( * iter ) ) ; <nl> + + + iter ; <nl> <nl> <nl> <nl> void RawStencilBufferTest : : draw ( ) <nl> spritePoint . y = 0 ; <nl> _sprite - > setPosition ( spritePoint ) ; <nl> <nl> - cmd = CustomCommand : : getCommandPool ( ) . 
generateCommand ( ) ; <nl> - cmd - > init ( 0 , _vertexZ ) ; <nl> - cmd - > func = CC_CALLBACK_0 ( RawStencilBufferTest : : onBeforeDrawClip , this , i , stencilPoint ) ; <nl> - renderer - > addCommand ( cmd ) ; <nl> + iter - > init ( 0 , _vertexZ ) ; <nl> + iter - > func = CC_CALLBACK_0 ( RawStencilBufferTest : : onBeforeDrawClip , this , i , stencilPoint ) ; <nl> + renderer - > addCommand ( & ( * iter ) ) ; <nl> + + + iter ; <nl> <nl> kmGLPushMatrix ( ) ; <nl> this - > transform ( ) ; <nl> _sprite - > visit ( ) ; <nl> kmGLPopMatrix ( ) ; <nl> <nl> - cmd = CustomCommand : : getCommandPool ( ) . generateCommand ( ) ; <nl> - cmd - > init ( 0 , _vertexZ ) ; <nl> - cmd - > func = CC_CALLBACK_0 ( RawStencilBufferTest : : onBeforeDrawSprite , this , i , winPoint ) ; <nl> - renderer - > addCommand ( cmd ) ; <nl> + iter - > init ( 0 , _vertexZ ) ; <nl> + iter - > func = CC_CALLBACK_0 ( RawStencilBufferTest : : onBeforeDrawSprite , this , i , winPoint ) ; <nl> + renderer - > addCommand ( & ( * iter ) ) ; <nl> + + + iter ; <nl> <nl> kmGLPushMatrix ( ) ; <nl> this - > transform ( ) ; <nl> void RawStencilBufferTest : : draw ( ) <nl> kmGLPopMatrix ( ) ; <nl> } <nl> <nl> - cmd = CustomCommand : : getCommandPool ( ) . generateCommand ( ) ; <nl> - cmd - > init ( 0 , _vertexZ ) ; <nl> - cmd - > func = CC_CALLBACK_0 ( RawStencilBufferTest : : onDisableStencil , this ) ; <nl> - renderer - > addCommand ( cmd ) ; <nl> + iter - > init ( 0 , _vertexZ ) ; <nl> + iter - > func = CC_CALLBACK_0 ( RawStencilBufferTest : : onDisableStencil , this ) ; <nl> + renderer - > addCommand ( & ( * iter ) ) ; <nl> <nl> } <nl> <nl> mmm a / samples / Cpp / TestCpp / Classes / ClippingNodeTest / ClippingNodeTest . h <nl> ppp b / samples / Cpp / TestCpp / Classes / ClippingNodeTest / ClippingNodeTest . h <nl> <nl> <nl> # include " . . / testBasic . h " <nl> # include " . . / BaseTest . h " <nl> + # include " renderer / CCCustomCommand . h " <nl> + # include < list > <nl> <nl> class BaseClippingNodeTest : public BaseTest <nl> { <nl> class RawStencilBufferTest : public BaseClippingNodeTest <nl> virtual void setupStencilForDrawingOnPlane ( GLint plane ) ; <nl> <nl> protected : <nl> + std : : list < CustomCommand > _renderCmds ; <nl> void onEnableStencil ( ) ; <nl> void onDisableStencil ( ) ; <nl> void onBeforeDrawClip ( int planeIndex , const Point & pt ) ; <nl> mmm a / samples / Cpp / TestCpp / Classes / LabelTest / LabelTestNew . cpp <nl> ppp b / samples / Cpp / TestCpp / Classes / LabelTest / LabelTestNew . cpp <nl> <nl> # include " LabelTestNew . h " <nl> # include " . . / testResource . h " <nl> # include " renderer / CCRenderer . h " <nl> - # include " renderer / CCCustomCommand . h " <nl> <nl> enum { <nl> kTagTileMap = 1 , <nl> LabelFNTSpriteActions : : LabelFNTSpriteActions ( ) <nl> <nl> void LabelFNTSpriteActions : : draw ( ) <nl> { <nl> - CustomCommand * cmd = CustomCommand : : getCommandPool ( ) . generateCommand ( ) ; <nl> - cmd - > init ( 0 , _vertexZ ) ; <nl> - cmd - > func = CC_CALLBACK_0 ( LabelFNTSpriteActions : : onDraw , this ) ; <nl> - Director : : getInstance ( ) - > getRenderer ( ) - > addCommand ( cmd ) ; <nl> + _renderCmd . init ( 0 , _vertexZ ) ; <nl> + _renderCmd . func = CC_CALLBACK_0 ( LabelFNTSpriteActions : : onDraw , this ) ; <nl> + Director : : getInstance ( ) - > getRenderer ( ) - > addCommand ( & _renderCmd ) ; <nl> <nl> } <nl> <nl> std : : string LabelFNTBounds : : subtitle ( ) const <nl> <nl> void LabelFNTBounds : : draw ( ) <nl> { <nl> - CustomCommand * cmd = CustomCommand : : getCommandPool ( ) . 
generateCommand ( ) ; <nl> - cmd - > init ( 0 , _vertexZ ) ; <nl> - cmd - > func = CC_CALLBACK_0 ( LabelFNTBounds : : onDraw , this ) ; <nl> - Director : : getInstance ( ) - > getRenderer ( ) - > addCommand ( cmd ) ; <nl> + _renderCmd . init ( 0 , _vertexZ ) ; <nl> + _renderCmd . func = CC_CALLBACK_0 ( LabelFNTBounds : : onDraw , this ) ; <nl> + Director : : getInstance ( ) - > getRenderer ( ) - > addCommand ( & _renderCmd ) ; <nl> } <nl> <nl> void LabelFNTBounds : : onDraw ( ) <nl> mmm a / samples / Cpp / TestCpp / Classes / LabelTest / LabelTestNew . h <nl> ppp b / samples / Cpp / TestCpp / Classes / LabelTest / LabelTestNew . h <nl> <nl> <nl> # include " . . / testBasic . h " <nl> # include " . . / BaseTest . h " <nl> + # include " renderer / CCCustomCommand . h " <nl> <nl> <nl> class AtlasDemoNew : public BaseTest <nl> class LabelFNTSpriteActions : public AtlasDemoNew <nl> virtual std : : string title ( ) const override ; <nl> virtual std : : string subtitle ( ) const override ; <nl> protected : <nl> + CustomCommand _renderCmd ; <nl> void onDraw ( ) ; <nl> } ; <nl> <nl> class LabelFNTBounds : public AtlasDemoNew <nl> private : <nl> Label * label1 ; <nl> protected : <nl> + CustomCommand _renderCmd ; <nl> void onDraw ( ) ; <nl> } ; <nl> <nl> mmm a / samples / Cpp / TestCpp / Classes / Texture2dTest / Texture2dTest . cpp <nl> ppp b / samples / Cpp / TestCpp / Classes / Texture2dTest / Texture2dTest . cpp <nl> <nl> # include " Texture2dTest . h " <nl> # include " . . / testResource . h " <nl> # include " renderer / CCRenderer . h " <nl> - # include " renderer / CCCustomCommand . h " <nl> <nl> enum { <nl> kTagLabel = 1 , <nl> void TextureDrawAtPoint : : draw ( ) <nl> { <nl> TextureDemo : : draw ( ) ; <nl> <nl> - CustomCommand * cmd = CustomCommand : : getCommandPool ( ) . generateCommand ( ) ; <nl> - cmd - > init ( 0 , _vertexZ ) ; <nl> - cmd - > func = CC_CALLBACK_0 ( TextureDrawAtPoint : : onDraw , this ) ; <nl> - Director : : getInstance ( ) - > getRenderer ( ) - > addCommand ( cmd ) ; <nl> + _renderCmd . init ( 0 , _vertexZ ) ; <nl> + _renderCmd . func = CC_CALLBACK_0 ( TextureDrawAtPoint : : onDraw , this ) ; <nl> + Director : : getInstance ( ) - > getRenderer ( ) - > addCommand ( & _renderCmd ) ; <nl> <nl> } <nl> <nl> void TextureDrawInRect : : draw ( ) <nl> { <nl> TextureDemo : : draw ( ) ; <nl> <nl> - CustomCommand * cmd = CustomCommand : : getCommandPool ( ) . generateCommand ( ) ; <nl> - cmd - > init ( 0 , _vertexZ ) ; <nl> - cmd - > func = CC_CALLBACK_0 ( TextureDrawInRect : : onDraw , this ) ; <nl> - Director : : getInstance ( ) - > getRenderer ( ) - > addCommand ( cmd ) ; <nl> + _renderCmd . init ( 0 , _vertexZ ) ; <nl> + _renderCmd . func = CC_CALLBACK_0 ( TextureDrawInRect : : onDraw , this ) ; <nl> + Director : : getInstance ( ) - > getRenderer ( ) - > addCommand ( & _renderCmd ) ; <nl> <nl> } <nl> <nl> mmm a / samples / Cpp / TestCpp / Classes / Texture2dTest / Texture2dTest . h <nl> ppp b / samples / Cpp / TestCpp / Classes / Texture2dTest / Texture2dTest . h <nl> <nl> <nl> # include " . . / testBasic . h " <nl> # include " . . / BaseTest . h " <nl> + # include " renderer / CCCustomCommand . 
h " <nl> <nl> <nl> class TextureDemo : public BaseTest <nl> class TextureDrawAtPoint : public TextureDemo <nl> virtual void onEnter ( ) ; <nl> virtual void draw ( ) ; <nl> protected : <nl> + CustomCommand _renderCmd ; <nl> void onDraw ( ) ; <nl> private : <nl> Texture2D * _tex1 , * _Tex2F ; <nl> class TextureDrawInRect : public TextureDemo <nl> virtual void onEnter ( ) ; <nl> virtual void draw ( ) ; <nl> protected : <nl> + CustomCommand _renderCmd ; <nl> void onDraw ( ) ; <nl> private : <nl> Texture2D * _tex1 , * _Tex2F ; <nl> mmm a / samples / Cpp / TestCpp / Classes / TileMapTest / TileMapTest . cpp <nl> ppp b / samples / Cpp / TestCpp / Classes / TileMapTest / TileMapTest . cpp <nl> TMXOrthoObjectsTest : : TMXOrthoObjectsTest ( ) <nl> <nl> void TMXOrthoObjectsTest : : draw ( ) <nl> { <nl> - CustomCommand * cmd = CustomCommand : : getCommandPool ( ) . generateCommand ( ) ; <nl> - cmd - > init ( 0 , _vertexZ ) ; <nl> - cmd - > func = CC_CALLBACK_0 ( TMXOrthoObjectsTest : : onDraw , this ) ; <nl> - Director : : getInstance ( ) - > getRenderer ( ) - > addCommand ( cmd ) ; <nl> + _renderCmd . init ( 0 , _vertexZ ) ; <nl> + _renderCmd . func = CC_CALLBACK_0 ( TMXOrthoObjectsTest : : onDraw , this ) ; <nl> + Director : : getInstance ( ) - > getRenderer ( ) - > addCommand ( & _renderCmd ) ; <nl> } <nl> <nl> void TMXOrthoObjectsTest : : onDraw ( ) <nl> TMXIsoObjectsTest : : TMXIsoObjectsTest ( ) <nl> <nl> void TMXIsoObjectsTest : : draw ( ) <nl> { <nl> - CustomCommand * cmd = CustomCommand : : getCommandPool ( ) . generateCommand ( ) ; <nl> - cmd - > init ( 0 , _vertexZ ) ; <nl> - cmd - > func = CC_CALLBACK_0 ( TMXIsoObjectsTest : : onDraw , this ) ; <nl> - Director : : getInstance ( ) - > getRenderer ( ) - > addCommand ( cmd ) ; <nl> + _renderCmd . init ( 0 , _vertexZ ) ; <nl> + _renderCmd . func = CC_CALLBACK_0 ( TMXIsoObjectsTest : : onDraw , this ) ; <nl> + Director : : getInstance ( ) - > getRenderer ( ) - > addCommand ( & _renderCmd ) ; <nl> } <nl> <nl> void TMXIsoObjectsTest : : onDraw ( ) <nl> TMXGIDObjectsTest : : TMXGIDObjectsTest ( ) <nl> <nl> void TMXGIDObjectsTest : : draw ( ) <nl> { <nl> - CustomCommand * cmd = CustomCommand : : getCommandPool ( ) . generateCommand ( ) ; <nl> - cmd - > init ( 0 , _vertexZ ) ; <nl> - cmd - > func = CC_CALLBACK_0 ( TMXGIDObjectsTest : : onDraw , this ) ; <nl> - Director : : getInstance ( ) - > getRenderer ( ) - > addCommand ( cmd ) ; <nl> + _renderCmd . init ( 0 , _vertexZ ) ; <nl> + _renderCmd . func = CC_CALLBACK_0 ( TMXGIDObjectsTest : : onDraw , this ) ; <nl> + Director : : getInstance ( ) - > getRenderer ( ) - > addCommand ( & _renderCmd ) ; <nl> } <nl> <nl> void TMXGIDObjectsTest : : onDraw ( ) <nl> mmm a / samples / Cpp / TestCpp / Classes / TileMapTest / TileMapTest . h <nl> ppp b / samples / Cpp / TestCpp / Classes / TileMapTest / TileMapTest . h <nl> <nl> <nl> # include " . . / testBasic . h " <nl> # include " . . / BaseTest . h " <nl> + # include " renderer / CCCustomCommand . 
h " <nl> <nl> class TileDemo : public BaseTest <nl> { <nl> class TMXOrthoObjectsTest : public TileDemo <nl> virtual void draw ( ) ; <nl> virtual std : : string subtitle ( ) const override ; <nl> protected : <nl> + CustomCommand _renderCmd ; <nl> void onDraw ( ) ; <nl> } ; <nl> <nl> class TMXIsoObjectsTest : public TileDemo <nl> virtual void draw ( ) ; <nl> virtual std : : string subtitle ( ) const override ; <nl> protected : <nl> + CustomCommand _renderCmd ; <nl> void onDraw ( ) ; <nl> } ; <nl> <nl> class TMXGIDObjectsTest : public TileDemo <nl> virtual void draw ( ) ; <nl> <nl> protected : <nl> + CustomCommand _renderCmd ; <nl> void onDraw ( ) ; <nl> <nl> } ; <nl> | fix testcase remove commandpool | cocos2d/cocos2d-x | 81989704218d8d1a5c904ce89ca0b7ec0c690924 | 2013-12-27T06:42:45Z |
mmm a / lib / Parse / ParseDecl . cpp <nl> ppp b / lib / Parse / ParseDecl . cpp <nl> bool Parser : : parseDeclFuncBodyDelayed ( FuncDecl * FD ) { <nl> auto BeginParserPosition = getParserPosition ( FunctionParserState - > BodyPos ) ; <nl> auto EndLexerState = L - > getStateForBeginningOfTokenLoc ( FE - > getEndLoc ( ) ) ; <nl> <nl> + / / ParserPositionRAII needs a primed parser to restore to . <nl> + if ( Tok . is ( tok : : NUM_TOKENS ) ) <nl> + consumeToken ( ) ; <nl> + <nl> / / Ensure that we restore the parser state at exit . <nl> ParserPositionRAII PPR ( * this ) ; <nl> <nl> mmm a / lib / Parse / Parser . cpp <nl> ppp b / lib / Parse / Parser . cpp <nl> class ParseDelayedFunctionBodies : public ASTWalker { <nl> CodeCompletionFactory - > createCodeCompletionCallbacks ( TheParser ) ) ; <nl> TheParser . setCodeCompletion ( CodeCompletionOffset , CodeCompletion . get ( ) ) ; <nl> } <nl> - / / Prime the parser . <nl> - / / FIXME : This is only necessary because ParserPositionRAII asserts without <nl> - / / a primed parser . <nl> - TheParser . consumeToken ( ) ; <nl> TheParser . parseDeclFuncBodyDelayed ( FD ) ; <nl> } <nl> } ; <nl> | If the parser needs priming , let the parser handle it . | apple/swift | 51990109e571cdbaf9efb630604fbb66fa0a6cf0 | 2013-07-25T14:42:10Z |
mmm a / api / tesseractmain . cpp <nl> ppp b / api / tesseractmain . cpp <nl> int main ( int argc , char * * argv ) { <nl> TIFFClose ( archive ) ; <nl> archive = TIFFOpen ( argv [ 1 ] , " r " ) ; <nl> if ( archive = = NULL ) { <nl> - READFAILED . error ( argv [ 0 ] , EXIT , argv [ 1 ] ) ; <nl> - return 1 ; <nl> + tprintf ( " Read of file % s failed \ n " , argv [ 1 ] ) ; <nl> + exit ( 1 ) ; <nl> } <nl> if ( page_number > 0 ) <nl> tprintf ( " Page % d \ n " , page_number ) ; <nl> int main ( int argc , char * * argv ) { <nl> } else { <nl> # endif <nl> / / Using built - in image library to read bmp , or tiff without libtiff . <nl> - if ( image . read_header ( argv [ 1 ] ) < 0 ) <nl> - READFAILED . error ( argv [ 0 ] , EXIT , argv [ 1 ] ) ; <nl> + if ( image . read_header ( argv [ 1 ] ) < 0 ) { <nl> + tprintf ( " Read of file % s failed . \ n " , argv [ 1 ] ) ; <nl> + exit ( 1 ) ; <nl> + } <nl> if ( image . read ( image . get_ysize ( ) ) < 0 ) <nl> MEMORY_OUT . error ( argv [ 0 ] , EXIT , " Read of image % s " , argv [ 1 ] ) ; <nl> invert_image ( & image ) ; <nl> | Fixed issue 279 | tesseract-ocr/tesseract | 1c3d5a83c00ecd82a9d92067c3be1cb159e1a139 | 2010-05-20T15:45:08Z |
mmm a / src / clustering / administration / tables / generate_config . cc <nl> ppp b / src / clustering / administration / tables / generate_config . cc <nl> bool table_generate_config ( <nl> & yielder , <nl> interruptor , <nl> [ & ] ( size_t shard , const machine_id_t & server ) { <nl> + guarantee ( config_out - > shards [ shard ] . director . is_unset ( ) ) ; <nl> config_out - > shards [ shard ] . replicas . insert ( server ) ; <nl> config_out - > shards [ shard ] . director = server ; <nl> / * We have to update ` pairings ` as directors are selected so that our <nl> bool table_generate_config ( <nl> } <nl> <nl> for ( size_t shard = 0 ; shard < params . num_shards ; + + shard ) { <nl> + guarantee ( ! config_out - > shards [ shard ] . director . is_unset ( ) ) ; <nl> guarantee ( config_out - > shards [ shard ] . replicas . size ( ) = = total_replicas ) ; <nl> } <nl> <nl> | Add back in guarantee that each shard ' s director is set exactly once . | rethinkdb/rethinkdb | 2cb1bb94ff61eb1ad3b84da5968066565367d7cc | 2014-10-02T23:35:42Z |
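The commit above guards the table-config generator with guarantee() checks so each shard's director is assigned exactly once: unset when the callback picks a server, set after the loop completes. A small self-contained sketch of that "set exactly once" invariant, with assert() standing in for RethinkDB's guarantee() and made-up Shard/pick_directors names:

#include <cassert>
#include <cstddef>
#include <string>
#include <vector>

struct Shard {
  std::string director;                        // empty means "unset"
  bool director_is_unset() const { return director.empty(); }
};

// Assign one director per shard; assert() plays the role of guarantee()
// in the commit above.
void pick_directors(std::vector<Shard>& shards,
                    const std::vector<std::string>& servers) {
  for (std::size_t i = 0; i < shards.size(); ++i) {
    assert(shards[i].director_is_unset());     // never assign a director twice
    shards[i].director = servers[i % servers.size()];
  }
  for (const Shard& s : shards) {
    assert(!s.director_is_unset());            // every shard ended up with one
  }
}

int main() {
  std::vector<Shard> shards(4);
  pick_directors(shards, {"server_a", "server_b"});
}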
mmm a / src / arm / code - stubs - arm . cc <nl> ppp b / src / arm / code - stubs - arm . cc <nl> void MathPowStub : : Generate ( MacroAssembler * masm ) { <nl> __ Ret ( ) ; <nl> } <nl> <nl> - bool CEntryStub : : NeedsImmovableCode ( ) { <nl> - return true ; <nl> - } <nl> - <nl> + Movability CEntryStub : : NeedsImmovableCode ( ) { return kImmovable ; } <nl> <nl> void CodeStub : : GenerateStubsAheadOfTime ( Isolate * isolate ) { <nl> CEntryStub : : GenerateAheadOfTime ( isolate ) ; <nl> mmm a / src / arm / code - stubs - arm . h <nl> ppp b / src / arm / code - stubs - arm . h <nl> class DirectCEntryStub : public PlatformCodeStub { <nl> void GenerateCall ( MacroAssembler * masm , Register target ) ; <nl> <nl> private : <nl> - bool NeedsImmovableCode ( ) override { return true ; } <nl> + Movability NeedsImmovableCode ( ) override { return kImmovable ; } <nl> <nl> DEFINE_NULL_CALL_INTERFACE_DESCRIPTOR ( ) ; <nl> DEFINE_PLATFORM_CODE_STUB ( DirectCEntry , PlatformCodeStub ) ; <nl> mmm a / src / arm64 / code - stubs - arm64 . cc <nl> ppp b / src / arm64 / code - stubs - arm64 . cc <nl> void CodeStub : : GenerateFPStubs ( Isolate * isolate ) { <nl> USE ( isolate ) ; <nl> } <nl> <nl> - <nl> - bool CEntryStub : : NeedsImmovableCode ( ) { <nl> + Movability CEntryStub : : NeedsImmovableCode ( ) { <nl> / / CEntryStub stores the return address on the stack before calling into <nl> / / C + + code . In some cases , the VM accesses this address , but it is not used <nl> / / when the C + + code returns to the stub because LR holds the return address <nl> bool CEntryStub : : NeedsImmovableCode ( ) { <nl> / / TODO ( jbramley ) : Whilst this is the only analysis that makes sense , I can ' t <nl> / / find any comment to confirm this , and I don ' t hit any crashes whatever <nl> / / this function returns . The anaylsis should be properly confirmed . <nl> - return true ; <nl> + return kImmovable ; <nl> } <nl> <nl> <nl> mmm a / src / arm64 / code - stubs - arm64 . h <nl> ppp b / src / arm64 / code - stubs - arm64 . h <nl> class DirectCEntryStub : public PlatformCodeStub { <nl> void GenerateCall ( MacroAssembler * masm , Register target ) ; <nl> <nl> private : <nl> - bool NeedsImmovableCode ( ) override { return true ; } <nl> + Movability NeedsImmovableCode ( ) override { return kImmovable ; } <nl> <nl> DEFINE_NULL_CALL_INTERFACE_DESCRIPTOR ( ) ; <nl> DEFINE_PLATFORM_CODE_STUB ( DirectCEntry , PlatformCodeStub ) ; <nl> mmm a / src / code - stubs . h <nl> ppp b / src / code - stubs . h <nl> class CodeStub : public ZoneObject { <nl> <nl> / / Returns whether the code generated for this stub needs to be allocated as <nl> / / a fixed ( non - moveable ) code object . <nl> - virtual bool NeedsImmovableCode ( ) { return false ; } <nl> + virtual Movability NeedsImmovableCode ( ) { return kMovable ; } <nl> <nl> virtual void PrintName ( std : : ostream & os ) const ; / / NOLINT <nl> virtual void PrintBaseName ( std : : ostream & os ) const ; / / NOLINT <nl> class CEntryStub : public PlatformCodeStub { <nl> bool is_builtin_exit ( ) const { return FrameTypeBits : : decode ( minor_key_ ) ; } <nl> int result_size ( ) const { return ResultSizeBits : : decode ( minor_key_ ) ; } <nl> <nl> - bool NeedsImmovableCode ( ) override ; <nl> + Movability NeedsImmovableCode ( ) override ; <nl> <nl> class SaveDoublesBits : public BitField < bool , 0 , 1 > { } ; <nl> class ArgvMode : public BitField < bool , 1 , 1 > { } ; <nl> mmm a / src / compiler / code - generator . cc <nl> ppp b / src / compiler / code - generator . 
cc <nl> Handle < Code > CodeGenerator : : FinalizeCode ( ) { <nl> <nl> Handle < Code > result = isolate ( ) - > factory ( ) - > NewCode ( <nl> desc , info ( ) - > code_kind ( ) , Handle < Object > ( ) , table , source_positions , <nl> - deopt_data , false , info ( ) - > stub_key ( ) , true , <nl> + deopt_data , kMovable , info ( ) - > stub_key ( ) , true , <nl> frame ( ) - > GetTotalFrameSlotCount ( ) , safepoints ( ) - > GetCodeOffset ( ) ) ; <nl> isolate ( ) - > counters ( ) - > total_compiled_code_size ( ) - > Increment ( <nl> result - > instruction_size ( ) ) ; <nl> mmm a / src / deoptimizer . cc <nl> ppp b / src / deoptimizer . cc <nl> void Deoptimizer : : EnsureCodeForDeoptimizationEntry ( Isolate * isolate , <nl> / / directly and there is no support for relocating them . <nl> Handle < Code > code = isolate - > factory ( ) - > NewCode ( <nl> desc , Code : : STUB , Handle < Object > ( ) , MaybeHandle < HandlerTable > ( ) , <nl> - MaybeHandle < ByteArray > ( ) , MaybeHandle < DeoptimizationData > ( ) , true ) ; <nl> + MaybeHandle < ByteArray > ( ) , MaybeHandle < DeoptimizationData > ( ) , kImmovable ) ; <nl> CHECK ( Heap : : IsImmovable ( * code ) ) ; <nl> <nl> CHECK_NULL ( data - > deopt_entry_code_ [ type ] ) ; <nl> mmm a / src / factory . cc <nl> ppp b / src / factory . cc <nl> Handle < CodeDataContainer > Factory : : NewCodeDataContainer ( int flags ) { <nl> return data_container ; <nl> } <nl> <nl> - Handle < Code > Factory : : NewCodeRaw ( int object_size , bool immovable ) { <nl> + Handle < Code > Factory : : NewCodeRaw ( int object_size , Movability movability ) { <nl> CALL_HEAP_FUNCTION ( isolate ( ) , <nl> - isolate ( ) - > heap ( ) - > AllocateCode ( object_size , immovable ) , <nl> + isolate ( ) - > heap ( ) - > AllocateCode ( object_size , movability ) , <nl> Code ) ; <nl> } <nl> <nl> Handle < Code > Factory : : NewCode ( <nl> const CodeDesc & desc , Code : : Kind kind , Handle < Object > self_ref , <nl> MaybeHandle < HandlerTable > maybe_handler_table , <nl> MaybeHandle < ByteArray > maybe_source_position_table , <nl> - MaybeHandle < DeoptimizationData > maybe_deopt_data , bool immovable , <nl> + MaybeHandle < DeoptimizationData > maybe_deopt_data , Movability movability , <nl> uint32_t stub_key , bool is_turbofanned , int stack_slots , <nl> int safepoint_table_offset ) { <nl> Handle < ByteArray > reloc_info = NewByteArray ( desc . reloc_size , TENURED ) ; <nl> Handle < Code > Factory : : NewCode ( <nl> int obj_size = Code : : SizeFor ( RoundUp ( body_size , kObjectAlignment ) ) ; <nl> <nl> CodeSpaceMemoryModificationScope code_allocation ( isolate ( ) - > heap ( ) ) ; <nl> - Handle < Code > code = NewCodeRaw ( obj_size , immovable ) ; <nl> + Handle < Code > code = NewCodeRaw ( obj_size , movability ) ; <nl> DCHECK ( ! isolate ( ) - > heap ( ) - > memory_allocator ( ) - > code_range ( ) - > valid ( ) | | <nl> isolate ( ) - > heap ( ) - > memory_allocator ( ) - > code_range ( ) - > contains ( <nl> code - > address ( ) ) | | <nl> Handle < Code > Factory : : NewCode ( <nl> } <nl> <nl> Handle < Code > Factory : : NewCodeForDeserialization ( uint32_t size ) { <nl> - const bool kNotImmovable = false ; <nl> - return NewCodeRaw ( size , kNotImmovable ) ; <nl> + return NewCodeRaw ( size , kMovable ) ; <nl> } <nl> <nl> Handle < Code > Factory : : CopyCode ( Handle < Code > code ) { <nl> mmm a / src / factory . h <nl> ppp b / src / factory . 
h <nl> class V8_EXPORT_PRIVATE Factory final { <nl> MaybeHandle < ByteArray > ( ) , <nl> MaybeHandle < DeoptimizationData > maybe_deopt_data = <nl> MaybeHandle < DeoptimizationData > ( ) , <nl> - bool immovable = false , uint32_t stub_key = 0 , <nl> + Movability movability = kMovable , uint32_t stub_key = 0 , <nl> bool is_turbofanned = false , int stack_slots = 0 , <nl> int safepoint_table_offset = 0 ) ; <nl> <nl> class V8_EXPORT_PRIVATE Factory final { <nl> PretenureFlag pretenure ) ; <nl> <nl> / / Creates a code object that is not yet fully initialized yet . <nl> - Handle < Code > NewCodeRaw ( int object_size , bool immovable ) ; <nl> + Handle < Code > NewCodeRaw ( int object_size , Movability movability ) ; <nl> <nl> / / Attempt to find the number in a small cache . If we finds it , return <nl> / / the string representation of the number . Otherwise return undefined . <nl> mmm a / src / globals . h <nl> ppp b / src / globals . h <nl> enum GarbageCollector { SCAVENGER , MARK_COMPACTOR , MINOR_MARK_COMPACTOR } ; <nl> <nl> enum Executability { NOT_EXECUTABLE , EXECUTABLE } ; <nl> <nl> + enum Movability { kMovable , kImmovable } ; <nl> + <nl> enum VisitMode { <nl> VISIT_ALL , <nl> VISIT_ALL_IN_MINOR_MC_MARK , <nl> mmm a / src / heap / heap . cc <nl> ppp b / src / heap / heap . cc <nl> AllocationResult Heap : : AllocateFixedTypedArray ( int length , <nl> return elements ; <nl> } <nl> <nl> - <nl> - AllocationResult Heap : : AllocateCode ( int object_size , bool immovable ) { <nl> + AllocationResult Heap : : AllocateCode ( int object_size , Movability movability ) { <nl> DCHECK ( IsAligned ( static_cast < intptr_t > ( object_size ) , kCodeAlignment ) ) ; <nl> AllocationResult allocation = AllocateRaw ( object_size , CODE_SPACE ) ; <nl> <nl> HeapObject * result = nullptr ; <nl> if ( ! allocation . To ( & result ) ) return allocation ; <nl> - if ( immovable ) { <nl> + if ( movability = = kImmovable ) { <nl> Address address = result - > address ( ) ; <nl> MemoryChunk * chunk = MemoryChunk : : FromAddress ( address ) ; <nl> / / Code objects which should stay at a fixed address are allocated either <nl> mmm a / src / heap / heap . h <nl> ppp b / src / heap / heap . h <nl> class Heap { <nl> MUST_USE_RESULT AllocationResult <nl> AllocateForeign ( Address address , PretenureFlag pretenure = NOT_TENURED ) ; <nl> <nl> - MUST_USE_RESULT AllocationResult <nl> - AllocateCode ( int object_size , bool immovable ) ; <nl> + MUST_USE_RESULT AllocationResult AllocateCode ( int object_size , <nl> + Movability movability ) ; <nl> <nl> void set_force_oom ( bool value ) { force_oom_ = value ; } <nl> <nl> mmm a / src / ia32 / code - stubs - ia32 . cc <nl> ppp b / src / ia32 / code - stubs - ia32 . cc <nl> void MathPowStub : : Generate ( MacroAssembler * masm ) { <nl> __ ret ( 0 ) ; <nl> } <nl> <nl> - <nl> - bool CEntryStub : : NeedsImmovableCode ( ) { <nl> - return false ; <nl> - } <nl> - <nl> + Movability CEntryStub : : NeedsImmovableCode ( ) { return kMovable ; } <nl> <nl> void CodeStub : : GenerateStubsAheadOfTime ( Isolate * isolate ) { <nl> CEntryStub : : GenerateAheadOfTime ( isolate ) ; <nl> mmm a / src / mips / code - stubs - mips . cc <nl> ppp b / src / mips / code - stubs - mips . 
cc <nl> void MathPowStub : : Generate ( MacroAssembler * masm ) { <nl> __ Ret ( ) ; <nl> } <nl> <nl> - bool CEntryStub : : NeedsImmovableCode ( ) { <nl> - return true ; <nl> - } <nl> - <nl> + Movability CEntryStub : : NeedsImmovableCode ( ) { return kImmovable ; } <nl> <nl> void CodeStub : : GenerateStubsAheadOfTime ( Isolate * isolate ) { <nl> CEntryStub : : GenerateAheadOfTime ( isolate ) ; <nl> mmm a / src / mips / code - stubs - mips . h <nl> ppp b / src / mips / code - stubs - mips . h <nl> class DirectCEntryStub : public PlatformCodeStub { <nl> void GenerateCall ( MacroAssembler * masm , Register target ) ; <nl> <nl> private : <nl> - bool NeedsImmovableCode ( ) override { return true ; } <nl> + Movability NeedsImmovableCode ( ) override { return kImmovable ; } <nl> <nl> DEFINE_NULL_CALL_INTERFACE_DESCRIPTOR ( ) ; <nl> DEFINE_PLATFORM_CODE_STUB ( DirectCEntry , PlatformCodeStub ) ; <nl> mmm a / src / mips64 / code - stubs - mips64 . cc <nl> ppp b / src / mips64 / code - stubs - mips64 . cc <nl> void MathPowStub : : Generate ( MacroAssembler * masm ) { <nl> __ Ret ( ) ; <nl> } <nl> <nl> - bool CEntryStub : : NeedsImmovableCode ( ) { <nl> - return true ; <nl> - } <nl> - <nl> + Movability CEntryStub : : NeedsImmovableCode ( ) { return kImmovable ; } <nl> <nl> void CodeStub : : GenerateStubsAheadOfTime ( Isolate * isolate ) { <nl> CEntryStub : : GenerateAheadOfTime ( isolate ) ; <nl> mmm a / src / mips64 / code - stubs - mips64 . h <nl> ppp b / src / mips64 / code - stubs - mips64 . h <nl> class DirectCEntryStub : public PlatformCodeStub { <nl> void GenerateCall ( MacroAssembler * masm , Register target ) ; <nl> <nl> private : <nl> - bool NeedsImmovableCode ( ) override { return true ; } <nl> + Movability NeedsImmovableCode ( ) override { return kImmovable ; } <nl> <nl> DEFINE_NULL_CALL_INTERFACE_DESCRIPTOR ( ) ; <nl> DEFINE_PLATFORM_CODE_STUB ( DirectCEntry , PlatformCodeStub ) ; <nl> mmm a / src / ppc / code - stubs - ppc . cc <nl> ppp b / src / ppc / code - stubs - ppc . cc <nl> void MathPowStub : : Generate ( MacroAssembler * masm ) { <nl> __ Ret ( ) ; <nl> } <nl> <nl> - <nl> - bool CEntryStub : : NeedsImmovableCode ( ) { return true ; } <nl> - <nl> + Movability CEntryStub : : NeedsImmovableCode ( ) { return kImmovable ; } <nl> <nl> void CodeStub : : GenerateStubsAheadOfTime ( Isolate * isolate ) { <nl> CEntryStub : : GenerateAheadOfTime ( isolate ) ; <nl> mmm a / src / ppc / code - stubs - ppc . h <nl> ppp b / src / ppc / code - stubs - ppc . h <nl> class DirectCEntryStub : public PlatformCodeStub { <nl> void GenerateCall ( MacroAssembler * masm , Register target ) ; <nl> <nl> private : <nl> - bool NeedsImmovableCode ( ) override { return true ; } <nl> + Movability NeedsImmovableCode ( ) override { return kImmovable ; } <nl> <nl> DEFINE_NULL_CALL_INTERFACE_DESCRIPTOR ( ) ; <nl> DEFINE_PLATFORM_CODE_STUB ( DirectCEntry , PlatformCodeStub ) ; <nl> mmm a / src / s390 / code - stubs - s390 . cc <nl> ppp b / src / s390 / code - stubs - s390 . cc <nl> void MathPowStub : : Generate ( MacroAssembler * masm ) { <nl> __ Ret ( ) ; <nl> } <nl> <nl> - bool CEntryStub : : NeedsImmovableCode ( ) { return true ; } <nl> + Movability CEntryStub : : NeedsImmovableCode ( ) { return kImmovable ; } <nl> <nl> void CodeStub : : GenerateStubsAheadOfTime ( Isolate * isolate ) { <nl> CEntryStub : : GenerateAheadOfTime ( isolate ) ; <nl> mmm a / src / s390 / code - stubs - s390 . h <nl> ppp b / src / s390 / code - stubs - s390 . 
h <nl> class DirectCEntryStub : public PlatformCodeStub { <nl> void GenerateCall ( MacroAssembler * masm , Register target ) ; <nl> <nl> private : <nl> - bool NeedsImmovableCode ( ) override { return true ; } <nl> + Movability NeedsImmovableCode ( ) override { return kImmovable ; } <nl> <nl> DEFINE_NULL_CALL_INTERFACE_DESCRIPTOR ( ) ; <nl> DEFINE_PLATFORM_CODE_STUB ( DirectCEntry , PlatformCodeStub ) ; <nl> mmm a / src / x64 / code - stubs - x64 . cc <nl> ppp b / src / x64 / code - stubs - x64 . cc <nl> void MathPowStub : : Generate ( MacroAssembler * masm ) { <nl> __ ret ( 0 ) ; <nl> } <nl> <nl> - <nl> - bool CEntryStub : : NeedsImmovableCode ( ) { <nl> - return false ; <nl> - } <nl> - <nl> + Movability CEntryStub : : NeedsImmovableCode ( ) { return kMovable ; } <nl> <nl> void CodeStub : : GenerateStubsAheadOfTime ( Isolate * isolate ) { <nl> CEntryStub : : GenerateAheadOfTime ( isolate ) ; <nl> mmm a / test / cctest / heap / test - heap . cc <nl> ppp b / test / cctest / heap / test - heap . cc <nl> Handle < Code > GenerateDummyImmovableCode ( Isolate * isolate ) { <nl> <nl> CodeDesc desc ; <nl> assm . GetCode ( isolate , & desc ) ; <nl> - const bool kImmovable = true ; <nl> Handle < Code > code = isolate - > factory ( ) - > NewCode ( <nl> desc , Code : : STUB , Handle < Code > ( ) , HandlerTable : : Empty ( isolate ) , <nl> MaybeHandle < ByteArray > ( ) , DeoptimizationData : : Empty ( isolate ) , kImmovable ) ; <nl> | [ heap ] Introduce { Movability } enum for type safety . | v8/v8 | d80d85bfc1e69999ea6a50e3ca0659c490955e9b | 2017-11-15T10:11:26Z |
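The V8 change above swaps a bare `bool immovable` parameter for the two-value Movability enum (kMovable / kImmovable), so call sites read unambiguously and a stray boolean argument no longer slips through. A minimal sketch of the same refactoring — AllocateBlock is a hypothetical stand-in for Heap::AllocateCode, not V8 code:

#include <cstddef>
#include <cstdio>
#include <new>

// Named enum instead of a bool parameter, as in the commit above.
enum Movability { kMovable, kImmovable };

// Hypothetical allocation routine standing in for Heap::AllocateCode().
void* AllocateBlock(std::size_t size, Movability movability) {
  if (movability == kImmovable) {
    std::puts("pinned: the collector must not relocate this block");
  }
  return ::operator new(size);
}

int main() {
  // Reads unambiguously at the call site; the old bool parameter made
  // AllocateBlock(256, true) legal but opaque, whereas passing `true` here
  // no longer compiles because bool does not convert to Movability.
  void* p = AllocateBlock(256, kImmovable);
  ::operator delete(p);
}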
mmm a / src / concurrency / semaphore . cc <nl> ppp b / src / concurrency / semaphore . cc <nl> void semaphore_t : : co_lock_interruptible ( signal_t * interruptor ) { <nl> void on_semaphore_available ( ) { pulse ( ) ; } <nl> } cb ; <nl> lock ( & cb , 1 ) ; <nl> - wait_interruptible ( & cb , interruptor ) ; <nl> + <nl> + try { <nl> + wait_interruptible ( & cb , interruptor ) ; <nl> + } catch ( interrupted_exc_t & ex ) { <nl> + / / Remove our lock request from the queue <nl> + for ( lock_request_t * request = waiters . head ( ) ; request ! = NULL ; request = waiters . next ( request ) ) { <nl> + if ( request - > cb = = & cb ) { <nl> + waiters . remove ( request ) ; <nl> + delete request ; <nl> + break ; <nl> + } <nl> + } <nl> + throw ; <nl> + } <nl> } <nl> <nl> void semaphore_t : : unlock ( int count ) { <nl> void adjustable_semaphore_t : : co_lock_interruptible ( signal_t * interruptor ) { <nl> void on_semaphore_available ( ) { pulse ( ) ; } <nl> } cb ; <nl> lock ( & cb , 1 ) ; <nl> - wait_interruptible ( & cb , interruptor ) ; <nl> + <nl> + try { <nl> + wait_interruptible ( & cb , interruptor ) ; <nl> + } catch ( interrupted_exc_t & ex ) { <nl> + / / Remove our lock request from the queue <nl> + for ( lock_request_t * request = waiters . head ( ) ; request ! = NULL ; request = waiters . next ( request ) ) { <nl> + if ( request - > cb = = & cb ) { <nl> + waiters . remove ( request ) ; <nl> + delete request ; <nl> + break ; <nl> + } <nl> + } <nl> + throw ; <nl> + } <nl> } <nl> <nl> void adjustable_semaphore_t : : unlock ( int count ) { <nl> | fixing potential crash and memory corruption when a semaphore co_lock_interruptible is interrupted | rethinkdb/rethinkdb | 5747d976b9c496873088440eb240b4ed8dbf5d24 | 2012-10-22T21:42:43Z |
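The semaphore fix above handles interruption by catching the exception from wait_interruptible, walking the waiters list, deleting the lock request whose callback is about to go out of scope, and rethrowing; otherwise the semaphore could later signal a destroyed waiter. A self-contained sketch of that cleanup pattern — std::list and std::runtime_error stand in for RethinkDB's intrusive list and interrupted_exc_t:

#include <list>
#include <stdexcept>

struct LockRequest { void* cb; };               // a queued waiter

struct Semaphore {
  std::list<LockRequest*> waiters;

  void lock(void* cb) { waiters.push_back(new LockRequest{cb}); }

  void co_lock_interruptible(void* cb, bool interrupted) {
    lock(cb);
    try {
      if (interrupted) throw std::runtime_error("interrupted");
      // ...otherwise block here until on_semaphore_available() fires...
    } catch (...) {
      // Mirror of the fix above: remove and free our queued request so the
      // semaphore never signals a waiter that no longer exists.
      for (auto it = waiters.begin(); it != waiters.end(); ++it) {
        if ((*it)->cb == cb) {
          delete *it;
          waiters.erase(it);
          break;
        }
      }
      throw;
    }
  }
};

int main() {
  Semaphore s;
  int dummy = 0;
  try {
    s.co_lock_interruptible(&dummy, /*interrupted=*/true);
  } catch (const std::runtime_error&) {
    // expected: the interrupt propagated, and the waiters list is clean
  }
  return s.waiters.empty() ? 0 : 1;
}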
new file mode 100644 <nl> index 00000000000 . . 5820f68b58d <nl> mmm / dev / null <nl> ppp b / folly / Exception . h <nl> <nl> + / * <nl> + * Copyright 2013 Facebook , Inc . <nl> + * <nl> + * Licensed under the Apache License , Version 2 . 0 ( the " License " ) ; <nl> + * you may not use this file except in compliance with the License . <nl> + * You may obtain a copy of the License at <nl> + * <nl> + * http : / / www . apache . org / licenses / LICENSE - 2 . 0 <nl> + * <nl> + * Unless required by applicable law or agreed to in writing , software <nl> + * distributed under the License is distributed on an " AS IS " BASIS , <nl> + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . <nl> + * See the License for the specific language governing permissions and <nl> + * limitations under the License . <nl> + * / <nl> + <nl> + # ifndef FOLLY_EXCEPTION_H_ <nl> + # define FOLLY_EXCEPTION_H_ <nl> + <nl> + # include < errno . h > <nl> + <nl> + # include < stdexcept > <nl> + # include < system_error > <nl> + <nl> + # include " folly / Likely . h " <nl> + <nl> + namespace folly { <nl> + <nl> + / / Helper to throw std : : system_error <nl> + void throwSystemError ( int err , const char * msg ) __attribute__ ( ( noreturn ) ) ; <nl> + inline void throwSystemError ( int err , const char * msg ) { <nl> + throw std : : system_error ( err , std : : system_category ( ) , msg ) ; <nl> + } <nl> + <nl> + / / Helper to throw std : : system_error from errno <nl> + void throwSystemError ( const char * msg ) __attribute__ ( ( noreturn ) ) ; <nl> + inline void throwSystemError ( const char * msg ) { <nl> + throwSystemError ( errno , msg ) ; <nl> + } <nl> + <nl> + / / Check a Posix return code ( 0 on success , error number on error ) , throw <nl> + / / on error . <nl> + inline void checkPosixError ( int err , const char * msg ) { <nl> + if ( UNLIKELY ( err ! = 0 ) ) { <nl> + throwSystemError ( err , msg ) ; <nl> + } <nl> + } <nl> + <nl> + / / Check a Linux kernel - style return code ( > = 0 on success , negative error <nl> + / / number on error ) , throw on error . <nl> + inline void checkKernelError ( ssize_t ret , const char * msg ) { <nl> + if ( UNLIKELY ( ret < 0 ) ) { <nl> + throwSystemError ( - ret , msg ) ; <nl> + } <nl> + } <nl> + <nl> + / / Check a traditional Uinx return code ( - 1 and sets errno on error ) , throw <nl> + / / on error . <nl> + inline void checkUnixError ( ssize_t ret , const char * msg ) { <nl> + if ( UNLIKELY ( ret = = - 1 ) ) { <nl> + throwSystemError ( msg ) ; <nl> + } <nl> + } <nl> + inline void checkUnixError ( ssize_t ret , int savedErrno , const char * msg ) { <nl> + if ( UNLIKELY ( ret = = - 1 ) ) { <nl> + throwSystemError ( savedErrno , msg ) ; <nl> + } <nl> + } <nl> + <nl> + } / / namespace folly <nl> + <nl> + # endif / * FOLLY_EXCEPTION_H_ * / <nl> + <nl> mmm a / folly / Subprocess . cpp <nl> ppp b / folly / Subprocess . cpp <nl> <nl> # include < glog / logging . h > <nl> <nl> # include " folly / Conv . h " <nl> + # include " folly / Exception . h " <nl> # include " folly / ScopeGuard . h " <nl> # include " folly / String . h " <nl> # include " folly / io / Cursor . 
h " <nl> std : : unique_ptr < const char * [ ] > cloneStrings ( const std : : vector < std : : string > & s ) { <nl> return d ; <nl> } <nl> <nl> - / / Helper to throw std : : system_error <nl> - void throwSystemError ( int err , const char * msg ) __attribute__ ( ( noreturn ) ) ; <nl> - void throwSystemError ( int err , const char * msg ) { <nl> - throw std : : system_error ( err , std : : system_category ( ) , msg ) ; <nl> - } <nl> - <nl> - / / Helper to throw std : : system_error from errno <nl> - void throwSystemError ( const char * msg ) __attribute__ ( ( noreturn ) ) ; <nl> - void throwSystemError ( const char * msg ) { <nl> - throwSystemError ( errno , msg ) ; <nl> - } <nl> - <nl> - / / Check a Posix return code ( 0 on success , error number on error ) , throw <nl> - / / on error . <nl> - void checkPosixError ( int err , const char * msg ) { <nl> - if ( err ! = 0 ) { <nl> - throwSystemError ( err , msg ) ; <nl> - } <nl> - } <nl> - <nl> - / / Check a traditional Uinx return code ( - 1 and sets errno on error ) , throw <nl> - / / on error . <nl> - void checkUnixError ( ssize_t ret , const char * msg ) { <nl> - if ( ret = = - 1 ) { <nl> - throwSystemError ( msg ) ; <nl> - } <nl> - } <nl> - void checkUnixError ( ssize_t ret , int savedErrno , const char * msg ) { <nl> - if ( ret = = - 1 ) { <nl> - throwSystemError ( savedErrno , msg ) ; <nl> - } <nl> - } <nl> - <nl> / / Check a wait ( ) status , throw on non - successful <nl> void checkStatus ( ProcessReturnCode returnCode ) { <nl> if ( returnCode . state ( ) ! = ProcessReturnCode : : EXITED | | <nl> new file mode 100644 <nl> index 00000000000 . . 2080b72038e <nl> mmm / dev / null <nl> ppp b / folly / experimental / io / AsyncIO . cpp <nl> <nl> + / * <nl> + * Copyright 2013 Facebook , Inc . <nl> + * <nl> + * Licensed under the Apache License , Version 2 . 0 ( the " License " ) ; <nl> + * you may not use this file except in compliance with the License . <nl> + * You may obtain a copy of the License at <nl> + * <nl> + * http : / / www . apache . org / licenses / LICENSE - 2 . 0 <nl> + * <nl> + * Unless required by applicable law or agreed to in writing , software <nl> + * distributed under the License is distributed on an " AS IS " BASIS , <nl> + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . <nl> + * See the License for the specific language governing permissions and <nl> + * limitations under the License . <nl> + * / <nl> + <nl> + # include " folly / experimental / io / AsyncIO . h " <nl> + <nl> + # include < cerrno > <nl> + <nl> + # include < glog / logging . h > <nl> + <nl> + # include " folly / Exception . h " <nl> + # include " folly / Likely . h " <nl> + # include " folly / String . h " <nl> + # include " folly / eventfd . h " <nl> + <nl> + namespace folly { <nl> + <nl> + AsyncIO : : AsyncIO ( size_t capacity , PollMode pollMode ) <nl> + : ctx_ ( 0 ) , <nl> + pending_ ( 0 ) , <nl> + capacity_ ( capacity ) , <nl> + pollFd_ ( - 1 ) { <nl> + if ( UNLIKELY ( capacity_ = = 0 ) ) { <nl> + throw std : : out_of_range ( " AsyncIO : capacity must not be 0 " ) ; <nl> + } <nl> + completed_ . 
reserve ( capacity_ ) ; <nl> + if ( pollMode = = POLLABLE ) { <nl> + pollFd_ = eventfd ( 0 , EFD_NONBLOCK ) ; <nl> + checkUnixError ( pollFd_ , " AsyncIO : eventfd creation failed " ) ; <nl> + } <nl> + } <nl> + <nl> + AsyncIO : : ~ AsyncIO ( ) { <nl> + CHECK_EQ ( pending_ , 0 ) ; <nl> + if ( ctx_ ) { <nl> + int rc = io_queue_release ( ctx_ ) ; <nl> + CHECK_EQ ( rc , 0 ) < < " io_queue_release : " < < errnoStr ( - rc ) ; <nl> + } <nl> + if ( pollFd_ ! = - 1 ) { <nl> + CHECK_ERR ( close ( pollFd_ ) ) ; <nl> + } <nl> + } <nl> + <nl> + void AsyncIO : : pread ( Op * op , int fd , void * buf , size_t size , off_t start ) { <nl> + iocb cb ; <nl> + io_prep_pread ( & cb , fd , buf , size , start ) ; <nl> + submit ( op , & cb ) ; <nl> + } <nl> + <nl> + void AsyncIO : : pread ( Op * op , int fd , Range < unsigned char * > range , <nl> + off_t start ) { <nl> + pread ( op , fd , range . begin ( ) , range . size ( ) , start ) ; <nl> + } <nl> + <nl> + void AsyncIO : : preadv ( Op * op , int fd , const iovec * iov , int iovcnt , <nl> + off_t start ) { <nl> + iocb cb ; <nl> + io_prep_preadv ( & cb , fd , iov , iovcnt , start ) ; <nl> + submit ( op , & cb ) ; <nl> + } <nl> + <nl> + void AsyncIO : : pwrite ( Op * op , int fd , const void * buf , size_t size , <nl> + off_t start ) { <nl> + iocb cb ; <nl> + io_prep_pwrite ( & cb , fd , const_cast < void * > ( buf ) , size , start ) ; <nl> + submit ( op , & cb ) ; <nl> + } <nl> + <nl> + void AsyncIO : : pwrite ( Op * op , int fd , Range < const unsigned char * > range , <nl> + off_t start ) { <nl> + pwrite ( op , fd , range . begin ( ) , range . size ( ) , start ) ; <nl> + } <nl> + <nl> + void AsyncIO : : pwritev ( Op * op , int fd , const iovec * iov , int iovcnt , <nl> + off_t start ) { <nl> + iocb cb ; <nl> + io_prep_pwritev ( & cb , fd , iov , iovcnt , start ) ; <nl> + submit ( op , & cb ) ; <nl> + } <nl> + <nl> + void AsyncIO : : initializeContext ( ) { <nl> + if ( ! ctx_ ) { <nl> + int rc = io_queue_init ( capacity_ , & ctx_ ) ; <nl> + / / returns negative errno <nl> + checkKernelError ( rc , " AsyncIO : io_queue_init failed " ) ; <nl> + DCHECK ( ctx_ ) ; <nl> + } <nl> + } <nl> + <nl> + void AsyncIO : : submit ( Op * op , iocb * cb ) { <nl> + if ( UNLIKELY ( pending_ > = capacity_ ) ) { <nl> + throw std : : out_of_range ( " AsyncIO : too many pending requests " ) ; <nl> + } <nl> + if ( UNLIKELY ( op - > state ( ) ! = Op : : UNINITIALIZED ) ) { <nl> + throw std : : logic_error ( " AsyncIO : Invalid Op state in submit " ) ; <nl> + } <nl> + initializeContext ( ) ; / / on demand <nl> + cb - > data = op ; <nl> + if ( pollFd_ ! = - 1 ) { <nl> + io_set_eventfd ( cb , pollFd_ ) ; <nl> + } <nl> + int rc = io_submit ( ctx_ , 1 , & cb ) ; <nl> + checkKernelError ( rc , " AsyncIO : io_submit failed " ) ; <nl> + DCHECK_EQ ( rc , 1 ) ; <nl> + op - > start ( ) ; <nl> + + + pending_ ; <nl> + } <nl> + <nl> + Range < AsyncIO : : Op * * > AsyncIO : : wait ( size_t minRequests ) { <nl> + if ( UNLIKELY ( ! ctx_ ) ) { <nl> + throw std : : logic_error ( " AsyncIO : wait called with no requests " ) ; <nl> + } <nl> + if ( UNLIKELY ( pollFd_ ! = - 1 ) ) { <nl> + throw std : : logic_error ( " AsyncIO : wait not allowed on pollable object " ) ; <nl> + } <nl> + return doWait ( minRequests , pending_ ) ; <nl> + } <nl> + <nl> + Range < AsyncIO : : Op * * > AsyncIO : : pollCompleted ( ) { <nl> + if ( UNLIKELY ( ! 
ctx_ ) ) { <nl> + throw std : : logic_error ( " AsyncIO : pollCompleted called with no requests " ) ; <nl> + } <nl> + if ( UNLIKELY ( pollFd_ = = - 1 ) ) { <nl> + throw std : : logic_error ( <nl> + " AsyncIO : pollCompleted not allowed on non - pollable object " ) ; <nl> + } <nl> + uint64_t numEvents ; <nl> + / / This sets the eventFd counter to 0 , see <nl> + / / http : / / www . kernel . org / doc / man - pages / online / pages / man2 / eventfd . 2 . html <nl> + ssize_t rc ; <nl> + do { <nl> + rc = : : read ( pollFd_ , & numEvents , 8 ) ; <nl> + } while ( rc = = - 1 & & errno = = EINTR ) ; <nl> + if ( UNLIKELY ( rc = = - 1 & & errno = = EAGAIN ) ) { <nl> + return Range < Op * * > ( ) ; / / nothing completed <nl> + } <nl> + checkUnixError ( rc , " AsyncIO : read from event fd failed " ) ; <nl> + DCHECK_EQ ( rc , 8 ) ; <nl> + <nl> + DCHECK_GT ( numEvents , 0 ) ; <nl> + DCHECK_LE ( numEvents , pending_ ) ; <nl> + <nl> + / / Don ' t reap more than numEvents , as we ' ve just reset the counter to 0 . <nl> + return doWait ( numEvents , numEvents ) ; <nl> + } <nl> + <nl> + Range < AsyncIO : : Op * * > AsyncIO : : doWait ( size_t minRequests , size_t maxRequests ) { <nl> + io_event events [ pending_ ] ; <nl> + int count ; <nl> + do { <nl> + / / Wait forever <nl> + count = io_getevents ( ctx_ , minRequests , maxRequests , events , nullptr ) ; <nl> + } while ( count = = - EINTR ) ; <nl> + checkKernelError ( count , " AsyncIO : io_getevents failed " ) ; <nl> + DCHECK_GE ( count , minRequests ) ; / / the man page says so <nl> + DCHECK_LE ( count , pending_ ) ; <nl> + <nl> + completed_ . clear ( ) ; <nl> + if ( count = = 0 ) { <nl> + return folly : : Range < Op * * > ( ) ; <nl> + } <nl> + <nl> + for ( size_t i = 0 ; i < count ; + + i ) { <nl> + Op * op = static_cast < Op * > ( events [ i ] . data ) ; <nl> + DCHECK ( op ) ; <nl> + op - > complete ( events [ i ] . res ) ; <nl> + completed_ . push_back ( op ) ; <nl> + } <nl> + pending_ - = count ; <nl> + <nl> + return folly : : Range < Op * * > ( & completed_ . front ( ) , count ) ; <nl> + } <nl> + <nl> + AsyncIO : : Op : : Op ( ) <nl> + : state_ ( UNINITIALIZED ) , <nl> + result_ ( - EINVAL ) { <nl> + } <nl> + <nl> + void AsyncIO : : Op : : reset ( ) { <nl> + if ( UNLIKELY ( state_ = = PENDING ) ) { <nl> + throw std : : logic_error ( " AsyncIO : invalid state for reset " ) ; <nl> + } <nl> + state_ = UNINITIALIZED ; <nl> + result_ = - EINVAL ; <nl> + } <nl> + <nl> + AsyncIO : : Op : : ~ Op ( ) { <nl> + CHECK_NE ( state_ , PENDING ) ; <nl> + } <nl> + <nl> + void AsyncIO : : Op : : start ( ) { <nl> + DCHECK_EQ ( state_ , UNINITIALIZED ) ; <nl> + state_ = PENDING ; <nl> + } <nl> + <nl> + void AsyncIO : : Op : : complete ( ssize_t result ) { <nl> + DCHECK_EQ ( state_ , PENDING ) ; <nl> + state_ = COMPLETED ; <nl> + result_ = result ; <nl> + onCompleted ( ) ; <nl> + } <nl> + <nl> + void AsyncIO : : Op : : onCompleted ( ) { } / / default : do nothing <nl> + <nl> + ssize_t AsyncIO : : Op : : result ( ) const { <nl> + if ( UNLIKELY ( state_ ! 
= COMPLETED ) ) { <nl> + throw std : : logic_error ( " AsyncIO : Invalid Op state in result " ) ; <nl> + } <nl> + return result_ ; <nl> + } <nl> + <nl> + CallbackOp : : CallbackOp ( Callback & & callback ) : callback_ ( std : : move ( callback ) ) { } <nl> + <nl> + CallbackOp : : ~ CallbackOp ( ) { } <nl> + <nl> + CallbackOp * CallbackOp : : make ( Callback & & callback ) { <nl> + / / Ensure created on the heap <nl> + return new CallbackOp ( std : : move ( callback ) ) ; <nl> + } <nl> + <nl> + void CallbackOp : : onCompleted ( ) { <nl> + callback_ ( result ( ) ) ; <nl> + delete this ; <nl> + } <nl> + <nl> + } / / namespace folly <nl> + <nl> new file mode 100644 <nl> index 00000000000 . . 81ed94dcc72 <nl> mmm / dev / null <nl> ppp b / folly / experimental / io / AsyncIO . h <nl> <nl> + / * <nl> + * Copyright 2013 Facebook , Inc . <nl> + * <nl> + * Licensed under the Apache License , Version 2 . 0 ( the " License " ) ; <nl> + * you may not use this file except in compliance with the License . <nl> + * You may obtain a copy of the License at <nl> + * <nl> + * http : / / www . apache . org / licenses / LICENSE - 2 . 0 <nl> + * <nl> + * Unless required by applicable law or agreed to in writing , software <nl> + * distributed under the License is distributed on an " AS IS " BASIS , <nl> + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . <nl> + * See the License for the specific language governing permissions and <nl> + * limitations under the License . <nl> + * / <nl> + <nl> + # ifndef FOLLY_IO_ASYNCIO_H_ <nl> + # define FOLLY_IO_ASYNCIO_H_ <nl> + <nl> + # include < sys / types . h > <nl> + # include < sys / uio . h > <nl> + # include < libaio . h > <nl> + <nl> + # include < cstdint > <nl> + # include < functional > <nl> + # include < utility > <nl> + # include < vector > <nl> + <nl> + # include < boost / noncopyable . hpp > <nl> + <nl> + # include " folly / Portability . h " <nl> + # include " folly / Range . h " <nl> + <nl> + namespace folly { <nl> + <nl> + / * * <nl> + * C + + interface around Linux Async IO . <nl> + * / <nl> + class AsyncIO : private boost : : noncopyable { <nl> + public : <nl> + enum PollMode { <nl> + NOT_POLLABLE , <nl> + POLLABLE <nl> + } ; <nl> + <nl> + / * * <nl> + * Create an AsyncIO context capacble of holding at most ' capacity ' pending <nl> + * requests at the same time . As requests complete , others can be scheduled , <nl> + * as long as this limit is not exceeded . <nl> + * <nl> + * Note : the maximum number of allowed concurrent requests is controlled <nl> + * by the fs . aio - max - nr sysctl , the default value is usually 64K . <nl> + * <nl> + * If pollMode is POLLABLE , pollFd ( ) will return a file descriptor that <nl> + * can be passed to poll / epoll / select and will become readable when <nl> + * any IOs on this AioReader have completed . If you do this , you must use <nl> + * pollCompleted ( ) instead of wait ( ) - - do not read from the pollFd ( ) <nl> + * file descriptor directly . <nl> + * / <nl> + explicit AsyncIO ( size_t capacity , PollMode pollMode = NOT_POLLABLE ) ; <nl> + ~ AsyncIO ( ) ; <nl> + <nl> + / * * <nl> + * An Op represents a pending operation . You may inherit from Op ( and <nl> + * override onCompleted ) in order to be notified of completion ( see <nl> + * CallbackOp below for an example ) , or you may use Op ' s methods directly . <nl> + * <nl> + * The Op must remain allocated until completion . 
<nl> + * / <nl> + class Op : private boost : : noncopyable { <nl> + friend class AsyncIO ; <nl> + public : <nl> + Op ( ) ; <nl> + virtual ~ Op ( ) ; <nl> + <nl> + / / There would be a cancel ( ) method here if Linux AIO actually implemented <nl> + / / it . But let ' s not get your hopes up . <nl> + <nl> + enum State { <nl> + UNINITIALIZED , <nl> + PENDING , <nl> + COMPLETED <nl> + } ; <nl> + <nl> + / * * <nl> + * Return the current operation state . <nl> + * / <nl> + State state ( ) const { return state_ ; } <nl> + <nl> + / * * <nl> + * Reset the operation for reuse . It is an error to call reset ( ) on <nl> + * an Op that is still pending . <nl> + * / <nl> + void reset ( ) ; <nl> + <nl> + / * * <nl> + * Retrieve the result of this operation . Returns > = 0 on success , <nl> + * - errno on failure ( that is , using the Linux kernel error reporting <nl> + * conventions ) . Use checkKernelError ( folly / Exception . h ) on the result to <nl> + * throw a std : : system_error in case of error instead . <nl> + * <nl> + * It is an error to call this if the Op hasn ' t yet started or is still <nl> + * pending . <nl> + * / <nl> + ssize_t result ( ) const ; <nl> + <nl> + private : <nl> + void start ( ) ; <nl> + void complete ( ssize_t result ) ; <nl> + <nl> + virtual void onCompleted ( ) ; <nl> + <nl> + State state_ ; <nl> + ssize_t result_ ; <nl> + } ; <nl> + <nl> + / * * <nl> + * Initiate a read request . <nl> + * / <nl> + void pread ( Op * op , int fd , void * buf , size_t size , off_t start ) ; <nl> + void pread ( Op * op , int fd , Range < unsigned char * > range , off_t start ) ; <nl> + void preadv ( Op * op , int fd , const iovec * iov , int iovcnt , off_t start ) ; <nl> + <nl> + / * * <nl> + * Initiate a write request . <nl> + * / <nl> + void pwrite ( Op * op , int fd , const void * buf , size_t size , off_t start ) ; <nl> + void pwrite ( Op * op , int fd , Range < const unsigned char * > range , off_t start ) ; <nl> + void pwritev ( Op * op , int fd , const iovec * iov , int iovcnt , off_t start ) ; <nl> + <nl> + / * * <nl> + * Wait for at least minRequests to complete . Returns the requests that <nl> + * have completed ; the returned range is valid until the next call to <nl> + * wait ( ) . minRequests may be 0 to not block . <nl> + * / <nl> + Range < Op * * > wait ( size_t minRequests ) ; <nl> + <nl> + / * * <nl> + * Return the number of pending requests . <nl> + * / <nl> + size_t pending ( ) const { return pending_ ; } <nl> + <nl> + / * * <nl> + * Return the maximum number of requests that can be kept outstanding <nl> + * at any one time . <nl> + * / <nl> + size_t capacity ( ) const { return capacity_ ; } <nl> + <nl> + / * * <nl> + * If POLLABLE , return a file descriptor that can be passed to poll / epoll <nl> + * and will become readable when any async IO operations have completed . <nl> + * If NOT_POLLABLE , return - 1 . <nl> + * / <nl> + int pollFd ( ) const { return pollFd_ ; } <nl> + <nl> + / * * <nl> + * If POLLABLE , call instead of wait after the file descriptor returned <nl> + * by pollFd ( ) became readable . The returned range is valid until the next <nl> + * call to pollCompleted ( ) . 
<nl> + * / <nl> + Range < Op * * > pollCompleted ( ) ; <nl> + <nl> + private : <nl> + void initializeContext ( ) ; <nl> + void submit ( Op * op , iocb * cb ) ; <nl> + Range < Op * * > doWait ( size_t minRequests , size_t maxRequests ) ; <nl> + <nl> + io_context_t ctx_ ; <nl> + size_t pending_ ; <nl> + size_t capacity_ ; <nl> + int pollFd_ ; <nl> + std : : vector < Op * > completed_ ; <nl> + } ; <nl> + <nl> + / * * <nl> + * Implementation of AsyncIO : : Op that calls a callback and then deletes <nl> + * itself . <nl> + * / <nl> + class CallbackOp : public AsyncIO : : Op { <nl> + public : <nl> + typedef std : : function < void ( ssize_t ) > Callback ; <nl> + static CallbackOp * make ( Callback & & callback ) ; <nl> + <nl> + private : <nl> + explicit CallbackOp ( Callback & & callback ) ; <nl> + ~ CallbackOp ( ) ; <nl> + void onCompleted ( ) FOLLY_OVERRIDE ; <nl> + <nl> + Callback callback_ ; <nl> + } ; <nl> + <nl> + } / / namespace folly <nl> + <nl> + # endif / * FOLLY_IO_ASYNCIO_H_ * / <nl> + <nl> new file mode 100644 <nl> index 00000000000 . . 77595c66be7 <nl> mmm / dev / null <nl> ppp b / folly / experimental / io / test / AsyncIOTest . cpp <nl> <nl> + / * <nl> + * Copyright 2013 Facebook , Inc . <nl> + * <nl> + * Licensed under the Apache License , Version 2 . 0 ( the " License " ) ; <nl> + * you may not use this file except in compliance with the License . <nl> + * You may obtain a copy of the License at <nl> + * <nl> + * http : / / www . apache . org / licenses / LICENSE - 2 . 0 <nl> + * <nl> + * Unless required by applicable law or agreed to in writing , software <nl> + * distributed under the License is distributed on an " AS IS " BASIS , <nl> + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . <nl> + * See the License for the specific language governing permissions and <nl> + * limitations under the License . <nl> + * / <nl> + <nl> + # include " folly / experimental / io / AsyncIO . h " <nl> + <nl> + # include < sys / stat . h > <nl> + # include < sys / types . h > <nl> + # include < fcntl . h > <nl> + # include < poll . h > <nl> + <nl> + # include < cstdlib > <nl> + # include < cstdio > <nl> + # include < memory > <nl> + # include < random > <nl> + # include < vector > <nl> + <nl> + # include < glog / logging . h > <nl> + # include < gtest / gtest . h > <nl> + <nl> + # include " folly / experimental / io / FsUtil . h " <nl> + # include " folly / ScopeGuard . h " <nl> + # include " folly / String . h " <nl> + <nl> + namespace fs = folly : : fs ; <nl> + using folly : : AsyncIO ; <nl> + <nl> + namespace { <nl> + <nl> + constexpr size_t kAlignment = 512 ; / / align reads to 512 B ( for O_DIRECT ) <nl> + <nl> + struct TestSpec { <nl> + off_t start ; <nl> + size_t size ; <nl> + } ; <nl> + <nl> + void waitUntilReadable ( int fd ) { <nl> + pollfd pfd ; <nl> + pfd . fd = fd ; <nl> + pfd . events = POLLIN ; <nl> + <nl> + int r ; <nl> + do { <nl> + r = poll ( & pfd , 1 , - 1 ) ; / / wait forever <nl> + } while ( r = = - 1 & & errno = = EINTR ) ; <nl> + PCHECK ( r = = 1 ) ; <nl> + CHECK_EQ ( pfd . revents , POLLIN ) ; / / no errors etc <nl> + } <nl> + <nl> + folly : : Range < AsyncIO : : Op * * > readerWait ( AsyncIO * reader ) { <nl> + int fd = reader - > pollFd ( ) ; <nl> + if ( fd = = - 1 ) { <nl> + return reader - > wait ( 1 ) ; <nl> + } else { <nl> + waitUntilReadable ( fd ) ; <nl> + return reader - > pollCompleted ( ) ; <nl> + } <nl> + } <nl> + <nl> + / / Temporary file that is NOT kept open but is deleted on exit . 
<nl> + / / Generate random - looking but reproduceable data . <nl> + class TemporaryFile { <nl> + public : <nl> + explicit TemporaryFile ( size_t size ) ; <nl> + ~ TemporaryFile ( ) ; <nl> + <nl> + const fs : : path path ( ) const { return path_ ; } <nl> + <nl> + private : <nl> + fs : : path path_ ; <nl> + } ; <nl> + <nl> + TemporaryFile : : TemporaryFile ( size_t size ) <nl> + : path_ ( fs : : temp_directory_path ( ) / fs : : unique_path ( ) ) { <nl> + CHECK_EQ ( size % sizeof ( uint32_t ) , 0 ) ; <nl> + size / = sizeof ( uint32_t ) ; <nl> + const uint32_t seed = 42 ; <nl> + std : : mt19937 rnd ( seed ) ; <nl> + <nl> + const size_t bufferSize = 1U < < 16 ; <nl> + uint32_t buffer [ bufferSize ] ; <nl> + <nl> + FILE * fp = : : fopen ( path_ . c_str ( ) , " wb " ) ; <nl> + PCHECK ( fp ! = nullptr ) ; <nl> + while ( size ) { <nl> + size_t n = std : : min ( size , bufferSize ) ; <nl> + for ( size_t i = 0 ; i < n ; + + i ) { <nl> + buffer [ i ] = rnd ( ) ; <nl> + } <nl> + size_t written = : : fwrite ( buffer , sizeof ( uint32_t ) , n , fp ) ; <nl> + PCHECK ( written = = n ) ; <nl> + size - = written ; <nl> + } <nl> + PCHECK ( : : fclose ( fp ) = = 0 ) ; <nl> + } <nl> + <nl> + TemporaryFile : : ~ TemporaryFile ( ) { <nl> + try { <nl> + fs : : remove ( path_ ) ; <nl> + } catch ( const fs : : filesystem_error & e ) { <nl> + LOG ( ERROR ) < < " fs : : remove : " < < folly : : exceptionStr ( e ) ; <nl> + } <nl> + } <nl> + <nl> + TemporaryFile thisBinary ( 6 < < 20 ) ; / / 6MiB <nl> + <nl> + void testReadsSerially ( const std : : vector < TestSpec > & specs , <nl> + AsyncIO : : PollMode pollMode ) { <nl> + AsyncIO aioReader ( 1 , pollMode ) ; <nl> + AsyncIO : : Op op ; <nl> + int fd = : : open ( thisBinary . path ( ) . c_str ( ) , O_DIRECT | O_RDONLY ) ; <nl> + PCHECK ( fd ! = - 1 ) ; <nl> + SCOPE_EXIT { <nl> + : : close ( fd ) ; <nl> + } ; <nl> + <nl> + for ( int i = 0 ; i < specs . size ( ) ; i + + ) { <nl> + std : : unique_ptr < char [ ] > buf ( new char [ specs [ i ] . size ] ) ; <nl> + aioReader . pread ( & op , fd , buf . get ( ) , specs [ i ] . size , specs [ i ] . start ) ; <nl> + EXPECT_EQ ( aioReader . pending ( ) , 1 ) ; <nl> + auto ops = readerWait ( & aioReader ) ; <nl> + EXPECT_EQ ( 1 , ops . size ( ) ) ; <nl> + EXPECT_TRUE ( ops [ 0 ] = = & op ) ; <nl> + EXPECT_EQ ( aioReader . pending ( ) , 0 ) ; <nl> + ssize_t res = op . result ( ) ; <nl> + EXPECT_LE ( 0 , res ) < < folly : : errnoStr ( - res ) ; <nl> + EXPECT_EQ ( specs [ i ] . size , res ) ; <nl> + op . reset ( ) ; <nl> + } <nl> + } <nl> + <nl> + void testReadsParallel ( const std : : vector < TestSpec > & specs , <nl> + AsyncIO : : PollMode pollMode ) { <nl> + AsyncIO aioReader ( specs . size ( ) , pollMode ) ; <nl> + std : : unique_ptr < AsyncIO : : Op [ ] > ops ( new AsyncIO : : Op [ specs . size ( ) ] ) ; <nl> + std : : vector < std : : unique_ptr < char [ ] > > bufs ( specs . size ( ) ) ; <nl> + <nl> + int fd = : : open ( thisBinary . path ( ) . c_str ( ) , O_DIRECT | O_RDONLY ) ; <nl> + PCHECK ( fd ! = - 1 ) ; <nl> + SCOPE_EXIT { <nl> + : : close ( fd ) ; <nl> + } ; <nl> + for ( int i = 0 ; i < specs . size ( ) ; i + + ) { <nl> + bufs [ i ] . reset ( new char [ specs [ i ] . size ] ) ; <nl> + aioReader . pread ( & ops [ i ] , fd , bufs [ i ] . get ( ) , specs [ i ] . size , <nl> + specs [ i ] . start ) ; <nl> + } <nl> + std : : vector < bool > pending ( specs . size ( ) , true ) ; <nl> + <nl> + size_t remaining = specs . size ( ) ; <nl> + while ( remaining ! = 0 ) { <nl> + EXPECT_EQ ( remaining , aioReader . 
pending ( ) ) ; <nl> + auto completed = readerWait ( & aioReader ) ; <nl> + size_t nrRead = completed . size ( ) ; <nl> + EXPECT_NE ( nrRead , 0 ) ; <nl> + remaining - = nrRead ; <nl> + <nl> + for ( int i = 0 ; i < nrRead ; i + + ) { <nl> + int id = completed [ i ] - ops . get ( ) ; <nl> + EXPECT_GE ( id , 0 ) ; <nl> + EXPECT_LT ( id , specs . size ( ) ) ; <nl> + EXPECT_TRUE ( pending [ id ] ) ; <nl> + pending [ id ] = false ; <nl> + ssize_t res = ops [ id ] . result ( ) ; <nl> + EXPECT_LE ( 0 , res ) < < folly : : errnoStr ( - res ) ; <nl> + EXPECT_EQ ( specs [ id ] . size , res ) ; <nl> + } <nl> + } <nl> + EXPECT_EQ ( aioReader . pending ( ) , 0 ) ; <nl> + for ( int i = 0 ; i < pending . size ( ) ; i + + ) { <nl> + EXPECT_FALSE ( pending [ i ] ) ; <nl> + } <nl> + } <nl> + <nl> + void testReads ( const std : : vector < TestSpec > & specs , <nl> + AsyncIO : : PollMode pollMode ) { <nl> + testReadsSerially ( specs , pollMode ) ; <nl> + testReadsParallel ( specs , pollMode ) ; <nl> + } <nl> + <nl> + } / / anonymous namespace <nl> + <nl> + TEST ( AsyncIO , ZeroAsyncDataNotPollable ) { <nl> + testReads ( { { 0 , 0 } } , AsyncIO : : NOT_POLLABLE ) ; <nl> + } <nl> + <nl> + TEST ( AsyncIO , ZeroAsyncDataPollable ) { <nl> + testReads ( { { 0 , 0 } } , AsyncIO : : POLLABLE ) ; <nl> + } <nl> + <nl> + TEST ( AsyncIO , SingleAsyncDataNotPollable ) { <nl> + testReads ( { { 0 , 512 } } , AsyncIO : : NOT_POLLABLE ) ; <nl> + testReads ( { { 0 , 512 } } , AsyncIO : : NOT_POLLABLE ) ; <nl> + } <nl> + <nl> + TEST ( AsyncIO , SingleAsyncDataPollable ) { <nl> + testReads ( { { 0 , 512 } } , AsyncIO : : POLLABLE ) ; <nl> + testReads ( { { 0 , 512 } } , AsyncIO : : POLLABLE ) ; <nl> + } <nl> + <nl> + TEST ( AsyncIO , MultipleAsyncDataNotPollable ) { <nl> + testReads ( { { 512 , 1024 } , { 512 , 1024 } , { 512 , 2048 } } , AsyncIO : : NOT_POLLABLE ) ; <nl> + testReads ( { { 512 , 1024 } , { 512 , 1024 } , { 512 , 2048 } } , AsyncIO : : NOT_POLLABLE ) ; <nl> + <nl> + testReads ( { <nl> + { 0 , 5 * 1024 * 1024 } , <nl> + { 512 , 5 * 1024 * 1024 } , <nl> + } , AsyncIO : : NOT_POLLABLE ) ; <nl> + <nl> + testReads ( { <nl> + { 512 , 0 } , <nl> + { 512 , 512 } , <nl> + { 512 , 1024 } , <nl> + { 512 , 10 * 1024 } , <nl> + { 512 , 1024 * 1024 } , <nl> + } , AsyncIO : : NOT_POLLABLE ) ; <nl> + } <nl> + <nl> + TEST ( AsyncIO , MultipleAsyncDataPollable ) { <nl> + testReads ( { { 512 , 1024 } , { 512 , 1024 } , { 512 , 2048 } } , AsyncIO : : POLLABLE ) ; <nl> + testReads ( { { 512 , 1024 } , { 512 , 1024 } , { 512 , 2048 } } , AsyncIO : : POLLABLE ) ; <nl> + <nl> + testReads ( { <nl> + { 0 , 5 * 1024 * 1024 } , <nl> + { 512 , 5 * 1024 * 1024 } , <nl> + } , AsyncIO : : POLLABLE ) ; <nl> + <nl> + testReads ( { <nl> + { 512 , 0 } , <nl> + { 512 , 512 } , <nl> + { 512 , 1024 } , <nl> + { 512 , 10 * 1024 } , <nl> + { 512 , 1024 * 1024 } , <nl> + } , AsyncIO : : POLLABLE ) ; <nl> + } <nl> + <nl> + TEST ( AsyncIO , ManyAsyncDataNotPollable ) { <nl> + { <nl> + std : : vector < TestSpec > v ; <nl> + for ( int i = 0 ; i < 1000 ; i + + ) { <nl> + v . push_back ( { 512 * i , 512 } ) ; <nl> + } <nl> + testReads ( v , AsyncIO : : NOT_POLLABLE ) ; <nl> + } <nl> + } <nl> + <nl> + TEST ( AsyncIO , ManyAsyncDataPollable ) { <nl> + { <nl> + std : : vector < TestSpec > v ; <nl> + for ( int i = 0 ; i < 1000 ; i + + ) { <nl> + v . 
push_back ( { 512 * i , 512 } ) ; <nl> + } <nl> + testReads ( v , AsyncIO : : POLLABLE ) ; <nl> + } <nl> + } <nl> + <nl> + TEST ( AsyncIO , NonBlockingWait ) { <nl> + AsyncIO aioReader ( 1 , AsyncIO : : NOT_POLLABLE ) ; <nl> + AsyncIO : : Op op ; <nl> + int fd = : : open ( thisBinary . path ( ) . c_str ( ) , O_DIRECT | O_RDONLY ) ; <nl> + PCHECK ( fd ! = - 1 ) ; <nl> + SCOPE_EXIT { <nl> + : : close ( fd ) ; <nl> + } ; <nl> + size_t size = 1024 ; <nl> + std : : unique_ptr < char [ ] > buf ( new char [ size ] ) ; <nl> + aioReader . pread ( & op , fd , buf . get ( ) , size , 0 ) ; <nl> + EXPECT_EQ ( aioReader . pending ( ) , 1 ) ; <nl> + <nl> + folly : : Range < AsyncIO : : Op * * > completed ; <nl> + while ( completed . empty ( ) ) { <nl> + / / poll without blocking until the read request completes . <nl> + completed = aioReader . wait ( 0 ) ; <nl> + } <nl> + EXPECT_EQ ( completed . size ( ) , 1 ) ; <nl> + <nl> + EXPECT_TRUE ( completed [ 0 ] = = & op ) ; <nl> + ssize_t res = op . result ( ) ; <nl> + EXPECT_LE ( 0 , res ) < < folly : : errnoStr ( - res ) ; <nl> + EXPECT_EQ ( size , res ) ; <nl> + EXPECT_EQ ( aioReader . pending ( ) , 0 ) ; <nl> + } <nl> + <nl> | AsyncIO in folly | facebook/folly | 8f45b8d51e9fe5cab83d5e62aeae24aa19c3ae80 | 2013-03-19T00:05:03Z |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.