diff : stringlengths 41 – 2.03M
msg : stringlengths 1 – 1.5k
repo : stringlengths 5 – 40
sha : stringlengths 40 – 40
time : stringlengths 20 – 20
mmm a / hphp / hack / src / utils / find . ml <nl> ppp b / hphp / hack / src / utils / find . ml <nl> let paths_to_path_string paths = <nl> let escaped_paths = List . map escape_spaces stringed_paths in <nl> String . concat " " escaped_paths <nl> <nl> - let find_with_name paths pattern = <nl> + let find_with_name ? ( follow_symlinks = false ) paths pattern = <nl> let paths = paths_to_path_string paths in <nl> - let cmd = Utils . spf " find % s - name \ " % s \ " " paths pattern in <nl> + let flags = if follow_symlinks then " - L " else " " in <nl> + let cmd = Utils . spf " find % s % s - name \ " % s \ " " flags paths pattern in <nl> let ic = Unix . open_process_in cmd in <nl> let buf = Buffer . create 16 in <nl> ( try <nl> let find_with_name paths pattern = <nl> ( * Main entry point * ) <nl> ( * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * ) <nl> <nl> - let make_next_files filter ? ( others = [ ] ) root = <nl> + let make_next_files filter ? ( others = [ ] ) ? ( follow_symlinks = false ) root = <nl> let paths = paths_to_path_string ( root : : others ) in <nl> - let ic = Unix . open_process_in ( " find " ^ paths ) in <nl> + let flags = if follow_symlinks then " - L " else " " in <nl> + let ic = Unix . open_process_in ( " find " ^ flags ^ paths ) in <nl> let done_ = ref false in <nl> ( * This is subtle , but to optimize latency , we open the process and <nl> * then return a closure immediately . That way ' find ' gets started <nl> mmm a / hphp / hack / src / utils / find . mli <nl> ppp b / hphp / hack / src / utils / find . mli <nl> <nl> * ) <nl> <nl> val make_next_files : <nl> - ( string - > bool ) - > ? others : Path . t list - > Path . t - > <nl> - ( unit - > string list ) <nl> + ( string - > bool ) - > ? others : Path . t list - > ? follow_symlinks : bool - > <nl> + Path . t - > ( unit - > string list ) <nl> <nl> - val find_with_name : Path . t list - > string - > string list <nl> + val find_with_name : <nl> + ? follow_symlinks : bool - > Path . t list - > string - > string list <nl>
support for symlinked packages under node_modules
facebook/hhvm
d7822fa90fda0a1f763823ae4ffdd16ebf086c79
2015-06-19T21:32:25Z
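The hhvm change above threads an optional follow_symlinks flag through to the external `find` command by inserting `-L` into the shell invocation. A minimal C++ sketch of the same idea (hypothetical search root and pattern; POSIX popen standing in for OCaml's Unix.open_process_in):

```cpp
#include <cstdio>
#include <iostream>
#include <string>

int main() {
    bool follow_symlinks = true;                       // hypothetical toggle, mirrors the new optional argument
    std::string flags = follow_symlinks ? "-L " : "";  // -L makes find follow symbolic links
    std::string cmd = "find " + flags + ". -name '*.ml'";
    FILE* p = popen(cmd.c_str(), "r");                 // POSIX popen; assumes a Unix-like environment
    if (p == nullptr) return 1;
    char buf[4096];
    while (std::fgets(buf, sizeof(buf), p) != nullptr)
        std::cout << buf;                              // stream back each matching path
    pclose(p);
    return 0;
}
```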
mmm a / src / treelearner / feature_histogram . hpp <nl> ppp b / src / treelearner / feature_histogram . hpp <nl> class FeatureHistogram { <nl> * / <nl> void FindBestThreshold ( double sum_gradient , double sum_hessian , data_size_t num_data , <nl> SplitInfo * output ) { <nl> - find_best_threshold_fun_ ( sum_gradient , sum_hessian , num_data , output ) ; <nl> + find_best_threshold_fun_ ( sum_gradient , sum_hessian + 2 * kEpsilon , num_data , output ) ; <nl> if ( output - > gain > kMinScore ) { <nl> is_splittable_ = true ; <nl> } else { <nl>
Fix error where sum_hessian may be zero.
microsoft/LightGBM
6c736da9325dba9d56108ae6742cb5242516911b
2017-01-23T12:16:27Z
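The LightGBM patch above nudges sum_hessian by 2 * kEpsilon before the threshold search so that a histogram whose Hessians sum to exactly zero cannot blow up the gain computation. A minimal sketch of the hazard, assuming a generic GBDT leaf-gain formula and an illustrative kEpsilon value rather than LightGBM's internals:

```cpp
#include <iostream>

constexpr double kEpsilon = 1e-15;  // assumed value, for illustration only

// Generic GBDT split gain: sum_gradient^2 / (sum_hessian + lambda_l2).
// With lambda_l2 == 0 and an all-zero Hessian, the division degenerates.
double LeafGain(double sum_gradient, double sum_hessian, double lambda_l2) {
  return (sum_gradient * sum_gradient) / (sum_hessian + lambda_l2);
}

int main() {
  double sum_gradient = 0.5, sum_hessian = 0.0, lambda_l2 = 0.0;
  std::cout << LeafGain(sum_gradient, sum_hessian, lambda_l2) << "\n";                  // inf
  std::cout << LeafGain(sum_gradient, sum_hessian + 2 * kEpsilon, lambda_l2) << "\n";   // finite
}
```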
mmm a / src / gui / categoryfilterproxymodel . h <nl> ppp b / src / gui / categoryfilterproxymodel . h <nl> class CategoryFilterProxyModel : public QSortFilterProxyModel <nl> <nl> protected : <nl> bool lessThan ( const QModelIndex & left , const QModelIndex & right ) const override ; <nl> + <nl> + private : <nl> + / / we added another overload of index ( ) , hence this using directive : <nl> + using QSortFilterProxyModel : : index ; <nl> } ; <nl> <nl> # endif / / CATEGORYFILTERPROXYMODEL_H <nl>
Merge pull request from evsh/fix-build
qbittorrent/qBittorrent
309ae0b4e9ba9cad3bb4968948471ef909ce908e
2017-05-18T15:35:53Z
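The qBittorrent fix above re-exposes the base-class index() overloads with a using-declaration, because declaring a new index() overload in a derived class hides every inherited one. A small self-contained sketch of that name-hiding rule (generic Base/Derived types, not the Qt classes):

```cpp
#include <iostream>

struct Base {
    int index(int row, int col) const { return row * 10 + col; }
};

struct Derived : Base {
    using Base::index;                            // without this, d.index(2, 3) below fails to compile
    int index(const char*) const { return -1; }   // new overload added in the derived class
};

int main() {
    Derived d;
    std::cout << d.index(2, 3) << "\n";           // resolves to Base::index via the using-declaration
    std::cout << d.index("key") << "\n";          // resolves to the derived overload
}
```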
mmm a / include / swift / AST / Decl . h <nl> ppp b / include / swift / AST / Decl . h <nl> class AbstractStorageDecl : public ValueDecl { <nl> / / / only valid on a declaration with Observing storage . <nl> FuncDecl * getDidSetFunc ( ) const { return getDidSetInfo ( ) . DidSet ; } <nl> <nl> - / / / Return true if this storage can ( but doesn ' t have to ) be accessed with <nl> - / / / Objective - C - compatible getters and setters . <nl> - bool hasForeignGetterAndSetter ( ) const ; <nl> - <nl> - / / / Return true if this storage * must * be accessed with Objective - C - compatible <nl> - / / / getters and setters . <nl> - bool requiresForeignGetterAndSetter ( ) const ; <nl> - <nl> / / / Given that this is an Objective - C property or subscript declaration , <nl> / / / produce its getter selector . <nl> ObjCSelector getObjCGetterSelector ( LazyResolver * resolver = nullptr , <nl> mmm a / lib / AST / Decl . cpp <nl> ppp b / lib / AST / Decl . cpp <nl> bool VarDecl : : isSelfParameter ( ) const { <nl> return false ; <nl> } <nl> <nl> - / / / Return true if this stored property has a getter and <nl> - / / / setter that are accessible from Objective - C . <nl> - bool AbstractStorageDecl : : hasForeignGetterAndSetter ( ) const { <nl> - if ( auto override = getOverriddenDecl ( ) ) <nl> - return override - > hasForeignGetterAndSetter ( ) ; <nl> - <nl> - if ( ! isObjC ( ) ) <nl> - return false ; <nl> - <nl> - return true ; <nl> - } <nl> - <nl> - bool AbstractStorageDecl : : requiresForeignGetterAndSetter ( ) const { <nl> - if ( isFinal ( ) ) <nl> - return false ; <nl> - if ( hasAccessorFunctions ( ) & & getGetter ( ) - > isImportAsMember ( ) ) <nl> - return true ; <nl> - if ( ! hasForeignGetterAndSetter ( ) ) <nl> - return false ; <nl> - / / Imported accessors are foreign and only have objc entry points . <nl> - if ( hasClangNode ( ) ) <nl> - return true ; <nl> - / / Otherwise , we only dispatch by @ objc if the declaration is dynamic , <nl> - / / NSManaged , or dispatched through an ObjC protocol . <nl> - return isDynamic ( ) <nl> - | | getAttrs ( ) . hasAttribute < NSManagedAttr > ( ) <nl> - | | ( isa < ProtocolDecl > ( getDeclContext ( ) ) & & isProtocolRequirement ( ) ) ; <nl> - } <nl> - <nl> - <nl> bool VarDecl : : isAnonClosureParam ( ) const { <nl> auto name = getName ( ) ; <nl> if ( name . empty ( ) ) <nl>
AST: Remove AbstractStorageDecl::{has,requires}ForeignGetterAndSetter()
apple/swift
d1f1c682bc583fd4cb8699f409ba1040e82f9282
2018-01-05T05:53:34Z
mmm a / tensorflow / contrib / cmake / CMakeLists . txt <nl> ppp b / tensorflow / contrib / cmake / CMakeLists . txt <nl> <nl> # Minimum CMake required <nl> cmake_minimum_required ( VERSION 3 . 5 ) <nl> <nl> + if ( WIN32 ) <nl> + if ( $ { CMAKE_VERSION } VERSION_LESS " 3 . 8 " ) <nl> + message ( WARNING " Your current cmake version is $ { CMAKE_VERSION } which does not support setting the toolset architecture to x64 . This may cause \ " compiler out of heap space \ " errors when building . Consider upgrading your cmake to > 3 . 8 and using the flag - Thost = x64 when running cmake . " ) <nl> + else ( ) <nl> + if ( NOT CMAKE_VS_PLATFORM_TOOLSET_HOST_ARCHITECTURE OR NOT " $ { CMAKE_VS_PLATFORM_TOOLSET_HOST_ARCHITECTURE } " STREQUAL " x64 " ) <nl> + message ( WARNING " Your current cmake generator is set to use 32 bit toolset architecture . This may cause \ " compiler out of heap space \ " errors when building . Consider using the flag - Thost = x64 when running cmake . " ) <nl> + endif ( ) <nl> + endif ( ) <nl> + endif ( ) <nl> + <nl> # Project <nl> project ( tensorflow C CXX ) <nl> <nl> mmm a / tensorflow / contrib / cmake / README . md <nl> ppp b / tensorflow / contrib / cmake / README . md <nl> ops or APIs . <nl> Step - by - step Windows build <nl> = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> <nl> - 1 . Install the prerequisites detailed above , and set up your environment . <nl> - <nl> - * The following commands assume that you are using the Windows Command <nl> - Prompt ( ` cmd . exe ` ) . You will need to set up your environment to use the <nl> - appropriate toolchain , i . e . the 64 - bit tools . ( Some of the binary targets <nl> - we will build are too large for the 32 - bit tools , and they will fail with <nl> - out - of - memory errors . ) The typical command to do set up your <nl> - environment is : <nl> - <nl> - ` ` ` <nl> - D : \ temp > " C : \ Program Files ( x86 ) \ Microsoft Visual Studio 14 . 0 \ VC \ bin \ amd64 \ vcvarsall . bat " <nl> - ` ` ` <nl> - <nl> - * When building with GPU support after installing the CUDNN zip file from NVidia , append its <nl> - bin directory to your PATH environment variable . <nl> - In case TensorFlow fails to find the CUDA dll ' s during initialization , check your PATH environment variable . <nl> - It should contain the directory of the CUDA dlls and the directory of the CUDNN dll . <nl> - For example : <nl> - <nl> - ` ` ` <nl> - D : \ Program Files \ NVIDIA GPU Computing Toolkit \ CUDA \ v8 . 0 \ bin <nl> - D : \ local \ cuda \ bin <nl> - ` ` ` <nl> - <nl> - * When building with MKL support after installing [ MKL ] ( https : / / software . intel . com / en - us / mkl ) from INTEL , append its bin directories to your PATH environment variable . <nl> - <nl> - In case TensorFlow fails to find the MKL dll ' s during initialization , check your PATH environment variable . <nl> - It should contain the directory of the MKL dlls . For example : <nl> - <nl> - ` ` ` <nl> - D : \ Tools \ IntelSWTools \ compilers_and_libraries \ windows \ redist \ intel64 \ mkl <nl> - D : \ Tools \ IntelSWTools \ compilers_and_libraries \ windows \ redist \ intel64 \ compiler <nl> - D : \ Tools \ IntelSWTools \ compilers_and_libraries \ windows \ redist \ intel64 \ tbb \ vc_mt <nl> - ` ` ` <nl> - <nl> - <nl> - * We assume that ` cmake ` and ` git ` are installed and in your ` % PATH % ` . If <nl> - for example ` cmake ` is not in your path and it is installed in <nl> - ` C : \ Program Files ( x86 ) \ CMake \ bin \ cmake . 
exe ` , you can add this directory <nl> - to your ` % PATH % ` as follows : <nl> - <nl> - ` ` ` <nl> - D : \ temp > set PATH = " % PATH % ; C : \ Program Files ( x86 ) \ CMake \ bin \ cmake . exe " <nl> - ` ` ` <nl> - <nl> - 2 . Clone the TensorFlow repository and create a working directory for your <nl> - build : <nl> - <nl> - ` ` ` <nl> - D : \ temp > git clone https : / / github . com / tensorflow / tensorflow . git <nl> - D : \ temp > cd tensorflow \ tensorflow \ contrib \ cmake <nl> - D : \ temp \ tensorflow \ tensorflow \ contrib \ cmake > mkdir build <nl> - D : \ temp \ tensorflow \ tensorflow \ contrib \ cmake > cd build <nl> - D : \ temp \ tensorflow \ tensorflow \ contrib \ cmake \ build > <nl> - ` ` ` <nl> - <nl> - 3 . Invoke CMake to create Visual Studio solution and project files . <nl> - <nl> - * * N . B . * * This assumes that ` cmake . exe ` is in your ` % PATH % ` environment <nl> - variable . The other paths are for illustrative purposes only , and may <nl> - be different on your platform . The ` ^ ` character is a line continuation <nl> - and must be the last character on each line . <nl> - <nl> - ` ` ` <nl> - D : \ . . . \ build > cmake . . - A x64 - DCMAKE_BUILD_TYPE = Release ^ <nl> - More ? - DSWIG_EXECUTABLE = C : / tools / swigwin - 3 . 0 . 10 / swig . exe ^ <nl> - More ? - DPYTHON_EXECUTABLE = C : / Users / % USERNAME % / AppData / Local / Continuum / Anaconda3 / python . exe ^ <nl> - More ? - DPYTHON_LIBRARIES = C : / Users / % USERNAME % / AppData / Local / Continuum / Anaconda3 / libs / python35 . lib <nl> - ` ` ` <nl> - To build with GPU support add " ^ " at the end of the last line above following with : <nl> - ` ` ` <nl> - More ? - Dtensorflow_ENABLE_GPU = ON ^ <nl> - More ? - DCUDNN_HOME = " D : \ . . . \ cudnn " <nl> - ` ` ` <nl> - To build with MKL support add " ^ " at the end of the last line above following with : <nl> - <nl> - ` ` ` <nl> - More ? - Dtensorflow_ENABLE_MKL_SUPPORT = ON ^ <nl> - More ? - DMKL_HOME = " D : \ . . . \ compilers_and_libraries " <nl> - ` ` ` <nl> - <nl> - To enable SIMD instructions with MSVC , as AVX and SSE , define it as follows : <nl> - <nl> - ` ` ` <nl> - More ? - Dtensorflow_WIN_CPU_SIMD_OPTIONS = / arch : AVX <nl> - ` ` ` <nl> - <nl> - Note that the ` - DCMAKE_BUILD_TYPE = Release ` flag must match the build <nl> - configuration that you choose when invoking ` msbuild ` . The known - good <nl> - values are ` Release ` and ` RelWithDebInfo ` . The ` Debug ` build type is <nl> - not currently supported , because it relies on a ` Debug ` library for <nl> - Python ( ` python35d . lib ` ) that is not distributed by default . <nl> - <nl> - There are various options that can be specified when generating the <nl> - solution and project files : <nl> - <nl> - * ` - DCMAKE_BUILD_TYPE = ( Release | RelWithDebInfo ) ` : Note that the <nl> - ` CMAKE_BUILD_TYPE ` option must match the build configuration that you <nl> - choose when invoking MSBuild in step 4 . The known - good values are <nl> - ` Release ` and ` RelWithDebInfo ` . The ` Debug ` build type is not currently <nl> - supported , because it relies on a ` Debug ` library for Python <nl> - ( ` python35d . lib ` ) that is not distributed by default . <nl> - <nl> - * ` - Dtensorflow_BUILD_ALL_KERNELS = ( ON | OFF ) ` . Defaults to ` ON ` . You can <nl> - build a small subset of the kernels for a faster build by setting this <nl> - option to ` OFF ` . <nl> - <nl> - * ` - Dtensorflow_BUILD_CC_EXAMPLE = ( ON | OFF ) ` . Defaults to ` ON ` . 
Generate <nl> - project files for a simple C + + <nl> - [ example training program ] ( https : / / github . com / tensorflow / tensorflow / blob / master / tensorflow / cc / tutorials / example_trainer . cc ) . <nl> - <nl> - * ` - Dtensorflow_BUILD_PYTHON_BINDINGS = ( ON | OFF ) ` . Defaults to ` ON ` . Generate <nl> - project files for building a PIP package containing the TensorFlow runtime <nl> - and its Python bindings . <nl> - <nl> - * ` - Dtensorflow_ENABLE_GRPC_SUPPORT = ( ON | OFF ) ` . Defaults to ` ON ` . Include <nl> - gRPC support and the distributed client and server code in the TensorFlow <nl> - runtime . <nl> - <nl> - * ` - Dtensorflow_ENABLE_SSL_SUPPORT = ( ON | OFF ) ` . Defaults to ` OFF ` . Include <nl> - SSL support ( for making secure HTTP requests ) in the TensorFlow runtime . <nl> - This support is incomplete , and will be used for Google Cloud Storage <nl> - support . <nl> - <nl> - * ` - Dtensorflow_ENABLE_GPU = ( ON | OFF ) ` . Defaults to ` OFF ` . Include <nl> - GPU support . If GPU is enabled you need to install the CUDA 8 . 0 Toolkit and CUDNN 5 . 1 . <nl> - CMake will expect the location of CUDNN in - DCUDNN_HOME = path_you_unzipped_cudnn . <nl> - <nl> - * ` - Dtensorflow_BUILD_CC_TESTS = ( ON | OFF ) ` . Defaults to ` OFF ` . This builds cc unit tests . <nl> - There are many of them and building will take a few hours . <nl> - After cmake , build and execute the tests with <nl> - ` ` ` <nl> - MSBuild / p : Configuration = RelWithDebInfo ALL_BUILD . vcxproj <nl> - ctest - C RelWithDebInfo <nl> - ` ` ` <nl> - <nl> - * ` - Dtensorflow_BUILD_PYTHON_TESTS = ( ON | OFF ) ` . Defaults to ` OFF ` . This enables python kernel tests . <nl> - After building the python wheel , you need to install the new wheel before running the tests . <nl> - To execute the tests , use <nl> - ` ` ` <nl> - ctest - C RelWithDebInfo <nl> - ` ` ` <nl> - <nl> - * ` - Dtensorflow_BUILD_MORE_PYTHON_TESTS = ( ON | OFF ) ` . Defaults to ` OFF ` . This enables python tests on <nl> - serveral major packages . This option is only valid if this and tensorflow_BUILD_PYTHON_TESTS are both set as ` ON ` . <nl> - After building the python wheel , you need to install the new wheel before running the tests . <nl> - To execute the tests , use <nl> - ` ` ` <nl> - ctest - C RelWithDebInfo <nl> - ` ` ` <nl> - <nl> - * ` - Dtensorflow_ENABLE_MKL_SUPPORT = ( ON | OFF ) ` . Defaults to ` OFF ` . Include MKL support . If MKL is enabled you need to install the [ Intel Math Kernal Library ] ( https : / / software . intel . com / en - us / mkl ) . <nl> - CMake will expect the location of MKL in - MKL_HOME = path_you_install_mkl . <nl> - <nl> - * ` - Dtensorflow_ENABLE_MKLDNN_SUPPORT = ( ON | OFF ) ` . Defaults to ` OFF ` . Include MKL DNN support . MKL DNN is [ Intel ( R ) Math Kernel Library for Deep Neural Networks ( Intel ( R ) MKL - DNN ) ] ( https : / / github . com / intel / mkl - dnn ) . You have to add ` - Dtensorflow_ENABLE_MKL_SUPPORT = ON ` before including MKL DNN support . <nl> - <nl> - <nl> - 4 . Invoke MSBuild to build TensorFlow . <nl> - <nl> - To build the C + + example program , which will be created as a ` . exe ` <nl> - executable in the subdirectory ` . \ Release ` : <nl> - <nl> - ` ` ` <nl> - D : \ . . . \ build > MSBuild / p : Configuration = Release tf_tutorials_example_trainer . vcxproj <nl> - D : \ . . . \ build > Release \ tf_tutorials_example_trainer . exe <nl> - ` ` ` <nl> - <nl> - To build the PIP package , which will be created as a ` . whl ` file in the <nl> - subdirectory ` . 
\ tf_python \ dist ` : <nl> - <nl> - ` ` ` <nl> - D : \ . . . \ build > MSBuild / p : Configuration = Release tf_python_build_pip_package . vcxproj <nl> - ` ` ` <nl> - <nl> + 1 . Install the prerequisites detailed above , and set up your environment . <nl> + <nl> + * When building with GPU support after installing the CUDNN zip file from <nl> + NVidia , append its bin directory to your PATH environment variable . In <nl> + case TensorFlow fails to find the CUDA dll ' s during initialization , <nl> + check your PATH environment variable . It should contain the directory of <nl> + the CUDA dlls and the directory of the CUDNN dll . For example : <nl> + <nl> + ` ` ` <nl> + D : \ Program Files \ NVIDIA GPU Computing Toolkit \ CUDA \ v8 . 0 \ bin <nl> + D : \ local \ cuda \ bin <nl> + ` ` ` <nl> + <nl> + * When building with MKL support after installing <nl> + [ MKL ] ( https : / / software . intel . com / en - us / mkl ) from INTEL , append its bin <nl> + directories to your PATH environment variable . <nl> + <nl> + In case TensorFlow fails to find the MKL dll ' s during initialization , <nl> + check your PATH environment variable . It should contain the directory of <nl> + the MKL dlls . For example : <nl> + <nl> + ` ` ` <nl> + D : \ Tools \ IntelSWTools \ compilers_and_libraries \ windows \ redist \ intel64 \ mkl <nl> + D : \ Tools \ IntelSWTools \ compilers_and_libraries \ windows \ redist \ intel64 \ compiler <nl> + D : \ Tools \ IntelSWTools \ compilers_and_libraries \ windows \ redist \ intel64 \ tbb \ vc_mt <nl> + ` ` ` <nl> + <nl> + * We assume that ` cmake ` and ` git ` are installed and in your ` % PATH % ` . If <nl> + for example ` cmake ` is not in your path and it is installed in <nl> + ` C : \ Program Files ( x86 ) \ CMake \ bin \ cmake . exe ` , you can add this directory <nl> + to your ` % PATH % ` as follows : <nl> + <nl> + ` ` ` <nl> + D : \ temp > set PATH = " % PATH % ; C : \ Program Files ( x86 ) \ CMake \ bin \ cmake . exe " <nl> + ` ` ` <nl> + <nl> + 2 . Clone the TensorFlow repository and create a working directory for your <nl> + build : <nl> + <nl> + ` ` ` <nl> + D : \ temp > git clone https : / / github . com / tensorflow / tensorflow . git <nl> + D : \ temp > cd tensorflow \ tensorflow \ contrib \ cmake <nl> + D : \ temp \ tensorflow \ tensorflow \ contrib \ cmake > mkdir build <nl> + D : \ temp \ tensorflow \ tensorflow \ contrib \ cmake > cd build <nl> + D : \ temp \ tensorflow \ tensorflow \ contrib \ cmake \ build > <nl> + ` ` ` <nl> + <nl> + 3 . Invoke CMake to create Visual Studio solution and project files . <nl> + <nl> + * * N . B . * * This assumes that ` cmake . exe ` is in your ` % PATH % ` environment <nl> + variable . The other paths are for illustrative purposes only , and may be <nl> + different on your platform . The ` ^ ` character is a line continuation and <nl> + must be the last character on each line . <nl> + <nl> + ` ` ` <nl> + D : \ . . . \ build > cmake . . - A x64 - Thost = x64 - DCMAKE_BUILD_TYPE = Release ^ <nl> + More ? - DSWIG_EXECUTABLE = C : / tools / swigwin - 3 . 0 . 10 / swig . exe ^ <nl> + More ? - DPYTHON_EXECUTABLE = C : / Users / % USERNAME % / AppData / Local / Continuum / Anaconda3 / python . exe ^ <nl> + More ? - DPYTHON_LIBRARIES = C : / Users / % USERNAME % / AppData / Local / Continuum / Anaconda3 / libs / python35 . lib <nl> + ` ` ` <nl> + <nl> + To build with GPU support add " ^ " at the end of the last line above <nl> + following with : ` More ? - Dtensorflow_ENABLE_GPU = ON ^ More ? <nl> + - DCUDNN_HOME = " D : \ . . . 
\ cudnn " ` To build with MKL support add " ^ " at the end of <nl> + the last line above following with : <nl> + <nl> + ` ` ` <nl> + More ? - Dtensorflow_ENABLE_MKL_SUPPORT = ON ^ <nl> + More ? - DMKL_HOME = " D : \ . . . \ compilers_and_libraries " <nl> + ` ` ` <nl> + <nl> + To enable SIMD instructions with MSVC , as AVX and SSE , define it as follows : <nl> + <nl> + ` ` ` <nl> + More ? - Dtensorflow_WIN_CPU_SIMD_OPTIONS = / arch : AVX <nl> + ` ` ` <nl> + <nl> + Note that the ` - DCMAKE_BUILD_TYPE = Release ` flag must match the build <nl> + configuration that you choose when invoking ` msbuild ` . The known - good values <nl> + are ` Release ` and ` RelWithDebInfo ` . The ` Debug ` build type is not currently <nl> + supported , because it relies on a ` Debug ` library for Python <nl> + ( ` python35d . lib ` ) that is not distributed by default . <nl> + <nl> + The ` - Thost = x64 ` flag will ensure that the 64 bit compiler and linker is <nl> + used when building . Without this flag , MSBuild will use the 32 bit toolchain <nl> + which is prone to compile errors such as " compiler out of heap space " . <nl> + <nl> + There are various options that can be specified when generating the solution <nl> + and project files : <nl> + <nl> + * ` - DCMAKE_BUILD_TYPE = ( Release | RelWithDebInfo ) ` : Note that the <nl> + ` CMAKE_BUILD_TYPE ` option must match the build configuration that you <nl> + choose when invoking MSBuild in step 4 . The known - good values are <nl> + ` Release ` and ` RelWithDebInfo ` . The ` Debug ` build type is not currently <nl> + supported , because it relies on a ` Debug ` library for Python <nl> + ( ` python35d . lib ` ) that is not distributed by default . <nl> + <nl> + * ` - Dtensorflow_BUILD_ALL_KERNELS = ( ON | OFF ) ` . Defaults to ` ON ` . You can <nl> + build a small subset of the kernels for a faster build by setting this <nl> + option to ` OFF ` . <nl> + <nl> + * ` - Dtensorflow_BUILD_CC_EXAMPLE = ( ON | OFF ) ` . Defaults to ` ON ` . Generate <nl> + project files for a simple C + + <nl> + [ example training program ] ( https : / / github . com / tensorflow / tensorflow / blob / master / tensorflow / cc / tutorials / example_trainer . cc ) . <nl> + <nl> + * ` - Dtensorflow_BUILD_PYTHON_BINDINGS = ( ON | OFF ) ` . Defaults to ` ON ` . <nl> + Generate project files for building a PIP package containing the <nl> + TensorFlow runtime and its Python bindings . <nl> + <nl> + * ` - Dtensorflow_ENABLE_GRPC_SUPPORT = ( ON | OFF ) ` . Defaults to ` ON ` . Include <nl> + gRPC support and the distributed client and server code in the <nl> + TensorFlow runtime . <nl> + <nl> + * ` - Dtensorflow_ENABLE_SSL_SUPPORT = ( ON | OFF ) ` . Defaults to ` OFF ` . Include <nl> + SSL support ( for making secure HTTP requests ) in the TensorFlow runtime . <nl> + This support is incomplete , and will be used for Google Cloud Storage <nl> + support . <nl> + <nl> + * ` - Dtensorflow_ENABLE_GPU = ( ON | OFF ) ` . Defaults to ` OFF ` . Include GPU <nl> + support . If GPU is enabled you need to install the CUDA 8 . 0 Toolkit and <nl> + CUDNN 5 . 1 . CMake will expect the location of CUDNN in <nl> + - DCUDNN_HOME = path_you_unzipped_cudnn . <nl> + <nl> + * ` - Dtensorflow_BUILD_CC_TESTS = ( ON | OFF ) ` . Defaults to ` OFF ` . This builds <nl> + cc unit tests . There are many of them and building will take a few <nl> + hours . After cmake , build and execute the tests with ` MSBuild <nl> + / p : Configuration = RelWithDebInfo ALL_BUILD . 
vcxproj ctest - C <nl> + RelWithDebInfo ` <nl> + <nl> + * ` - Dtensorflow_BUILD_PYTHON_TESTS = ( ON | OFF ) ` . Defaults to ` OFF ` . This <nl> + enables python kernel tests . After building the python wheel , you need <nl> + to install the new wheel before running the tests . To execute the tests , <nl> + use ` ctest - C RelWithDebInfo ` <nl> + <nl> + * ` - Dtensorflow_BUILD_MORE_PYTHON_TESTS = ( ON | OFF ) ` . Defaults to ` OFF ` . This <nl> + enables python tests on serveral major packages . This option is only <nl> + valid if this and tensorflow_BUILD_PYTHON_TESTS are both set as ` ON ` . <nl> + After building the python wheel , you need to install the new wheel <nl> + before running the tests . To execute the tests , use ` ctest - C <nl> + RelWithDebInfo ` <nl> + <nl> + * ` - Dtensorflow_ENABLE_MKL_SUPPORT = ( ON | OFF ) ` . Defaults to ` OFF ` . Include <nl> + MKL support . If MKL is enabled you need to install the <nl> + [ Intel Math Kernal Library ] ( https : / / software . intel . com / en - us / mkl ) . CMake <nl> + will expect the location of MKL in - MKL_HOME = path_you_install_mkl . <nl> + <nl> + * ` - Dtensorflow_ENABLE_MKLDNN_SUPPORT = ( ON | OFF ) ` . Defaults to ` OFF ` . <nl> + Include MKL DNN support . MKL DNN is [ Intel ( R ) Math Kernel Library for <nl> + Deep Neural Networks ( Intel ( R ) <nl> + MKL - DNN ) ] ( https : / / github . com / intel / mkl - dnn ) . You have to add <nl> + ` - Dtensorflow_ENABLE_MKL_SUPPORT = ON ` before including MKL DNN support . <nl> + <nl> + 4 . Invoke MSBuild to build TensorFlow . <nl> + <nl> + Set up the path to find MSbuild : ` D : \ temp > " C : \ Program Files ( x86 ) \ Microsoft <nl> + Visual Studio 14 . 0 \ VC \ bin \ amd64 \ vcvarsall . bat " ` <nl> + <nl> + To build the C + + example program , which will be created as a ` . exe ` <nl> + executable in the subdirectory ` . \ Release ` : <nl> + <nl> + ` ` ` <nl> + D : \ . . . \ build > MSBuild / p : Configuration = Release tf_tutorials_example_trainer . vcxproj <nl> + D : \ . . . \ build > Release \ tf_tutorials_example_trainer . exe <nl> + ` ` ` <nl> + <nl> + To build the PIP package , which will be created as a ` . whl ` file in the <nl> + subdirectory ` . \ tf_python \ dist ` : <nl> + <nl> + ` ` ` <nl> + D : \ . . . \ build > MSBuild / p : Configuration = Release tf_python_build_pip_package . vcxproj <nl> + ` ` ` <nl> <nl> Linux Continuous Integration build <nl> = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl>
Merge pull request from smistad:cmake-windows-host-64
tensorflow/tensorflow
2c2adc92b8bc7235e21b6455e768829d9a16fbf4
2018-10-08T14:58:06Z
mmm a / xbmc / utils / Variant . cpp <nl> ppp b / xbmc / utils / Variant . cpp <nl> string trimRight ( const string & str ) <nl> return tmp ; <nl> } <nl> <nl> + wstring trimRight ( const wstring & str ) <nl> + { <nl> + wstring tmp = str ; <nl> + / / find_last_not_of will return string : : npos ( which is defined as - 1 ) <nl> + / / or a value between 0 and size ( ) - 1 = > find_last_not_of ( ) + 1 will <nl> + / / always result in a valid index between 0 and size ( ) <nl> + tmp . erase ( tmp . find_last_not_of ( L " \ n \ r \ t " ) + 1 ) ; <nl> + <nl> + return tmp ; <nl> + } <nl> + <nl> int64_t str2int64 ( const string & str , int64_t fallback / * = 0 * / ) <nl> { <nl> char * end = NULL ; <nl> int64_t str2int64 ( const string & str , int64_t fallback / * = 0 * / ) <nl> return fallback ; <nl> } <nl> <nl> + int64_t str2int64 ( const wstring & str , int64_t fallback / * = 0 * / ) <nl> + { <nl> + wchar_t * end = NULL ; <nl> + int64_t result = wcstol ( trimRight ( str ) . c_str ( ) , & end , 0 ) ; <nl> + if ( end = = NULL | | * end = = ' \ 0 ' ) <nl> + return result ; <nl> + <nl> + return fallback ; <nl> + } <nl> + <nl> uint64_t str2uint64 ( const string & str , uint64_t fallback / * = 0 * / ) <nl> { <nl> char * end = NULL ; <nl> uint64_t str2uint64 ( const string & str , uint64_t fallback / * = 0 * / ) <nl> return fallback ; <nl> } <nl> <nl> + uint64_t str2uint64 ( const wstring & str , uint64_t fallback / * = 0 * / ) <nl> + { <nl> + wchar_t * end = NULL ; <nl> + uint64_t result = wcstoul ( trimRight ( str ) . c_str ( ) , & end , 0 ) ; <nl> + if ( end = = NULL | | * end = = ' \ 0 ' ) <nl> + return result ; <nl> + <nl> + return fallback ; <nl> + } <nl> + <nl> double str2double ( const string & str , double fallback / * = 0 . 0 * / ) <nl> { <nl> char * end = NULL ; <nl> double str2double ( const string & str , double fallback / * = 0 . 0 * / ) <nl> return fallback ; <nl> } <nl> <nl> + double str2double ( const wstring & str , double fallback / * = 0 . 0 * / ) <nl> + { <nl> + wchar_t * end = NULL ; <nl> + double result = wcstod ( trimRight ( str ) . c_str ( ) , & end ) ; <nl> + if ( end = = NULL | | * end = = ' \ 0 ' ) <nl> + return result ; <nl> + <nl> + return fallback ; <nl> + } <nl> + <nl> CVariant CVariant : : ConstNullVariant = CVariant : : VariantTypeConstNull ; <nl> <nl> CVariant : : CVariant ( VariantType type ) <nl> CVariant : : CVariant ( VariantType type ) <nl> m_data . dvalue = 0 . 
0 ; <nl> break ; <nl> case VariantTypeString : <nl> + case VariantTypeWideString : <nl> case VariantTypeArray : <nl> case VariantTypeObject : <nl> default : <nl> CVariant : : CVariant ( const string & str ) <nl> m_string = str ; <nl> } <nl> <nl> + CVariant : : CVariant ( const wchar_t * str ) <nl> + { <nl> + m_type = VariantTypeWideString ; <nl> + m_wstring = str ; <nl> + } <nl> + <nl> + CVariant : : CVariant ( const wchar_t * str , unsigned int length ) <nl> + { <nl> + m_type = VariantTypeWideString ; <nl> + m_wstring = wstring ( str , length ) ; <nl> + } <nl> + <nl> + CVariant : : CVariant ( const wstring & str ) <nl> + { <nl> + m_type = VariantTypeWideString ; <nl> + m_wstring = str ; <nl> + } <nl> + <nl> CVariant : : CVariant ( const std : : vector < std : : string > & strArray ) <nl> { <nl> m_type = VariantTypeArray ; <nl> bool CVariant : : isString ( ) const <nl> return m_type = = VariantTypeString ; <nl> } <nl> <nl> + bool CVariant : : isWideString ( ) const <nl> + { <nl> + return m_type = = VariantTypeWideString ; <nl> + } <nl> + <nl> bool CVariant : : isArray ( ) const <nl> { <nl> return m_type = = VariantTypeArray ; <nl> int64_t CVariant : : asInteger ( int64_t fallback ) const <nl> return ( int64_t ) m_data . dvalue ; <nl> case VariantTypeString : <nl> return str2int64 ( m_string , fallback ) ; <nl> + case VariantTypeWideString : <nl> + return str2int64 ( m_wstring , fallback ) ; <nl> default : <nl> return fallback ; <nl> } <nl> uint64_t CVariant : : asUnsignedInteger ( uint64_t fallback ) const <nl> return ( uint64_t ) m_data . dvalue ; <nl> case VariantTypeString : <nl> return str2uint64 ( m_string , fallback ) ; <nl> + case VariantTypeWideString : <nl> + return str2uint64 ( m_wstring , fallback ) ; <nl> default : <nl> return fallback ; <nl> } <nl> double CVariant : : asDouble ( double fallback ) const <nl> return ( double ) m_data . unsignedinteger ; <nl> case VariantTypeString : <nl> return str2double ( m_string , fallback ) ; <nl> + case VariantTypeWideString : <nl> + return str2double ( m_wstring , fallback ) ; <nl> default : <nl> return fallback ; <nl> } <nl> float CVariant : : asFloat ( float fallback ) const <nl> return ( float ) m_data . unsignedinteger ; <nl> case VariantTypeString : <nl> return ( float ) str2double ( m_string , fallback ) ; <nl> + case VariantTypeWideString : <nl> + return ( float ) str2double ( m_wstring , fallback ) ; <nl> default : <nl> return fallback ; <nl> } <nl> bool CVariant : : asBoolean ( bool fallback ) const <nl> if ( m_string . empty ( ) | | m_string . compare ( " 0 " ) = = 0 | | m_string . compare ( " false " ) = = 0 ) <nl> return false ; <nl> return true ; <nl> + case VariantTypeWideString : <nl> + if ( m_wstring . empty ( ) | | m_wstring . compare ( L " 0 " ) = = 0 | | m_wstring . compare ( L " false " ) = = 0 ) <nl> + return false ; <nl> + return true ; <nl> default : <nl> return fallback ; <nl> } <nl> std : : string CVariant : : asString ( const std : : string & fallback / * = " " * / ) const <nl> return fallback ; <nl> } <nl> <nl> + std : : wstring CVariant : : asWideString ( const std : : wstring & fallback / * = L " " * / ) const <nl> + { <nl> + switch ( m_type ) <nl> + { <nl> + case VariantTypeWideString : <nl> + return m_wstring ; <nl> + case VariantTypeBoolean : <nl> + return m_data . boolean ? 
L " true " : L " false " ; <nl> + case VariantTypeInteger : <nl> + case VariantTypeUnsignedInteger : <nl> + case VariantTypeDouble : <nl> + { <nl> + std : : wostringstream strStream ; <nl> + if ( m_type = = VariantTypeInteger ) <nl> + strStream < < m_data . integer ; <nl> + else if ( m_type = = VariantTypeUnsignedInteger ) <nl> + strStream < < m_data . unsignedinteger ; <nl> + else <nl> + strStream < < m_data . dvalue ; <nl> + return strStream . str ( ) ; <nl> + break ; <nl> + } <nl> + default : <nl> + return fallback ; <nl> + } <nl> + <nl> + return fallback ; <nl> + } <nl> + <nl> CVariant & CVariant : : operator [ ] ( const std : : string & key ) <nl> { <nl> if ( m_type = = VariantTypeNull ) <nl> CVariant & CVariant : : operator = ( const CVariant & rhs ) <nl> case VariantTypeString : <nl> m_string = rhs . m_string ; <nl> break ; <nl> + case VariantTypeWideString : <nl> + m_wstring = rhs . m_wstring ; <nl> + break ; <nl> case VariantTypeArray : <nl> m_array . assign ( rhs . m_array . begin ( ) , rhs . m_array . end ( ) ) ; <nl> break ; <nl> bool CVariant : : operator = = ( const CVariant & rhs ) const <nl> return m_data . dvalue = = rhs . m_data . dvalue ; <nl> case VariantTypeString : <nl> return m_string = = rhs . m_string ; <nl> + case VariantTypeWideString : <nl> + return m_wstring = = rhs . m_wstring ; <nl> case VariantTypeArray : <nl> return m_array = = rhs . m_array ; <nl> case VariantTypeObject : <nl> void CVariant : : swap ( CVariant & rhs ) <nl> VariantType temp_type = m_type ; <nl> VariantUnion temp_data = m_data ; <nl> string temp_string = m_string ; <nl> + wstring temp_wstring = m_wstring ; <nl> VariantArray temp_array = m_array ; <nl> VariantMap temp_map = m_map ; <nl> <nl> m_type = rhs . m_type ; <nl> m_data = rhs . m_data ; <nl> m_string = rhs . m_string ; <nl> + m_wstring = rhs . m_wstring ; <nl> m_array = rhs . m_array ; <nl> m_map = rhs . m_map ; <nl> <nl> rhs . m_type = temp_type ; <nl> rhs . m_data = temp_data ; <nl> rhs . m_string = temp_string ; <nl> + rhs . m_wstring = temp_wstring ; <nl> rhs . m_array = temp_array ; <nl> rhs . m_map = temp_map ; <nl> } <nl> unsigned int CVariant : : size ( ) const <nl> return m_array . size ( ) ; <nl> else if ( m_type = = VariantTypeString ) <nl> return m_string . size ( ) ; <nl> + else if ( m_type = = VariantTypeWideString ) <nl> + return m_wstring . size ( ) ; <nl> else <nl> return 0 ; <nl> } <nl> bool CVariant : : empty ( ) const <nl> return m_array . empty ( ) ; <nl> else if ( m_type = = VariantTypeString ) <nl> return m_string . empty ( ) ; <nl> + else if ( m_type = = VariantTypeWideString ) <nl> + return m_wstring . empty ( ) ; <nl> else <nl> return true ; <nl> } <nl> void CVariant : : clear ( ) <nl> m_array . clear ( ) ; <nl> else if ( m_type = = VariantTypeString ) <nl> m_string . clear ( ) ; <nl> + else if ( m_type = = VariantTypeWideString ) <nl> + m_wstring . clear ( ) ; <nl> } <nl> <nl> void CVariant : : erase ( const std : : string & key ) <nl> mmm a / xbmc / utils / Variant . h <nl> ppp b / xbmc / utils / Variant . h <nl> <nl> # include < vector > <nl> # include < string > <nl> # include < stdint . h > <nl> + # include < wchar . 
h > <nl> <nl> int64_t str2int64 ( const std : : string & str , int64_t fallback = 0 ) ; <nl> + int64_t str2int64 ( const std : : wstring & str , int64_t fallback = 0 ) ; <nl> uint64_t str2uint64 ( const std : : string & str , uint64_t fallback = 0 ) ; <nl> + uint64_t str2uint64 ( const std : : wstring & str , uint64_t fallback = 0 ) ; <nl> double str2double ( const std : : string & str , double fallback = 0 . 0 ) ; <nl> + double str2double ( const std : : wstring & str , double fallback = 0 . 0 ) ; <nl> <nl> class CVariant <nl> { <nl> class CVariant <nl> VariantTypeUnsignedInteger , <nl> VariantTypeBoolean , <nl> VariantTypeString , <nl> + VariantTypeWideString , <nl> VariantTypeDouble , <nl> VariantTypeArray , <nl> VariantTypeObject , <nl> class CVariant <nl> CVariant ( const char * str ) ; <nl> CVariant ( const char * str , unsigned int length ) ; <nl> CVariant ( const std : : string & str ) ; <nl> + CVariant ( const wchar_t * str ) ; <nl> + CVariant ( const wchar_t * str , unsigned int length ) ; <nl> + CVariant ( const std : : wstring & str ) ; <nl> CVariant ( const std : : vector < std : : string > & strArray ) ; <nl> CVariant ( const CVariant & variant ) ; <nl> <nl> class CVariant <nl> bool isUnsignedInteger ( ) const ; <nl> bool isBoolean ( ) const ; <nl> bool isString ( ) const ; <nl> + bool isWideString ( ) const ; <nl> bool isDouble ( ) const ; <nl> bool isArray ( ) const ; <nl> bool isObject ( ) const ; <nl> class CVariant <nl> uint64_t asUnsignedInteger ( uint64_t fallback = 0u ) const ; <nl> bool asBoolean ( bool fallback = false ) const ; <nl> std : : string asString ( const std : : string & fallback = " " ) const ; <nl> + std : : wstring asWideString ( const std : : wstring & fallback = L " " ) const ; <nl> double asDouble ( double fallback = 0 . 0 ) const ; <nl> float asFloat ( float fallback = 0 . 0f ) const ; <nl> <nl> class CVariant <nl> VariantType m_type ; <nl> VariantUnion m_data ; <nl> std : : string m_string ; <nl> + std : : wstring m_wstring ; <nl> VariantArray m_array ; <nl> VariantMap m_map ; <nl> } ; <nl>
CVariant: add wchar_t/wstring support (isWideString()/asWideString())
xbmc/xbmc
a2c89c12b52387cf32e54a1e7171815efeba671a
2012-06-09T07:58:13Z
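A hypothetical usage sketch of the wide-string support the xbmc diff above adds to CVariant; the include path and surrounding build setup are assumed, and the behavior shown follows the new wstring code paths (wchar_t constructor, isWideString(), the str2int64/str2double overloads, asBoolean(), empty()):

```cpp
#include <iostream>
#include "utils/Variant.h"   // assumed include path inside the xbmc tree

int main() {
    CVariant v(L"42");                           // new CVariant(const wchar_t*) constructor
    std::cout << v.isWideString() << "\n";       // 1: stored as VariantTypeWideString
    std::cout << v.asInteger() << "\n";          // 42, parsed via the new str2int64(std::wstring&) overload
    std::cout << v.asBoolean() << "\n";          // 1: non-empty and neither L"0" nor L"false"
    std::cout << CVariant(L"").empty() << "\n";  // 1: empty() now also inspects the wide string
    return 0;
}
```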
mmm a / brightray / brightray . gyp <nl> ppp b / brightray / brightray . gyp <nl> <nl> ' sources ' : [ <nl> ' browser / browser_client . cc ' , <nl> ' browser / browser_client . h ' , <nl> + ' browser / browser_client_mac . mm ' , <nl> ' browser / browser_context . cc ' , <nl> ' browser / browser_context . h ' , <nl> ' browser / browser_main_parts . cc ' , <nl> mmm a / brightray / browser / browser_client . h <nl> ppp b / brightray / browser / browser_client . h <nl> class BrowserClient : public content : : ContentBrowserClient { <nl> private : <nl> virtual content : : BrowserMainParts * CreateBrowserMainParts ( const content : : MainFunctionParams & ) OVERRIDE ; <nl> virtual net : : URLRequestContextGetter * CreateRequestContext ( content : : BrowserContext * , content : : ProtocolHandlerMap * ) OVERRIDE ; <nl> + virtual void ShowDesktopNotification ( <nl> + const content : : ShowDesktopNotificationHostMsgParams & , <nl> + int render_process_id , <nl> + int render_view_id , <nl> + bool worker ) OVERRIDE ; <nl> <nl> BrowserMainParts * browser_main_parts_ ; <nl> <nl> new file mode 100644 <nl> index 000000000000 . . 8cc4b5cd3e53 <nl> mmm / dev / null <nl> ppp b / brightray / browser / browser_client_mac . mm <nl> <nl> + # import " browser / browser_client . h " <nl> + <nl> + # import " base / strings / sys_string_conversions . h " <nl> + # import " content / public / common / show_desktop_notification_params . h " <nl> + <nl> + # import < Foundation / Foundation . h > <nl> + <nl> + namespace brightray { <nl> + <nl> + void BrowserClient : : ShowDesktopNotification ( <nl> + const content : : ShowDesktopNotificationHostMsgParams & params , <nl> + int render_process_id , <nl> + int render_view_id , <nl> + bool worker ) { <nl> + auto notification = [ [ NSUserNotification alloc ] init ] ; <nl> + notification . title = base : : SysUTF16ToNSString ( params . title ) ; <nl> + notification . informativeText = base : : SysUTF16ToNSString ( params . body ) ; <nl> + <nl> + [ NSUserNotificationCenter . defaultUserNotificationCenter deliverNotification : notification ] ; <nl> + [ notification release ] ; <nl> + } <nl> + <nl> + } <nl>
Add bare-bones HTML notifications support
electron/electron
08932531885d90c1641bfaf783d4070c8f400962
2013-03-28T21:50:07Z
mmm a / jstests / sharding / read_pref . js <nl> ppp b / jstests / sharding / read_pref . js <nl> <nl> / * * <nl> * Integration test for read preference and tagging . The more comprehensive unit test can be found <nl> - * in dbtests / replica_set_monitor_test . cpp . <nl> + * in dbtests / scanning_replica_set_monitor_test . cpp . <nl> * / <nl> <nl> / / Checking UUID consistency involves talking to a shard node , which in this test is shutdown <nl> mmm a / src / mongo / client / SConscript <nl> ppp b / src / mongo / client / SConscript <nl> clientDriverEnv . Library ( <nl> ' scanning_replica_set_monitor . cpp ' , <nl> ' streamable_replica_set_monitor . cpp ' , <nl> ' replica_set_monitor_manager . cpp ' , <nl> - env . Idlc ( ' replica_set_monitor_params . idl ' ) [ 0 ] , <nl> ' server_ping_monitor . cpp ' , <nl> ] , <nl> LIBDEPS = [ <nl> clientDriverEnv . Library ( <nl> ' $ BUILD_DIR / mongo / util / background_job ' , <nl> ' $ BUILD_DIR / mongo / util / md5 ' , <nl> ' $ BUILD_DIR / mongo / util / net / network ' , <nl> + ' replica_set_monitor_server_parameters ' , <nl> ' clientdriver_minimal ' , <nl> ' read_preference ' , <nl> ] , <nl> clientDriverEnv . Library ( <nl> ] <nl> ) <nl> <nl> + env . Library ( <nl> + target = ' replica_set_monitor_server_parameters ' , <nl> + source = [ <nl> + ' replica_set_monitor_server_parameters . cpp ' , <nl> + env . Idlc ( ' replica_set_monitor_server_parameters . idl ' ) [ 0 ] , <nl> + ] , <nl> + LIBDEPS_PRIVATE = [ <nl> + ' $ BUILD_DIR / mongo / idl / server_parameter ' , <nl> + ] <nl> + ) <nl> + <nl> env . Library ( <nl> target = ' async_client ' , <nl> source = [ <nl> env . CppUnitTest ( <nl> ' authenticate_test . cpp ' , <nl> ' connection_string_test . cpp ' , <nl> ' dbclient_cursor_test . cpp ' , <nl> - ' disable_streamable_rsm_flag_test . cpp ' , <nl> ' fetcher_test . cpp ' , <nl> ' index_spec_test . cpp ' , <nl> ' mongo_uri_test . cpp ' , <nl> ' read_preference_test . cpp ' , <nl> ' remote_command_retry_scheduler_test . cpp ' , <nl> + ' replica_set_monitor_server_parameters_test . cpp ' , <nl> ' scanning_replica_set_monitor_internal_test . cpp ' , <nl> ' scanning_replica_set_monitor_read_preference_test . cpp ' , <nl> ' scanning_replica_set_monitor_scan_test . cpp ' , <nl> env . CppUnitTest ( <nl> ' fetcher ' , <nl> ' read_preference ' , <nl> ' remote_command_retry_scheduler ' , <nl> + ' replica_set_monitor_protocol_test_fixture ' , <nl> + ] , <nl> + ) <nl> + <nl> + env . Library ( <nl> + target = ' replica_set_monitor_protocol_test_fixture ' , <nl> + source = [ <nl> + ' replica_set_monitor_protocol_test_fixture . cpp ' , <nl> + ] , <nl> + LIBDEPS = [ <nl> + ' $ BUILD_DIR / mongo / dbtests / mocklib ' , <nl> + ' clientdriver_network ' , <nl> ] , <nl> ) <nl> <nl> env . CppUnitTest ( <nl> env . CppUnitTest ( <nl> target = ' client_rs_test ' , <nl> source = [ <nl> - ' dbclient_rs_test . cpp ' <nl> + ' dbclient_rs_test . cpp ' , <nl> ] , <nl> LIBDEPS = [ <nl> ' $ BUILD_DIR / mongo / dbtests / mocklib ' , <nl> ' clientdriver_network ' , <nl> + ' replica_set_monitor_protocol_test_fixture ' , <nl> ] , <nl> ) <nl> <nl> mmm a / src / mongo / client / dbclient_rs_test . cpp <nl> ppp b / src / mongo / client / dbclient_rs_test . cpp <nl> <nl> # include " mongo / client / connpool . h " <nl> # include " mongo / client / dbclient_rs . h " <nl> # include " mongo / client / replica_set_monitor . h " <nl> - # include " mongo / client / replica_set_monitor_params_gen . h " <nl> + # include " mongo / client / replica_set_monitor_protocol_test_fixture . 
h " <nl> # include " mongo / db / jsobj . h " <nl> # include " mongo / dbtests / mock / mock_conn_registry . h " <nl> # include " mongo / dbtests / mock / mock_replica_set . h " <nl> BSONObj makeMetadata ( ReadPreference rp , TagSet tagSet ) { <nl> / * * <nl> * Ensures a global ServiceContext exists and the ScanningReplicaSetMonitor is used for each test . <nl> * / <nl> - class DBClientRSTest : public unittest : : Test { <nl> + class DBClientRSTest : public ReplicaSetMonitorProtocolTestFixture { <nl> protected : <nl> void setUp ( ) { <nl> auto serviceContext = ServiceContext : : make ( ) ; <nl> setGlobalServiceContext ( std : : move ( serviceContext ) ) ; <nl> <nl> - setDisableStreamableTrue ( ) ; <nl> + setRSMProtocol ( ReplicaSetMonitorProtocol : : kScanning ) ; <nl> } <nl> <nl> void tearDown ( ) { <nl> - resetDisableStreamable ( ) ; <nl> + unsetRSMProtocol ( ) ; <nl> } <nl> - <nl> - / * * <nl> - * Ensures the ScanningReplicaSetMonitor is used for the tests . <nl> - * / <nl> - void setDisableStreamableTrue ( ) { <nl> - const BSONObj newFlagParameter = BSON ( kDisableStreamableFlagName < < true ) ; <nl> - BSONObjIterator parameterIterator ( newFlagParameter ) ; <nl> - BSONElement newParameter = parameterIterator . next ( ) ; <nl> - const auto foundParameter = findDisableStreamableServerParameter ( ) ; <nl> - <nl> - uassertStatusOK ( foundParameter - > second - > set ( newParameter ) ) ; <nl> - ASSERT_TRUE ( disableStreamableReplicaSetMonitor . load ( ) ) ; <nl> - } <nl> - <nl> - / * * <nl> - * Restores the disableStreamableReplicaSetMonitor parameter to its default value . <nl> - * / <nl> - void resetDisableStreamable ( ) { <nl> - const auto defaultParameter = kDefaultParameter [ kDisableStreamableFlagName ] ; <nl> - const auto foundParameter = findDisableStreamableServerParameter ( ) ; <nl> - <nl> - uassertStatusOK ( foundParameter - > second - > set ( defaultParameter ) ) ; <nl> - } <nl> - <nl> - / * * <nl> - * Finds the disableStreamableReplicaSetMonitor ServerParameter . <nl> - * / <nl> - ServerParameter : : Map : : const_iterator findDisableStreamableServerParameter ( ) { <nl> - const ServerParameter : : Map & parameterMap = ServerParameterSet : : getGlobal ( ) - > getMap ( ) ; <nl> - return parameterMap . find ( kDisableStreamableFlagName ) ; <nl> - } <nl> - <nl> - static inline const std : : string kDisableStreamableFlagName = <nl> - " disableStreamableReplicaSetMonitor " ; <nl> - <nl> - / * * <nl> - * A BSONObj containing the default for the disableStreamableReplicaSetMonitor flag . <nl> - * / <nl> - static inline const BSONObj kDefaultParameter = <nl> - BSON ( kDisableStreamableFlagName < < disableStreamableReplicaSetMonitor . load ( ) ) ; <nl> } ; <nl> <nl> / * * <nl> deleted file mode 100644 <nl> index 0cad97280d56 . . 000000000000 <nl> mmm a / src / mongo / client / disable_streamable_rsm_flag_test . cpp <nl> ppp / dev / null <nl> <nl> - / * * <nl> - * Copyright ( C ) 2020 - present MongoDB , Inc . <nl> - * <nl> - * This program is free software : you can redistribute it and / or modify <nl> - * it under the terms of the Server Side Public License , version 1 , <nl> - * as published by MongoDB , Inc . <nl> - * <nl> - * This program is distributed in the hope that it will be useful , <nl> - * but WITHOUT ANY WARRANTY ; without even the implied warranty of <nl> - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the <nl> - * Server Side Public License for more details . 
<nl> - * <nl> - * You should have received a copy of the Server Side Public License <nl> - * along with this program . If not , see <nl> - * < http : / / www . mongodb . com / licensing / server - side - public - license > . <nl> - * <nl> - * As a special exception , the copyright holders give permission to link the <nl> - * code of portions of this program with the OpenSSL library under certain <nl> - * conditions as described in each individual source file and distribute <nl> - * linked combinations including the program with the OpenSSL library . You <nl> - * must comply with the Server Side Public License in all respects for <nl> - * all of the code used other than as permitted herein . If you modify file ( s ) <nl> - * with this exception , you may extend this exception to your version of the <nl> - * file ( s ) , but you are not obligated to do so . If you do not wish to do so , <nl> - * delete this exception statement from your version . If you delete this <nl> - * exception statement from all source files in the program , then also delete <nl> - * it in the license file . <nl> - * / <nl> - <nl> - # include " mongo / platform / basic . h " <nl> - <nl> - # include " mongo / client / replica_set_monitor . h " <nl> - # include " mongo / client / replica_set_monitor_params_gen . h " <nl> - # include " mongo / client / scanning_replica_set_monitor . h " <nl> - # include " mongo / client / streamable_replica_set_monitor . h " <nl> - # include " mongo / unittest / unittest . h " <nl> - # include " mongo / util / assert_util . h " <nl> - <nl> - namespace mongo { <nl> - namespace { <nl> - <nl> - class RSMDisableStreamableFlagTestFixture : public unittest : : Test { <nl> - protected : <nl> - void setUp ( ) { <nl> - setGlobalServiceContext ( ServiceContext : : make ( ) ) ; <nl> - ReplicaSetMonitor : : cleanup ( ) ; <nl> - } <nl> - <nl> - void tearDown ( ) { <nl> - unsetParameter ( ) ; <nl> - } <nl> - <nl> - / * * <nl> - * Sets the data of the disableStreamableReplicaSetMonitor parameter to flagValue . <nl> - * / <nl> - void setParameter ( bool flagValue ) { <nl> - const BSONObj newFlagParameter = BSON ( kDisableStreamableFlagName < < flagValue ) ; <nl> - BSONObjIterator parameterIterator ( newFlagParameter ) ; <nl> - BSONElement newParameter = parameterIterator . next ( ) ; <nl> - const auto foundParameter = findDisableStreamableServerParameter ( ) ; <nl> - <nl> - uassertStatusOK ( foundParameter - > second - > set ( newParameter ) ) ; <nl> - ASSERT_EQ ( flagValue , disableStreamableReplicaSetMonitor . load ( ) ) ; <nl> - } <nl> - <nl> - / * * <nl> - * Restores the disableStreamableReplicaSetMonitor parameter to its default value . <nl> - * / <nl> - void unsetParameter ( ) { <nl> - const auto defaultParameter = kDefaultParameter [ kDisableStreamableFlagName ] ; <nl> - const auto foundParameter = findDisableStreamableServerParameter ( ) ; <nl> - <nl> - uassertStatusOK ( foundParameter - > second - > set ( defaultParameter ) ) ; <nl> - } <nl> - <nl> - / * * <nl> - * Finds the disableStreamableReplicaSetMonitor ServerParameter . <nl> - * / <nl> - ServerParameter : : Map : : const_iterator findDisableStreamableServerParameter ( ) { <nl> - const ServerParameter : : Map & parameterMap = ServerParameterSet : : getGlobal ( ) - > getMap ( ) ; <nl> - return parameterMap . 
find ( kDisableStreamableFlagName ) ; <nl> - } <nl> - <nl> - static inline const std : : string kDisableStreamableFlagName = <nl> - " disableStreamableReplicaSetMonitor " ; <nl> - <nl> - / * * <nl> - * A BSONObj containing the default for the disableStreamableReplicaSetMonitor flag . <nl> - * / <nl> - static inline const BSONObj kDefaultParameter = <nl> - BSON ( kDisableStreamableFlagName < < disableStreamableReplicaSetMonitor . load ( ) ) ; <nl> - } ; <nl> - <nl> - / * * <nl> - * Checks that a ScanningReplicaSetMonitor is created when the disableStreamableReplicaSetMonitor <nl> - * flag is set to true . <nl> - * / <nl> - TEST_F ( RSMDisableStreamableFlagTestFixture , checkIsScanningIfDisableStreamableIsTrue ) { <nl> - setParameter ( true ) ; <nl> - auto uri = MongoURI : : parse ( " mongodb : / / a , b , c / ? replicaSet = name " ) ; <nl> - ASSERT_OK ( uri . getStatus ( ) ) ; <nl> - auto createdMonitor = ReplicaSetMonitor : : createIfNeeded ( uri . getValue ( ) ) ; <nl> - <nl> - / / If the created monitor does not point to a ScanningReplicaSetMonitor , the cast returns a <nl> - / / nullptr . <nl> - auto scanningMonitorCast = dynamic_cast < ScanningReplicaSetMonitor * > ( createdMonitor . get ( ) ) ; <nl> - ASSERT ( scanningMonitorCast ) ; <nl> - <nl> - auto streamableMonitorCast = dynamic_cast < StreamableReplicaSetMonitor * > ( createdMonitor . get ( ) ) ; <nl> - ASSERT_FALSE ( streamableMonitorCast ) ; <nl> - } <nl> - <nl> - / * * <nl> - * Checks that a StreamableReplicaSetMonitor is created when the the <nl> - * disableStreamableReplicaSetMonitor flag is set to false . <nl> - * <nl> - * TODO SERVER - 43332 : Once the StreamableReplicaSetMonitor is integrated into the codebase , this <nl> - * test should mirror the logic in checkIsScanningIfDisableStreamableIsTrue accordingly . <nl> - * / <nl> - TEST_F ( RSMDisableStreamableFlagTestFixture , checkIsStreamableIfDisableStreamableIsFalse ) { <nl> - setParameter ( false ) ; <nl> - auto uri = MongoURI : : parse ( " mongodb : / / a , b , c / ? replicaSet = name " ) ; <nl> - ASSERT_OK ( uri . getStatus ( ) ) ; <nl> - ASSERT_THROWS_CODE ( ReplicaSetMonitor : : createIfNeeded ( uri . getValue ( ) ) , DBException , 31451 ) ; <nl> - } <nl> - <nl> - } / / namespace <nl> - } / / namespace mongo <nl> mmm a / src / mongo / client / replica_set_monitor_manager . cpp <nl> ppp b / src / mongo / client / replica_set_monitor_manager . cpp <nl> <nl> # include " mongo / bson / bsonobjbuilder . h " <nl> # include " mongo / client / connection_string . h " <nl> # include " mongo / client / mongo_uri . h " <nl> - # include " mongo / client / replica_set_monitor_params_gen . h " <nl> + # include " mongo / client / replica_set_monitor_server_parameters . h " <nl> # include " mongo / client / scanning_replica_set_monitor . h " <nl> # include " mongo / client / streamable_replica_set_monitor . h " <nl> # include " mongo / executor / network_connection_hook . h " <nl> shared_ptr < ReplicaSetMonitor > ReplicaSetMonitorManager : : getOrCreateMonitor ( const <nl> <nl> LOGV2 ( 20186 , " Starting new replica set monitor for { uri } " , " uri " _attr = uri . toString ( ) ) ; <nl> <nl> - if ( disableStreamableReplicaSetMonitor . 
load ( ) ) { <nl> - auto newMonitor = std : : make_shared < ScanningReplicaSetMonitor > ( uri ) ; <nl> - _monitors [ setName ] = newMonitor ; <nl> + std : : shared_ptr < ReplicaSetMonitor > newMonitor ; <nl> + if ( gReplicaSetMonitorProtocol = = ReplicaSetMonitorProtocol : : kScanning ) { <nl> + newMonitor = std : : make_shared < ScanningReplicaSetMonitor > ( uri ) ; <nl> newMonitor - > init ( ) ; <nl> - return newMonitor ; <nl> } else { <nl> - uasserted ( 31451 , " StreamableReplicaSetMonitor is not yet implemented " ) ; <nl> + newMonitor = std : : make_shared < StreamableReplicaSetMonitor > ( uri ) ; <nl> } <nl> + _monitors [ setName ] = newMonitor ; <nl> + return newMonitor ; <nl> } <nl> <nl> vector < string > ReplicaSetMonitorManager : : getAllSetNames ( ) { <nl> new file mode 100644 <nl> index 000000000000 . . 1fa996ab6e16 <nl> mmm / dev / null <nl> ppp b / src / mongo / client / replica_set_monitor_protocol_test_fixture . cpp <nl> <nl> + / * * <nl> + * Copyright ( C ) 2020 - present MongoDB , Inc . <nl> + * <nl> + * This program is free software : you can redistribute it and / or modify <nl> + * it under the terms of the Server Side Public License , version 1 , <nl> + * as published by MongoDB , Inc . <nl> + * <nl> + * This program is distributed in the hope that it will be useful , <nl> + * but WITHOUT ANY WARRANTY ; without even the implied warranty of <nl> + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the <nl> + * Server Side Public License for more details . <nl> + * <nl> + * You should have received a copy of the Server Side Public License <nl> + * along with this program . If not , see <nl> + * < http : / / www . mongodb . com / licensing / server - side - public - license > . <nl> + * <nl> + * As a special exception , the copyright holders give permission to link the <nl> + * code of portions of this program with the OpenSSL library under certain <nl> + * conditions as described in each individual source file and distribute <nl> + * linked combinations including the program with the OpenSSL library . You <nl> + * must comply with the Server Side Public License in all respects for <nl> + * all of the code used other than as permitted herein . If you modify file ( s ) <nl> + * with this exception , you may extend this exception to your version of the <nl> + * file ( s ) , but you are not obligated to do so . If you do not wish to do so , <nl> + * delete this exception statement from your version . If you delete this <nl> + * exception statement from all source files in the program , then also delete <nl> + * it in the license file . <nl> + * / <nl> + # include " mongo / platform / basic . h " <nl> + <nl> + # include " mongo / client / replica_set_monitor_protocol_test_fixture . h " <nl> + # include " mongo / unittest / unittest . h " <nl> + # include " mongo / util / assert_util . h " <nl> + <nl> + namespace mongo { <nl> + void ReplicaSetMonitorProtocolTestFixture : : setRSMProtocol ( ReplicaSetMonitorProtocol protocol ) { <nl> + const BSONObj newParameterObj = BSON ( kRSMProtocolFieldName < < toString ( protocol ) ) ; <nl> + BSONObjIterator parameterIterator ( newParameterObj ) ; <nl> + BSONElement newParameter = parameterIterator . 
next ( ) ; <nl> + const auto foundParameter = findRSMProtocolServerParameter ( ) ; <nl> + <nl> + uassertStatusOK ( foundParameter - > second - > set ( newParameter ) ) ; <nl> + } <nl> + <nl> + void ReplicaSetMonitorProtocolTestFixture : : unsetRSMProtocol ( ) { <nl> + const auto defaultParameter = kDefaultParameter [ kRSMProtocolFieldName ] ; <nl> + const auto foundParameter = findRSMProtocolServerParameter ( ) ; <nl> + <nl> + uassertStatusOK ( foundParameter - > second - > set ( defaultParameter ) ) ; <nl> + } <nl> + <nl> + ServerParameter : : Map : : const_iterator <nl> + ReplicaSetMonitorProtocolTestFixture : : findRSMProtocolServerParameter ( ) { <nl> + const ServerParameter : : Map & parameterMap = ServerParameterSet : : getGlobal ( ) - > getMap ( ) ; <nl> + invariant ( parameterMap . size ( ) ) ; <nl> + return parameterMap . find ( kRSMProtocolFieldName ) ; <nl> + } <nl> + <nl> + } / / namespace mongo <nl> new file mode 100644 <nl> index 000000000000 . . 7c2c7d0ee9db <nl> mmm / dev / null <nl> ppp b / src / mongo / client / replica_set_monitor_protocol_test_fixture . h <nl> <nl> + / * * <nl> + * Copyright ( C ) 2020 - present MongoDB , Inc . <nl> + * <nl> + * This program is free software : you can redistribute it and / or modify <nl> + * it under the terms of the Server Side Public License , version 1 , <nl> + * as published by MongoDB , Inc . <nl> + * <nl> + * This program is distributed in the hope that it will be useful , <nl> + * but WITHOUT ANY WARRANTY ; without even the implied warranty of <nl> + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the <nl> + * Server Side Public License for more details . <nl> + * <nl> + * You should have received a copy of the Server Side Public License <nl> + * along with this program . If not , see <nl> + * < http : / / www . mongodb . com / licensing / server - side - public - license > . <nl> + * <nl> + * As a special exception , the copyright holders give permission to link the <nl> + * code of portions of this program with the OpenSSL library under certain <nl> + * conditions as described in each individual source file and distribute <nl> + * linked combinations including the program with the OpenSSL library . You <nl> + * must comply with the Server Side Public License in all respects for <nl> + * all of the code used other than as permitted herein . If you modify file ( s ) <nl> + * with this exception , you may extend this exception to your version of the <nl> + * file ( s ) , but you are not obligated to do so . If you do not wish to do so , <nl> + * delete this exception statement from your version . If you delete this <nl> + * exception statement from all source files in the program , then also delete <nl> + * it in the license file . <nl> + * / <nl> + <nl> + # pragma once <nl> + <nl> + # include " mongo / platform / basic . h " <nl> + <nl> + # include " mongo / client / replica_set_monitor_server_parameters . h " <nl> + # include " mongo / client / replica_set_monitor_server_parameters_gen . h " <nl> + # include " mongo / unittest / unittest . h " <nl> + # include " mongo / util / assert_util . h " <nl> + <nl> + namespace mongo { <nl> + <nl> + / * * <nl> + * Test wrapper for tests that need to set and unset the replicaSetMonitorProtocol server parameter . <nl> + * / <nl> + class ReplicaSetMonitorProtocolTestFixture : public unittest : : Test { <nl> + protected : <nl> + / * * <nl> + * Sets the replicaSetMonitorProtocol to ' protocol ' . 
<nl> + * / <nl> + void setRSMProtocol ( ReplicaSetMonitorProtocol protocol ) ; <nl> + <nl> + / * * <nl> + * Restores the replicaSetMonitorProtocol parameter to its default value . <nl> + * / <nl> + void unsetRSMProtocol ( ) ; <nl> + <nl> + / * * <nl> + * Finds the replicaSetMonitorProtocol ServerParameter . <nl> + * / <nl> + ServerParameter : : Map : : const_iterator findRSMProtocolServerParameter ( ) ; <nl> + <nl> + static inline const std : : string kRSMProtocolFieldName = " replicaSetMonitorProtocol " ; <nl> + <nl> + / * * <nl> + * A BSONObj containing the default for the replicaSetMonitorProtocol server parameter . <nl> + * / <nl> + const BSONObj kDefaultParameter = <nl> + BSON ( kRSMProtocolFieldName < < toString ( gReplicaSetMonitorProtocol ) ) ; <nl> + } ; <nl> + <nl> + } / / namespace mongo <nl> new file mode 100644 <nl> index 000000000000 . . 8fe3df2b8073 <nl> mmm / dev / null <nl> ppp b / src / mongo / client / replica_set_monitor_server_parameters . cpp <nl> <nl> + / * * <nl> + * Copyright ( C ) 2020 - present MongoDB , Inc . <nl> + * <nl> + * This program is free software : you can redistribute it and / or modify <nl> + * it under the terms of the Server Side Public License , version 1 , <nl> + * as published by MongoDB , Inc . <nl> + * <nl> + * This program is distributed in the hope that it will be useful , <nl> + * but WITHOUT ANY WARRANTY ; without even the implied warranty of <nl> + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the <nl> + * Server Side Public License for more details . <nl> + * <nl> + * You should have received a copy of the Server Side Public License <nl> + * along with this program . If not , see <nl> + * < http : / / www . mongodb . com / licensing / server - side - public - license > . <nl> + * <nl> + * As a special exception , the copyright holders give permission to link the <nl> + * code of portions of this program with the OpenSSL library under certain <nl> + * conditions as described in each individual source file and distribute <nl> + * linked combinations including the program with the OpenSSL library . You <nl> + * must comply with the Server Side Public License in all respects for <nl> + * all of the code used other than as permitted herein . If you modify file ( s ) <nl> + * with this exception , you may extend this exception to your version of the <nl> + * file ( s ) , but you are not obligated to do so . If you do not wish to do so , <nl> + * delete this exception statement from your version . If you delete this <nl> + * exception statement from all source files in the program , then also delete <nl> + * it in the license file . <nl> + * / <nl> + <nl> + # include " mongo / client / replica_set_monitor_server_parameters . h " <nl> + <nl> + # include " mongo / base / status . h " <nl> + # include " mongo / client / replica_set_monitor_server_parameters_gen . h " <nl> + # include " mongo / util / str . h " <nl> + <nl> + namespace mongo { <nl> + <nl> + ReplicaSetMonitorProtocol gReplicaSetMonitorProtocol { ReplicaSetMonitorProtocol : : kScanning } ; <nl> + <nl> + std : : string toString ( ReplicaSetMonitorProtocol protocol ) { <nl> + if ( protocol = = ReplicaSetMonitorProtocol : : kScanning ) { <nl> + return " scanning " ; <nl> + } else { <nl> + return " sdam " ; <nl> + } <nl> + } <nl> + <nl> + void RSMProtocolServerParameter : : append ( OperationContext * , <nl> + BSONObjBuilder & builder , <nl> + const std : : string & name ) { <nl> + builder . 
append ( name , toString ( gReplicaSetMonitorProtocol ) ) ; <nl> + } <nl> + <nl> + Status RSMProtocolServerParameter : : setFromString ( const std : : string & protocolStr ) { <nl> + if ( protocolStr = = toString ( ReplicaSetMonitorProtocol : : kScanning ) ) { <nl> + gReplicaSetMonitorProtocol = ReplicaSetMonitorProtocol : : kScanning ; <nl> + } else if ( protocolStr = = toString ( ReplicaSetMonitorProtocol : : kSdam ) ) { <nl> + gReplicaSetMonitorProtocol = ReplicaSetMonitorProtocol : : kSdam ; <nl> + } else { <nl> + return Status { ErrorCodes : : BadValue , <nl> + str : : stream ( ) <nl> + < < " Unrecognized replicaSetMonitorProtocol ' " < < protocolStr < < " ' " } ; <nl> + } <nl> + return Status : : OK ( ) ; <nl> + } <nl> + <nl> + } / / namespace mongo <nl> new file mode 100644 <nl> index 000000000000 . . 14d0ca7f6124 <nl> mmm / dev / null <nl> ppp b / src / mongo / client / replica_set_monitor_server_parameters . h <nl> <nl> + / * * <nl> + * Copyright ( C ) 2020 - present MongoDB , Inc . <nl> + * <nl> + * This program is free software : you can redistribute it and / or modify <nl> + * it under the terms of the Server Side Public License , version 1 , <nl> + * as published by MongoDB , Inc . <nl> + * <nl> + * This program is distributed in the hope that it will be useful , <nl> + * but WITHOUT ANY WARRANTY ; without even the implied warranty of <nl> + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the <nl> + * Server Side Public License for more details . <nl> + * <nl> + * You should have received a copy of the Server Side Public License <nl> + * along with this program . If not , see <nl> + * < http : / / www . mongodb . com / licensing / server - side - public - license > . <nl> + * <nl> + * As a special exception , the copyright holders give permission to link the <nl> + * code of portions of this program with the OpenSSL library under certain <nl> + * conditions as described in each individual source file and distribute <nl> + * linked combinations including the program with the OpenSSL library . You <nl> + * must comply with the Server Side Public License in all respects for <nl> + * all of the code used other than as permitted herein . If you modify file ( s ) <nl> + * with this exception , you may extend this exception to your version of the <nl> + * file ( s ) , but you are not obligated to do so . If you do not wish to do so , <nl> + * delete this exception statement from your version . If you delete this <nl> + * exception statement from all source files in the program , then also delete <nl> + * it in the license file . <nl> + * / <nl> + <nl> + # pragma once <nl> + <nl> + # include " mongo / platform / atomic_word . h " <nl> + # include " mongo / platform / basic . h " <nl> + # include " mongo / util / str . h " <nl> + <nl> + namespace mongo { <nl> + <nl> + enum class ReplicaSetMonitorProtocol { kScanning , kSdam } ; <nl> + extern ReplicaSetMonitorProtocol gReplicaSetMonitorProtocol ; <nl> + std : : string toString ( ReplicaSetMonitorProtocol protocol ) ; <nl> + <nl> + } / / namespace mongo <nl> similarity index 83 % <nl> rename from src / mongo / client / replica_set_monitor_params . idl <nl> rename to src / mongo / client / replica_set_monitor_server_parameters . idl <nl> mmm a / src / mongo / client / replica_set_monitor_params . idl <nl> ppp b / src / mongo / client / replica_set_monitor_server_parameters . 
idl <nl> global : <nl> cpp_namespace : mongo <nl> <nl> server_parameters : <nl> - disableStreamableReplicaSetMonitor : <nl> + replicaSetMonitorProtocol : <nl> description : > - <nl> - Disable the StreamableReplicaSetMonitor and revert to the prior behavior with the <nl> - ScanningReplicaSetMonitor <nl> + Select which replica set monitor protocol to use - the new ' sdam ' compliant protocol or <nl> + the old ' scanning ' protocol . <nl> set_at : startup <nl> - cpp_vartype : AtomicWord < bool > <nl> - cpp_varname : disableStreamableReplicaSetMonitor <nl> - default : true <nl> + cpp_class : <nl> + name : RSMProtocolServerParameter <nl> new file mode 100644 <nl> index 000000000000 . . f9644bada884 <nl> mmm / dev / null <nl> ppp b / src / mongo / client / replica_set_monitor_server_parameters_test . cpp <nl> <nl> + / * * <nl> + * Copyright ( C ) 2020 - present MongoDB , Inc . <nl> + * <nl> + * This program is free software : you can redistribute it and / or modify <nl> + * it under the terms of the Server Side Public License , version 1 , <nl> + * as published by MongoDB , Inc . <nl> + * <nl> + * This program is distributed in the hope that it will be useful , <nl> + * but WITHOUT ANY WARRANTY ; without even the implied warranty of <nl> + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the <nl> + * Server Side Public License for more details . <nl> + * <nl> + * You should have received a copy of the Server Side Public License <nl> + * along with this program . If not , see <nl> + * < http : / / www . mongodb . com / licensing / server - side - public - license > . <nl> + * <nl> + * As a special exception , the copyright holders give permission to link the <nl> + * code of portions of this program with the OpenSSL library under certain <nl> + * conditions as described in each individual source file and distribute <nl> + * linked combinations including the program with the OpenSSL library . You <nl> + * must comply with the Server Side Public License in all respects for <nl> + * all of the code used other than as permitted herein . If you modify file ( s ) <nl> + * with this exception , you may extend this exception to your version of the <nl> + * file ( s ) , but you are not obligated to do so . If you do not wish to do so , <nl> + * delete this exception statement from your version . If you delete this <nl> + * exception statement from all source files in the program , then also delete <nl> + * it in the license file . <nl> + * / <nl> + <nl> + # include " mongo / platform / basic . h " <nl> + <nl> + # include " mongo / client / replica_set_monitor . h " <nl> + # include " mongo / client / replica_set_monitor_protocol_test_fixture . h " <nl> + # include " mongo / client / replica_set_monitor_server_parameters . h " <nl> + # include " mongo / client / scanning_replica_set_monitor . h " <nl> + # include " mongo / client / streamable_replica_set_monitor . h " <nl> + # include " mongo / unittest / unittest . h " <nl> + # include " mongo / util / assert_util . h " <nl> + <nl> + namespace mongo { <nl> + namespace { <nl> + <nl> + / * * <nl> + * Tests the replicaSetMonitorProtocol server parameter . 
<nl> + * / <nl> + class ReplicaSetMonitorProtocolTest : public ReplicaSetMonitorProtocolTestFixture { <nl> + protected : <nl> + void setUp ( ) { <nl> + setGlobalServiceContext ( ServiceContext : : make ( ) ) ; <nl> + ReplicaSetMonitor : : cleanup ( ) ; <nl> + } <nl> + <nl> + void tearDown ( ) { <nl> + ReplicaSetMonitor : : cleanup ( ) ; <nl> + unsetRSMProtocol ( ) ; <nl> + } <nl> + } ; <nl> + <nl> + / * * <nl> + * Checks that a StreamableReplicaSetMonitor is created when the replicaSetMonitorProtocol server <nl> + * parameter is set to ' sdam ' . <nl> + * / <nl> + TEST_F ( ReplicaSetMonitorProtocolTest , checkRSMProtocolParamSdam ) { <nl> + setRSMProtocol ( ReplicaSetMonitorProtocol : : kSdam ) ; <nl> + auto uri = MongoURI : : parse ( " mongodb : a , b , c / ? replicaSet = name " ) ; <nl> + ASSERT_OK ( uri . getStatus ( ) ) ; <nl> + auto createdMonitor = ReplicaSetMonitor : : createIfNeeded ( uri . getValue ( ) ) ; <nl> + <nl> + / / If the created monitor does not point to a ScanningReplicaSetMonitor , the cast returns a <nl> + / / nullptr . <nl> + auto streamableMonitorCast = dynamic_cast < StreamableReplicaSetMonitor * > ( createdMonitor . get ( ) ) ; <nl> + ASSERT ( streamableMonitorCast ) ; <nl> + <nl> + auto scanningMonitorCast = dynamic_cast < ScanningReplicaSetMonitor * > ( createdMonitor . get ( ) ) ; <nl> + ASSERT_FALSE ( scanningMonitorCast ) ; <nl> + } <nl> + <nl> + / * * <nl> + * Checks that a ScanningReplicaSetMonitor is created when the replicaSetMonitorProtocol server <nl> + * parameter is set to ' scanning ' . <nl> + * / <nl> + TEST_F ( ReplicaSetMonitorProtocolTest , checkRSMProtocolParamScanning ) { <nl> + setRSMProtocol ( ReplicaSetMonitorProtocol : : kScanning ) ; <nl> + auto uri = MongoURI : : parse ( " mongodb : a , b , c / ? replicaSet = name " ) ; <nl> + ASSERT_OK ( uri . getStatus ( ) ) ; <nl> + auto createdMonitor = ReplicaSetMonitor : : createIfNeeded ( uri . getValue ( ) ) ; <nl> + <nl> + / / If the created monitor does not point to a ScanningReplicaSetMonitor , the cast returns a <nl> + / / nullptr . <nl> + auto scanningMonitorCast = dynamic_cast < ScanningReplicaSetMonitor * > ( createdMonitor . get ( ) ) ; <nl> + ASSERT ( scanningMonitorCast ) ; <nl> + <nl> + auto streamableMonitorCast = dynamic_cast < StreamableReplicaSetMonitor * > ( createdMonitor . get ( ) ) ; <nl> + ASSERT_FALSE ( streamableMonitorCast ) ; <nl> + } <nl> + } / / namespace <nl> + } / / namespace mongo <nl> mmm a / src / mongo / client / scanning_replica_set_monitor_test_fixture . cpp <nl> ppp b / src / mongo / client / scanning_replica_set_monitor_test_fixture . cpp <nl> <nl> / * * <nl> - * Copyright ( C ) 2018 - present MongoDB , Inc . <nl> + * Copyright ( C ) 2020 - present MongoDB , Inc . <nl> * <nl> * This program is free software : you can redistribute it and / or modify <nl> * it under the terms of the Server Side Public License , version 1 , <nl> <nl> # include " mongo / client / scanning_replica_set_monitor_test_fixture . h " <nl> <nl> namespace mongo { <nl> + <nl> + / * * <nl> + * Setup every test to use replicaSetMonitorProtocol : : kScanning . 
<nl> + * / <nl> + void ScanningReplicaSetMonitorTest : : setUp ( ) { <nl> + setGlobalServiceContext ( ServiceContext : : make ( ) ) ; <nl> + setRSMProtocol ( ReplicaSetMonitorProtocol : : kScanning ) ; <nl> + ReplicaSetMonitor : : cleanup ( ) ; <nl> + } <nl> + <nl> + void ScanningReplicaSetMonitorTest : : tearDown ( ) { <nl> + ReplicaSetMonitor : : cleanup ( ) ; <nl> + unsetRSMProtocol ( ) ; <nl> + } <nl> + <nl> const std : : vector < HostAndPort > ScanningReplicaSetMonitorTest : : basicSeeds = { <nl> HostAndPort ( " a " ) , HostAndPort ( " b " ) , HostAndPort ( " c " ) } ; <nl> const std : : set < HostAndPort > ScanningReplicaSetMonitorTest : : basicSeedsSet = { std : : begin ( basicSeeds ) , <nl> mmm a / src / mongo / client / scanning_replica_set_monitor_test_fixture . h <nl> ppp b / src / mongo / client / scanning_replica_set_monitor_test_fixture . h <nl> <nl> / * * <nl> - * Copyright ( C ) 2018 - present MongoDB , Inc . <nl> + * Copyright ( C ) 2020 - present MongoDB , Inc . <nl> * <nl> * This program is free software : you can redistribute it and / or modify <nl> * it under the terms of the Server Side Public License , version 1 , <nl> <nl> # include < vector > <nl> <nl> # include " mongo / client / replica_set_change_notifier . h " <nl> + # include " mongo / client / replica_set_monitor_protocol_test_fixture . h " <nl> # include " mongo / client / scanning_replica_set_monitor . h " <nl> # include " mongo / client / scanning_replica_set_monitor_internal . h " <nl> # include " mongo / unittest / unittest . h " <nl> namespace mongo { <nl> / / current ( only ) thread , so they do not lock SetState : : mutex before examining state . This is <nl> / / NOT something that non - test code should do . <nl> <nl> - class ScanningReplicaSetMonitorTest : public unittest : : Test { <nl> + class ScanningReplicaSetMonitorTest : public ReplicaSetMonitorProtocolTestFixture { <nl> public : <nl> / / Pull in nested types <nl> using SetState = ScanningReplicaSetMonitor : : SetState ; <nl> class ScanningReplicaSetMonitorTest : public unittest : : Test { <nl> std : : forward < Args > ( args ) . . . , & _notifier , nullptr ) ; <nl> } <nl> <nl> - void setUp ( ) override { } <nl> - void tearDown ( ) override { } <nl> + void setUp ( ) override ; <nl> + void tearDown ( ) override ; <nl> <nl> static const std : : vector < HostAndPort > basicSeeds ; <nl> static const std : : set < HostAndPort > basicSeedsSet ; <nl> mmm a / src / mongo / client / streamable_replica_set_monitor . cpp <nl> ppp b / src / mongo / client / streamable_replica_set_monitor . cpp <nl> bool StreamableReplicaSetMonitor : : isKnownToHaveGoodPrimary ( ) const { <nl> MONGO_UNREACHABLE ; <nl> } <nl> <nl> + void StreamableReplicaSetMonitor : : runScanForMockReplicaSet ( ) { <nl> + MONGO_UNREACHABLE ; <nl> + } <nl> + <nl> } / / namespace mongo <nl> mmm a / src / mongo / client / streamable_replica_set_monitor . h <nl> ppp b / src / mongo / client / streamable_replica_set_monitor . h <nl> class StreamableReplicaSetMonitor : public ReplicaSetMonitor { <nl> void appendInfo ( BSONObjBuilder & b , bool forFTDC = false ) const override ; <nl> <nl> bool isKnownToHaveGoodPrimary ( ) const override ; <nl> + <nl> + void runScanForMockReplicaSet ( ) override ; <nl> } ; <nl> <nl> } / / namespace mongo <nl> mmm a / src / mongo / dbtests / SConscript <nl> ppp b / src / mongo / dbtests / SConscript <nl> if not has_option ( ' noshell ' ) and usemozjs : <nl> ' query_stage_trial . cpp ' , <nl> ' query_stage_update . cpp ' , <nl> ' querytests . 
cpp ' , <nl> - ' replica_set_monitor_test . cpp ' , <nl> ' replica_set_tests . cpp ' , <nl> ' repltests . cpp ' , <nl> ' rollbacktests . cpp ' , <nl> + ' scanning_replica_set_monitor_test . cpp ' , <nl> ' socktests . cpp ' , <nl> ' storage_timestamp_tests . cpp ' , <nl> ' threadedtests . cpp ' , <nl> if not has_option ( ' noshell ' ) and usemozjs : <nl> ] , <nl> LIBDEPS = [ <nl> " $ BUILD_DIR / mongo / bson / mutable / mutable_bson_test_utils " , <nl> + " $ BUILD_DIR / mongo / client / replica_set_monitor_protocol_test_fixture " , <nl> " $ BUILD_DIR / mongo / db / auth / authmongod " , <nl> " $ BUILD_DIR / mongo / db / bson / dotted_path_support " , <nl> " $ BUILD_DIR / mongo / db / catalog / collection_validation " , <nl> similarity index 94 % <nl> rename from src / mongo / dbtests / replica_set_monitor_test . cpp <nl> rename to src / mongo / dbtests / scanning_replica_set_monitor_test . cpp <nl> mmm a / src / mongo / dbtests / replica_set_monitor_test . cpp <nl> ppp b / src / mongo / dbtests / scanning_replica_set_monitor_test . cpp <nl> <nl> # include " mongo / client / connpool . h " <nl> # include " mongo / client / dbclient_rs . h " <nl> # include " mongo / client / replica_set_monitor . h " <nl> + # include " mongo / client / replica_set_monitor_protocol_test_fixture . h " <nl> # include " mongo / client / scanning_replica_set_monitor_internal . h " <nl> # include " mongo / dbtests / mock / mock_conn_registry . h " <nl> # include " mongo / dbtests / mock / mock_replica_set . h " <nl> MONGO_INITIALIZER ( DisableReplicaSetMonitorRefreshRetries ) ( InitializerContext * ) { <nl> * Warning : Tests running this fixture cannot be run in parallel with other tests <nl> * that uses ConnectionString : : setConnectionHook <nl> * / <nl> - class ReplicaSetMonitorTest : public mongo : : unittest : : Test { <nl> + class ScanningReplicaSetMonitorDBTest : public ReplicaSetMonitorProtocolTestFixture { <nl> protected : <nl> void setUp ( ) { <nl> _replSet . reset ( new MockReplicaSet ( " test " , 3 ) ) ; <nl> _originalConnectionHook = ConnectionString : : getConnectionHook ( ) ; <nl> ConnectionString : : setConnectionHook ( mongo : : MockConnRegistry : : get ( ) - > getConnStrHook ( ) ) ; <nl> + <nl> + / / Restrict the test to use ReplicaSetMonitorProtocol : : kScanning only . <nl> + setRSMProtocol ( ReplicaSetMonitorProtocol : : kScanning ) ; <nl> } <nl> <nl> void tearDown ( ) { <nl> class ReplicaSetMonitorTest : public mongo : : unittest : : Test { <nl> ReplicaSetMonitor : : cleanup ( ) ; <nl> _replSet . reset ( ) ; <nl> mongo : : ScopedDbConnection : : clearPool ( ) ; <nl> + unsetRSMProtocol ( ) ; <nl> } <nl> <nl> MockReplicaSet * getReplSet ( ) { <nl> class ReplicaSetMonitorTest : public mongo : : unittest : : Test { <nl> std : : unique_ptr < MockReplicaSet > _replSet ; <nl> } ; <nl> <nl> - TEST_F ( ReplicaSetMonitorTest , SeedWithPriOnlySecDown ) { <nl> + TEST_F ( ScanningReplicaSetMonitorDBTest , SeedWithPriOnlySecDown ) { <nl> / / Test to make sure that the monitor doesn ' t crash when <nl> / / ConnectionString : : connect returns NULL <nl> MockReplicaSet * replSet = getReplSet ( ) ; <nl> repl : : ReplSetConfig _getConfigWithMemberRemoved ( const repl : : ReplSetConfig & oldCo <nl> / / This test goes through configurations with different positions for the primary node <nl> / / in the host list returned from the isMaster command . The test here is to make sure <nl> / / that the ReplicaSetMonitor will not crash under these situations . 
<nl> - TEST ( ReplicaSetMonitorTest , PrimaryRemovedFromSetStress ) { <nl> + TEST ( ScanningReplicaSetMonitorDBTest , PrimaryRemovedFromSetStress ) { <nl> const size_t NODE_COUNT = 5 ; <nl> MockReplicaSet replSet ( " test " , NODE_COUNT ) ; <nl> ConnectionString : : ConnectionHook * originalConnHook = ConnectionString : : getConnectionHook ( ) ; <nl>
SERVER - 46509 Change disableStreamableReplicaSetMonitor server parameter to an enum
mongodb/mongo
b7dfdde9d038433cf34e77618d4b1b114d3f4d91
2020-03-03T00:17:45Z
mmm a / Telegram / SourceFiles / mainwidget . cpp <nl> ppp b / Telegram / SourceFiles / mainwidget . cpp <nl> void MainWidget : : inviteCheckDone ( QString hash , const MTPChatInvite & invite ) { <nl> const MTPDchatInviteAlready & d ( invite . c_chatInviteAlready ( ) ) ; <nl> PeerData * chat = App : : feedChats ( MTP_vector < MTPChat > ( 1 , d . vchat ) ) ; <nl> if ( chat ) { <nl> - if ( chat - > isChat ( ) & & chat - > asChat ( ) - > haveLeft ) { <nl> - ConfirmBox * box = new ConfirmBox ( lng_group_invite_want_join ( lt_title , chat - > name ) , lang ( lng_group_invite_join ) ) ; <nl> - _inviteHash = ' / ' + QString : : number ( chat - > id ) ; <nl> - connect ( box , SIGNAL ( confirmed ( ) ) , this , SLOT ( onInviteImport ( ) ) ) ; <nl> - App : : wnd ( ) - > showLayer ( box ) ; <nl> - } else { <nl> - showPeerHistory ( chat - > id , ShowAtUnreadMsgId ) ; <nl> - } <nl> + showPeerHistory ( chat - > id , ShowAtUnreadMsgId ) ; <nl> } <nl> } break ; <nl> } <nl> bool MainWidget : : inviteCheckFail ( const RPCError & error ) { <nl> <nl> void MainWidget : : onInviteImport ( ) { <nl> if ( _inviteHash . isEmpty ( ) ) return ; <nl> - if ( _inviteHash . at ( 0 ) = = ' / ' ) { <nl> - PeerId id = _inviteHash . midRef ( 1 ) . toULongLong ( ) ; <nl> - MTP : : send ( MTPmessages_AddChatUser ( MTP_int ( peerToChat ( id ) ) , App : : self ( ) - > inputUser , MTP_int ( ForwardOnAdd ) ) , rpcDone ( & MainWidget : : inviteImportDone ) , rpcFail ( & MainWidget : : inviteImportFail ) , 0 , 5 ) ; <nl> - } else { <nl> - MTP : : send ( MTPmessages_ImportChatInvite ( MTP_string ( _inviteHash ) ) , rpcDone ( & MainWidget : : inviteImportDone ) , rpcFail ( & MainWidget : : inviteImportFail ) ) ; <nl> - } <nl> + MTP : : send ( MTPmessages_ImportChatInvite ( MTP_string ( _inviteHash ) ) , rpcDone ( & MainWidget : : inviteImportDone ) , rpcFail ( & MainWidget : : inviteImportFail ) ) ; <nl> } <nl> <nl> void MainWidget : : inviteImportDone ( const MTPUpdates & updates ) { <nl>
removed workaround in checkChatInvite when user has left the chat ( resolved on server side now )
telegramdesktop/tdesktop
89a3d6b5059cc49e5464d1d6729d633a365ffb73
2015-10-29T15:26:41Z
mmm a / src / csharp / Grpc . Tools / Grpc . Tools . csproj <nl> ppp b / src / csharp / Grpc . Tools / Grpc . Tools . csproj <nl> Linux and MacOS . Managed runtime is supplied separately in the Grpc . Core package <nl> < _Asset PackagePath = " build / native / include / google / protobuf / " Include = " @ ( _ProtoAssetName - > ' $ ( Assets_ProtoInclude ) % ( Identity ) . proto ' ) " / > <nl> <nl> < ! - - TODO ( kkm ) : GPB builds assets into " macosx " , GRPC into " macos " . - - > <nl> - < _Asset PackagePath = " build / native / bin / windows / " Include = " $ ( Assets_ProtoCompiler ) windows_x86 / protoc . exe " / > <nl> - < _Asset PackagePath = " build / native / bin / linux_x86 / " Include = " $ ( Assets_ProtoCompiler ) linux_x86 / protoc " / > <nl> - < _Asset PackagePath = " build / native / bin / linux_x64 / " Include = " $ ( Assets_ProtoCompiler ) linux_x64 / protoc " / > <nl> - < _Asset PackagePath = " build / native / bin / macosx_x86 / " Include = " $ ( Assets_ProtoCompiler ) macos_x86 / protoc " / > < ! - - GPB : macosx - - > <nl> - < _Asset PackagePath = " build / native / bin / macosx_x64 / " Include = " $ ( Assets_ProtoCompiler ) macos_x64 / protoc " / > < ! - - GPB : macosx - - > <nl> + < ! - - TODO ( kkm ) : Do not place non - tools under tools / , use build / native / bin / . - - > <nl> + < ! - - TODO ( kkm ) : Do not package windows x64 builds ( # 13098 ) . - - > <nl> + < _Asset PackagePath = " tools / windows_x86 / " Include = " $ ( Assets_ProtoCompiler ) windows_x86 / protoc . exe " / > <nl> + < _Asset PackagePath = " tools / windows_x64 / " Include = " $ ( Assets_ProtoCompiler ) windows_x64 / protoc . exe " / > <nl> + < _Asset PackagePath = " tools / linux_x86 / " Include = " $ ( Assets_ProtoCompiler ) linux_x86 / protoc " / > <nl> + < _Asset PackagePath = " tools / linux_x64 / " Include = " $ ( Assets_ProtoCompiler ) linux_x64 / protoc " / > <nl> + < _Asset PackagePath = " tools / macosx_x86 / " Include = " $ ( Assets_ProtoCompiler ) macos_x86 / protoc " / > < ! - - GPB : macosx - - > <nl> + < _Asset PackagePath = " tools / macosx_x64 / " Include = " $ ( Assets_ProtoCompiler ) macos_x64 / protoc " / > < ! - - GPB : macosx - - > <nl> <nl> < ! - - gRPC assets ( for Grpc . Tools ) - - > <nl> - < _Asset PackagePath = " build / native / bin / windows / " Include = " $ ( Assets_GrpcPlugins ) protoc_windows_x86 / grpc_csharp_plugin . exe " / > <nl> - < _Asset PackagePath = " build / native / bin / linux_x86 / " Include = " $ ( Assets_GrpcPlugins ) protoc_linux_x86 / grpc_csharp_plugin " / > <nl> - < _Asset PackagePath = " build / native / bin / linux_x64 / " Include = " $ ( Assets_GrpcPlugins ) protoc_linux_x64 / grpc_csharp_plugin " / > <nl> - < _Asset PackagePath = " build / native / bin / macosx_x86 / " Include = " $ ( Assets_GrpcPlugins ) protoc_macos_x86 / grpc_csharp_plugin " / > <nl> - < _Asset PackagePath = " build / native / bin / macosx_x64 / " Include = " $ ( Assets_GrpcPlugins ) protoc_macos_x64 / grpc_csharp_plugin " / > <nl> + < _Asset PackagePath = " tools / windows_x86 / " Include = " $ ( Assets_GrpcPlugins ) protoc_windows_x86 / grpc_csharp_plugin . exe " / > <nl> + < _Asset PackagePath = " tools / windows_x64 / " Include = " $ ( Assets_GrpcPlugins ) protoc_windows_x64 / grpc_csharp_plugin . 
exe " / > <nl> + < _Asset PackagePath = " tools / linux_x86 / " Include = " $ ( Assets_GrpcPlugins ) protoc_linux_x86 / grpc_csharp_plugin " / > <nl> + < _Asset PackagePath = " tools / linux_x64 / " Include = " $ ( Assets_GrpcPlugins ) protoc_linux_x64 / grpc_csharp_plugin " / > <nl> + < _Asset PackagePath = " tools / macosx_x86 / " Include = " $ ( Assets_GrpcPlugins ) protoc_macos_x86 / grpc_csharp_plugin " / > <nl> + < _Asset PackagePath = " tools / macosx_x64 / " Include = " $ ( Assets_GrpcPlugins ) protoc_macos_x64 / grpc_csharp_plugin " / > <nl> <nl> < None Include = " @ ( _Asset ) " Pack = " true " Visible = " false " / > <nl> < / ItemGroup > <nl> mmm a / src / csharp / Grpc . Tools / build / _grpc / _Grpc . Tools . targets <nl> ppp b / src / csharp / Grpc . Tools / build / _grpc / _Grpc . Tools . targets <nl> <nl> <nl> < ItemDefinitionGroup Condition = " ' $ ( Protobuf_ProjectSupported ) ' = = ' true ' and ' $ ( Language ) ' = = ' C # ' " > <nl> < ProtoBuf > <nl> - < GrpcServices Condition = " ' % ( ProtoBuf . GrpcServices ) ' = = ' ' " > Both < / GrpcServices > <nl> + < GrpcServices Condition = " ' % ( ProtoBuf . GrpcServices ) ' = = ' ' " > Both < / GrpcServices > <nl> < / ProtoBuf > <nl> < / ItemDefinitionGroup > <nl> <nl> < ! - - This target is invoked in a C # project , or can be called in a customized project . - - > <nl> < Target Name = " gRPC_ResolvePluginFullPath " AfterTargets = " Protobuf_ResolvePlatform " > <nl> < PropertyGroup > <nl> + < ! - - TODO ( kkm ) : Do not use Protobuf_PackagedToolsPath , roll gRPC ' s own . - - > <nl> + < ! - - TODO ( kkm ) : Do not package windows x64 builds ( # 13098 ) . - - > <nl> < gRPC_PluginFullPath Condition = " ' $ ( gRPC_PluginFullPath ) ' = = ' ' and ' $ ( Protobuf_ToolsOs ) ' = = ' windows ' " <nl> - > $ ( Protobuf_PackagedToolsPath ) bin \ $ ( Protobuf_ToolsOs ) \ $ ( gRPC_PluginFileName ) . exe < / gRPC_PluginFullPath > <nl> + > $ ( Protobuf_PackagedToolsPath ) \ $ ( Protobuf_ToolsOs ) _x86 \ $ ( gRPC_PluginFileName ) . exe < / gRPC_PluginFullPath > <nl> < gRPC_PluginFullPath Condition = " ' $ ( gRPC_PluginFullPath ) ' = = ' ' " <nl> - > $ ( Protobuf_PackagedToolsPath ) bin / $ ( Protobuf_ToolsOs ) _ $ ( Protobuf_ToolsCpu ) / $ ( gRPC_PluginFileName ) < / gRPC_PluginFullPath > <nl> + > $ ( Protobuf_PackagedToolsPath ) / $ ( Protobuf_ToolsOs ) _ $ ( Protobuf_ToolsCpu ) / $ ( gRPC_PluginFileName ) < / gRPC_PluginFullPath > <nl> < / PropertyGroup > <nl> < / Target > <nl> <nl> mmm a / src / csharp / Grpc . Tools / build / _protobuf / Google . Protobuf . Tools . props <nl> ppp b / src / csharp / Grpc . Tools / build / _protobuf / Google . Protobuf . Tools . props <nl> <nl> < ! - - Revision number of this package conventions ( as if " API " version ) . - - > <nl> < Protobuf_ToolingRevision > 1 < / Protobuf_ToolingRevision > <nl> <nl> - < ! - - TODO ( kkm ) : Remove " . . / " when separating packages . - - > <nl> - < Protobuf_PackagedToolsPath > $ ( [ System . IO . Path ] : : GetFullPath ( $ ( MSBuildThisFileDirectory ) . . / native / ) ) < / Protobuf_PackagedToolsPath > <nl> - < Protobuf_StandardImportsPath > $ ( Protobuf_PackagedToolsPath ) include < / Protobuf_StandardImportsPath > <nl> + < ! - - TODO ( kkm ) : Remove one " . . / " when separating packages . - - > <nl> + < ! - - TODO ( kkm ) : Do not place non - tools under tools / , use build / native / bin / . - - > <nl> + < Protobuf_PackagedToolsPath > $ ( [ System . IO . Path ] : : GetFullPath ( $ ( MSBuildThisFileDirectory ) . . / . . 
/ tools ) ) < / Protobuf_PackagedToolsPath > <nl> + < Protobuf_StandardImportsPath > $ ( [ System . IO . Path ] : : GetFullPath ( $ ( MSBuildThisFileDirectory ) . . / native / include ) ) < / Protobuf_StandardImportsPath > <nl> < / PropertyGroup > <nl> <nl> < ! - - NET SDK projects only : include proto files by default . Other project <nl> mmm a / src / csharp / Grpc . Tools / build / _protobuf / Google . Protobuf . Tools . targets <nl> ppp b / src / csharp / Grpc . Tools / build / _protobuf / Google . Protobuf . Tools . targets <nl> <nl> < ! - - Next try OS and CPU resolved by ProtoToolsPlatform . - - > <nl> < Protobuf_ToolsOs Condition = " ' $ ( Protobuf_ToolsOs ) ' = = ' ' " > $ ( _Protobuf_ToolsOs ) < / Protobuf_ToolsOs > <nl> < Protobuf_ToolsCpu Condition = " ' $ ( Protobuf_ToolsCpu ) ' = = ' ' " > $ ( _Protobuf_ToolsCpu ) < / Protobuf_ToolsCpu > <nl> + < ! - - TODO ( kkm ) : Do not package windows x64 builds ( # 13098 ) . - - > <nl> < Protobuf_ProtocFullPath Condition = " ' $ ( Protobuf_ProtocFullPath ) ' = = ' ' and ' $ ( Protobuf_ToolsOs ) ' = = ' windows ' " <nl> - > $ ( Protobuf_PackagedToolsPath ) bin \ $ ( Protobuf_ToolsOs ) \ protoc . exe < / Protobuf_ProtocFullPath > <nl> + > $ ( Protobuf_PackagedToolsPath ) \ $ ( Protobuf_ToolsOs ) _x86 \ protoc . exe < / Protobuf_ProtocFullPath > <nl> < Protobuf_ProtocFullPath Condition = " ' $ ( Protobuf_ProtocFullPath ) ' = = ' ' " <nl> - > $ ( Protobuf_PackagedToolsPath ) bin / $ ( Protobuf_ToolsOs ) _ $ ( Protobuf_ToolsCpu ) / protoc < / Protobuf_ProtocFullPath > <nl> + > $ ( Protobuf_PackagedToolsPath ) / $ ( Protobuf_ToolsOs ) _ $ ( Protobuf_ToolsCpu ) / protoc < / Protobuf_ProtocFullPath > <nl> < / PropertyGroup > <nl> <nl> < Error Condition = " ' $ ( DesignTimeBuild ) ' ! = ' true ' and ' $ ( PROTOBUF_PROTOC ) ' = = ' ' <nl> mmm a / src / csharp / Grpc . Tools / build / native / Grpc . Tools . props <nl> ppp b / src / csharp / Grpc . Tools / build / native / Grpc . Tools . props <nl> <nl> < Protobuf_ToolingRevision > 1 < / Protobuf_ToolingRevision > <nl> <nl> < ! - - For a Visual Studio C + + native project we currently only resolve tools and import paths . - - > <nl> - < Protobuf_ProtocFullPath > $ ( MSBuildThisFileDirectory ) bin \ windows \ protoc . exe < / Protobuf_ProtocFullPath > <nl> - < Protobuf_StandardImportsPath > $ ( MSBuildThisFileDirectory ) bin \ include \ < / Protobuf_StandardImportsPath > <nl> + < ! - - TODO ( kkm ) : Do not place non - tools under tools / , use build / native / bin / . - - > <nl> + < ! - - TODO ( kkm ) : Do not package windows x64 builds ( # 13098 ) . - - > <nl> + < Protobuf_ProtocFullPath > $ ( MSBuildThisFileDirectory ) . . \ . . \ tools \ windows_x86 \ protoc . exe < / Protobuf_ProtocFullPath > <nl> + < Protobuf_StandardImportsPath > $ ( MSBuildThisFileDirectory ) include \ < / Protobuf_StandardImportsPath > <nl> < gRPC_PluginFileName > grpc_cpp_plugin < / gRPC_PluginFileName > <nl> - < gRPC_PluginFullPath > $ ( MSBuildThisFileDirectory ) bin \ windows \ grpc_cpp_plugin . exe < / gRPC_PluginFullPath > <nl> + < gRPC_PluginFullPath > $ ( MSBuildThisFileDirectory ) . . \ . . \ tools \ windows_x86 \ grpc_cpp_plugin . exe < / gRPC_PluginFullPath > <nl> < / PropertyGroup > <nl> < / Project > <nl>
Restore packaging of binary tools under tools /
grpc/grpc
f626d4618d8ed15853e763e96a23e3635be0c339
2018-10-14T14:10:13Z
mmm a / src / commands / cmd_rotate_canvas . cpp <nl> ppp b / src / commands / cmd_rotate_canvas . cpp <nl> class RotateCanvasJob : public Job <nl> / * * <nl> * [ working thread ] <nl> * / <nl> - virtual void on_job ( ) <nl> + virtual void onJob ( ) <nl> { <nl> Undoable undoable ( m_sprite , " Rotate Canvas " ) ; <nl> <nl> class RotateCanvasJob : public Job <nl> <nl> undoable . replace_stock_image ( i , new_image ) ; <nl> <nl> - job_progress ( ( float ) i / m_sprite - > getStock ( ) - > nimage ) ; <nl> + jobProgress ( ( float ) i / m_sprite - > getStock ( ) - > nimage ) ; <nl> <nl> / / cancel all the operation ? <nl> - if ( is_canceled ( ) ) <nl> + if ( isCanceled ( ) ) <nl> return ; / / Undoable destructor will undo all operations <nl> } <nl> <nl> void RotateCanvasCommand : : onExecute ( Context * context ) <nl> CurrentSpriteReader sprite ( context ) ; <nl> { <nl> RotateCanvasJob job ( sprite , m_angle ) ; <nl> - job . do_job ( ) ; <nl> + job . startJob ( ) ; <nl> } <nl> sprite - > generateMaskBoundaries ( ) ; <nl> update_screen_for_sprite ( sprite ) ; <nl> mmm a / src / commands / cmd_sprite_size . cpp <nl> ppp b / src / commands / cmd_sprite_size . cpp <nl> class SpriteSizeJob : public Job <nl> / * * <nl> * [ working thread ] <nl> * / <nl> - virtual void on_job ( ) <nl> + virtual void onJob ( ) <nl> { <nl> Undoable undoable ( m_sprite , " Sprite Size " ) ; <nl> <nl> class SpriteSizeJob : public Job <nl> <nl> undoable . replace_stock_image ( cel - > image , new_image ) ; <nl> <nl> - job_progress ( ( float ) progress / cels . size ( ) ) ; <nl> + jobProgress ( ( float ) progress / cels . size ( ) ) ; <nl> <nl> / / cancel all the operation ? <nl> - if ( is_canceled ( ) ) <nl> + if ( isCanceled ( ) ) <nl> return ; / / Undoable destructor will undo all operations <nl> } <nl> <nl> void SpriteSizeCommand : : onExecute ( Context * context ) <nl> <nl> { <nl> SpriteSizeJob job ( sprite , new_width , new_height , resize_method ) ; <nl> - job . do_job ( ) ; <nl> + job . startJob ( ) ; <nl> } <nl> <nl> update_screen_for_sprite ( sprite ) ; <nl> mmm a / src / job . cpp <nl> ppp b / src / job . cpp <nl> Job : : ~ Job ( ) <nl> jwidget_free ( m_alert_window ) ; <nl> } <nl> <nl> - void Job : : do_job ( ) <nl> + void Job : : startJob ( ) <nl> { <nl> m_thread = jthread_new ( & Job : : thread_proc , ( void * ) this ) ; <nl> m_alert_window - > open_window_fg ( ) ; <nl> } <nl> <nl> - void Job : : job_progress ( float f ) <nl> + void Job : : jobProgress ( float f ) <nl> { <nl> ScopedLock hold ( * m_mutex ) ; <nl> m_last_progress = f ; <nl> } <nl> <nl> - bool Job : : is_canceled ( ) <nl> + bool Job : : isCanceled ( ) <nl> { <nl> ScopedLock hold ( * m_mutex ) ; <nl> return m_canceled_flag ; <nl> } <nl> <nl> - / * * <nl> - * Called from another thread to do the hard work ( image processing ) . <nl> - * <nl> - * [ working thread ] <nl> - * / <nl> - void Job : : on_job ( ) <nl> + void Job : : onJob ( ) <nl> { <nl> / / do nothing <nl> } <nl> <nl> - / * * <nl> - * Called each 1000 msecs by the GUI queue processing . <nl> - * <nl> - * [ main thread ] <nl> - * / <nl> - void Job : : on_monitor_tick ( ) <nl> + void Job : : onMonitorTick ( ) <nl> { <nl> ScopedLock hold ( * m_mutex ) ; <nl> <nl> void Job : : on_monitor_tick ( ) <nl> remove_gui_monitor ( m_monitor ) ; <nl> } <nl> <nl> - / * * <nl> - * Called when the monitor is destroyed . <nl> - * <nl> - * [ main thread ] <nl> - * / <nl> - void Job : : on_monitor_destroyed ( ) <nl> + void Job : : onMonitorDestroyed ( ) <nl> { <nl> if ( m_alert_window ! 
= NULL ) { <nl> m_monitor = NULL ; <nl> void Job : : thread_proc ( void * data ) <nl> { <nl> Job * self = ( Job * ) data ; <nl> try { <nl> - self - > on_job ( ) ; <nl> + self - > onJob ( ) ; <nl> } <nl> catch ( . . . ) { <nl> / / TODO handle this exception <nl> void Job : : thread_proc ( void * data ) <nl> void Job : : monitor_proc ( void * data ) <nl> { <nl> Job * self = ( Job * ) data ; <nl> - self - > on_monitor_tick ( ) ; <nl> + self - > onMonitorTick ( ) ; <nl> } <nl> <nl> / * * <nl> void Job : : monitor_proc ( void * data ) <nl> void Job : : monitor_free ( void * data ) <nl> { <nl> Job * self = ( Job * ) data ; <nl> - self - > on_monitor_destroyed ( ) ; <nl> + self - > onMonitorDestroyed ( ) ; <nl> } <nl> mmm a / src / job . h <nl> ppp b / src / job . h <nl> class Progress ; <nl> <nl> class Job <nl> { <nl> - JThread m_thread ; <nl> - Monitor * m_monitor ; <nl> - Progress * m_progress ; <nl> - Vaca : : Mutex * m_mutex ; <nl> - Frame * m_alert_window ; <nl> - float m_last_progress ; <nl> - bool m_done_flag ; <nl> - bool m_canceled_flag ; <nl> - <nl> - / / these methods are privated and not defined <nl> - Job ( ) ; <nl> - Job ( const Job & ) ; <nl> - Job & operator = = ( const Job & ) ; <nl> - <nl> public : <nl> - <nl> Job ( const char * job_name ) ; <nl> virtual ~ Job ( ) ; <nl> <nl> - void do_job ( ) ; <nl> - void job_progress ( float f ) ; <nl> - bool is_canceled ( ) ; <nl> + / / Starts the job calling onJob ( ) event in another thread and <nl> + / / monitoring the progress with onMonitorTick ( ) event . <nl> + void startJob ( ) ; <nl> + <nl> + / / The onJob ( ) can use this function to report progress of the <nl> + / / background job being done . 1 . 0 is completed . <nl> + void jobProgress ( float f ) ; <nl> + <nl> + / / Returns true if the job was canceled by the user ( in case he <nl> + / / pressed a " Cancel " button in the GUI ) . The onJob ( ) thread should <nl> + / / check this variable periodically to stop working . <nl> + bool isCanceled ( ) ; <nl> <nl> protected : <nl> <nl> - virtual void on_job ( ) ; <nl> - virtual void on_monitor_tick ( ) ; <nl> - virtual void on_monitor_destroyed ( ) ; <nl> + / / This member function is called from another dedicated thread <nl> + / / outside the GUI one , so you can do some image processing here . <nl> + / / Remember that you cannot use any GUI element in this handler . <nl> + virtual void onJob ( ) ; <nl> <nl> - private : <nl> + / / Called each 1000 msecs by the GUI queue processing . <nl> + / / It is executed from the main GUI thread . <nl> + virtual void onMonitorTick ( ) ; <nl> <nl> + / / Called when the monitor is destroyed . It is executed from the <nl> + / / main GUI thread . <nl> + virtual void onMonitorDestroyed ( ) ; <nl> + <nl> + private : <nl> void done ( ) ; <nl> <nl> static void thread_proc ( void * data ) ; <nl> static void monitor_proc ( void * data ) ; <nl> static void monitor_free ( void * data ) ; <nl> <nl> + JThread m_thread ; <nl> + Monitor * m_monitor ; <nl> + Progress * m_progress ; <nl> + Vaca : : Mutex * m_mutex ; <nl> + Frame * m_alert_window ; <nl> + float m_last_progress ; <nl> + bool m_done_flag ; <nl> + bool m_canceled_flag ; <nl> + <nl> + / / these methods are privated and not defined <nl> + Job ( ) ; <nl> + Job ( const Job & ) ; <nl> + Job & operator = = ( const Job & ) ; <nl> + <nl> } ; <nl> <nl> # endif <nl>
Rename member functions of Job class .
aseprite/aseprite
1677cbadaf0c1f9535eb978a7a7881aaddaee14d
2010-09-18T22:22:45Z
mmm a / drivers / vulkan / rendering_device_vulkan . cpp <nl> ppp b / drivers / vulkan / rendering_device_vulkan . cpp <nl> static VkShaderStageFlagBits shader_stage_masks [ RenderingDevice : : SHADER_STAGE_MA <nl> VK_SHADER_STAGE_COMPUTE_BIT , <nl> } ; <nl> <nl> - bool RenderingDeviceVulkan : : _uniform_add_binding ( Vector < Vector < VkDescriptorSetLayoutBinding > > & bindings , Vector < Vector < Shader : : UniformInfo > > & uniform_infos , const glslang : : TObjectReflection & reflection , RenderingDevice : : ShaderStage p_stage , String * r_error ) { <nl> + bool RenderingDeviceVulkan : : _uniform_add_binding ( Vector < Vector < VkDescriptorSetLayoutBinding > > & bindings , Vector < Vector < Shader : : UniformInfo > > & uniform_infos , const glslang : : TObjectReflection & reflection , RenderingDevice : : ShaderStage p_stage , Shader : : PushConstant & push_constant , String * r_error ) { <nl> <nl> VkDescriptorSetLayoutBinding layout_binding ; <nl> Shader : : UniformInfo info ; <nl> bool RenderingDeviceVulkan : : _uniform_add_binding ( Vector < Vector < VkDescriptorSetLa <nl> case glslang : : EbtBlock : { <nl> print_line ( " DEBUG : Block " ) ; <nl> if ( reflection . getType ( ) - > getQualifier ( ) . storage = = glslang : : EvqUniform ) { <nl> + if ( reflection . getType ( ) - > getQualifier ( ) . layoutPushConstant ) { <nl> + uint32_t len = reflection . size ; <nl> + if ( push_constant . push_constant_size ! = 0 & & push_constant . push_constant_size ! = len ) { <nl> + * r_error = " On shader stage ' " + String ( shader_stage_names [ p_stage ] ) + " ' , uniform ' " + reflection . name . c_str ( ) + " ' push constants for different stages should all be the same size . " ; <nl> + return false ; <nl> + } <nl> + push_constant . push_constant_size = len ; <nl> + push_constant . push_constants_vk_stage | = shader_stage_masks [ p_stage ] ; <nl> + return true ; <nl> + } <nl> print_line ( " DEBUG : Uniform buffer " ) ; <nl> layout_binding . descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ; <nl> info . type = UNIFORM_TYPE_UNIFORM_BUFFER ; <nl> - <nl> } else if ( reflection . getType ( ) - > getQualifier ( ) . storage = = glslang : : EvqBuffer ) { <nl> layout_binding . descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER ; <nl> info . type = UNIFORM_TYPE_STORAGE_BUFFER ; <nl> print_line ( " DEBUG : Storage buffer " ) ; <nl> } else { <nl> if ( r_error ) { <nl> - * r_error = " On shader stage ' " + String ( shader_stage_names [ p_stage ] ) + " ' , uniform ' " + reflection . name . c_str ( ) + " ' is of unsupported block type . " ; <nl> + * r_error = " On shader stage ' " + String ( shader_stage_names [ p_stage ] ) + " ' , uniform ' " + reflection . name . c_str ( ) + " ' is of unsupported block type : ( " + itos ( reflection . getType ( ) - > getQualifier ( ) . storage ) + " ) . " ; <nl> } <nl> return false ; <nl> } <nl> RenderingDevice : : ID RenderingDeviceVulkan : : shader_create_from_source ( const Vecto <nl> / / descriptor layouts <nl> Vector < Vector < VkDescriptorSetLayoutBinding > > bindings ; <nl> Vector < Vector < Shader : : UniformInfo > > uniform_info ; <nl> + Shader : : PushConstant push_constant ; <nl> + push_constant . push_constant_size = 0 ; <nl> + push_constant . push_constants_vk_stage = 0 ; <nl> + <nl> Vector < int > vertex_input_locations ; <nl> int fragment_outputs = 0 ; <nl> <nl> RenderingDevice : : ID RenderingDeviceVulkan : : shader_create_from_source ( const Vecto <nl> program . dumpReflection ( ) ; <nl> <nl> for ( int j = 0 ; j < program . 
getNumUniformVariables ( ) ; j + + ) { <nl> - if ( ! _uniform_add_binding ( bindings , uniform_info , program . getUniform ( j ) , p_stages [ i ] . shader_stage , r_error ) ) { <nl> + if ( ! _uniform_add_binding ( bindings , uniform_info , program . getUniform ( j ) , p_stages [ i ] . shader_stage , push_constant , r_error ) ) { <nl> return INVALID_ID ; <nl> } <nl> } <nl> <nl> for ( int j = 0 ; j < program . getNumUniformBlocks ( ) ; j + + ) { <nl> - if ( ! _uniform_add_binding ( bindings , uniform_info , program . getUniformBlock ( j ) , p_stages [ i ] . shader_stage , r_error ) ) { <nl> + if ( ! _uniform_add_binding ( bindings , uniform_info , program . getUniformBlock ( j ) , p_stages [ i ] . shader_stage , push_constant , r_error ) ) { <nl> return INVALID_ID ; <nl> } <nl> } <nl> <nl> for ( int j = 0 ; j < program . getNumBufferVariables ( ) ; j + + ) { <nl> - if ( ! _uniform_add_binding ( bindings , uniform_info , program . getBufferVariable ( j ) , p_stages [ i ] . shader_stage , r_error ) ) { <nl> + if ( ! _uniform_add_binding ( bindings , uniform_info , program . getBufferVariable ( j ) , p_stages [ i ] . shader_stage , push_constant , r_error ) ) { <nl> return INVALID_ID ; <nl> } <nl> } <nl> <nl> for ( int j = 0 ; j < program . getNumBufferBlocks ( ) ; j + + ) { <nl> - if ( ! _uniform_add_binding ( bindings , uniform_info , program . getBufferBlock ( j ) , p_stages [ i ] . shader_stage , r_error ) ) { <nl> + if ( ! _uniform_add_binding ( bindings , uniform_info , program . getBufferBlock ( j ) , p_stages [ i ] . shader_stage , push_constant , r_error ) ) { <nl> return INVALID_ID ; <nl> } <nl> } <nl> RenderingDevice : : ID RenderingDeviceVulkan : : shader_create_from_source ( const Vecto <nl> <nl> shader . vertex_input_locations = vertex_input_locations ; <nl> shader . fragment_outputs = fragment_outputs ; <nl> + shader . push_constant = push_constant ; <nl> <nl> bool success = true ; <nl> for ( int i = 0 ; i < p_stages . size ( ) ; i + + ) { <nl> RenderingDevice : : ID RenderingDeviceVulkan : : shader_create_from_source ( const Vecto <nl> layouts . write [ i ] = shader . sets [ i ] . descriptor_set_layout ; <nl> } <nl> <nl> - / / unsupported for now <nl> pipeline_layout_create_info . pSetLayouts = layouts . ptr ( ) ; <nl> - pipeline_layout_create_info . pushConstantRangeCount = 0 ; <nl> - pipeline_layout_create_info . pPushConstantRanges = NULL ; <nl> + if ( push_constant . push_constant_size ) { <nl> + VkPushConstantRange push_constant_range ; <nl> + push_constant_range . stageFlags = push_constant . push_constants_vk_stage ; <nl> + push_constant_range . offset = 0 ; <nl> + push_constant_range . size = push_constant . push_constant_size ; <nl> + <nl> + pipeline_layout_create_info . pushConstantRangeCount = 1 ; <nl> + pipeline_layout_create_info . pPushConstantRanges = & push_constant_range ; <nl> + } else { <nl> + pipeline_layout_create_info . pushConstantRangeCount = 0 ; <nl> + pipeline_layout_create_info . pPushConstantRanges = NULL ; <nl> + } <nl> <nl> VkResult err = vkCreatePipelineLayout ( device , & pipeline_layout_create_info , NULL , & shader . pipeline_layout ) ; <nl> <nl> RenderingDevice : : ID RenderingDeviceVulkan : : render_pipeline_create ( ID p_shader , I <nl> pipeline . vertex_format = p_vertex_description ; <nl> pipeline . uses_restart_indices = input_assembly_create_info . primitiveRestartEnable ; <nl> pipeline . set_hashes = shader - > set_hashes ; <nl> + pipeline . push_constant_size = shader - > push_constant . push_constant_size ; <nl> + pipeline . 
push_constant_stages = shader - > push_constant . push_constants_vk_stage ; <nl> + pipeline . pipeline_layout = shader - > pipeline_layout ; <nl> <nl> static const uint32_t primitive_divisor [ RENDER_PRIMITIVE_MAX ] = { <nl> 1 , 2 , 1 , 1 , 1 , 3 , 1 , 1 , 1 , 1 , 1 <nl> void RenderingDeviceVulkan : : draw_list_bind_render_pipeline ( ID p_list , ID p_rende <nl> <nl> dl - > validation . pipeline_primitive_minimum = pipeline - > primitive_minimum ; <nl> dl - > validation . pipeline_set_hashes = pipeline - > set_hashes ; <nl> + dl - > validation . pipeline_push_constant_size = pipeline - > push_constant_size ; <nl> + if ( pipeline - > push_constant_size ) { <nl> + dl - > validation . pipeline_push_constant_stages = pipeline - > push_constant_stages ; <nl> + dl - > validation . pipeline_push_constant_suppplied = false ; <nl> + dl - > validation . pipeline_push_constant_layout = pipeline - > pipeline_layout ; <nl> + } <nl> } <nl> <nl> void RenderingDeviceVulkan : : draw_list_bind_uniform_set ( ID p_list , ID p_uniform_set , uint32_t p_index ) { <nl> void RenderingDeviceVulkan : : draw_list_bind_index_array ( ID p_list , ID p_index_arr <nl> vkCmdBindIndexBuffer ( dl - > command_buffer , index_array - > buffer , index_array - > offset , index_array - > index_type ) ; <nl> } <nl> <nl> + void RenderingDeviceVulkan : : draw_list_set_push_constant ( ID p_list , void * p_data , uint32_t p_data_size ) { <nl> + DrawList * dl = _get_draw_list_ptr ( p_list ) ; <nl> + ERR_FAIL_COND ( ! dl ) ; <nl> + <nl> + ERR_FAIL_COND_MSG ( p_data_size ! = dl - > validation . pipeline_push_constant_size , <nl> + " This render pipeline requires ( " + itos ( dl - > validation . pipeline_push_constant_size ) + " ) bytes of push constant data , supplied : ( " + itos ( p_data_size ) + " ) " ) ; <nl> + <nl> + vkCmdPushConstants ( dl - > command_buffer , dl - > validation . pipeline_push_constant_layout , dl - > validation . pipeline_push_constant_stages , 0 , p_data_size , p_data ) ; <nl> + dl - > validation . pipeline_push_constant_suppplied = true ; <nl> + } <nl> + <nl> void RenderingDeviceVulkan : : draw_list_draw ( ID p_list , bool p_use_indices , uint32_t p_instances ) { <nl> <nl> DrawList * dl = _get_draw_list_ptr ( p_list ) ; <nl> void RenderingDeviceVulkan : : draw_list_draw ( ID p_list , bool p_use_indices , uint32 <nl> ERR_FAIL_COND_MSG ( p_instances > dl - > validation . vertex_max_instances_allowed , <nl> " Amount of instances requested ( " + itos ( p_instances ) + " is larger than the maximum amount suported by the bound vertex array ( " + itos ( dl - > validation . vertex_max_instances_allowed ) + " ) . " ) ; <nl> } <nl> + <nl> + if ( dl - > validation . pipeline_push_constant_size > 0 ) { <nl> + / / using push constants , check that they were supplied <nl> + ERR_FAIL_COND_MSG ( ! dl - > validation . pipeline_push_constant_suppplied , <nl> + " The shader in this pipeline requires a push constant to be set before drawing , but it ' s not present . " ) ; <nl> + } <nl> / / compare hashes <nl> if ( dl - > validation . pipeline_set_hashes . size ( ) ) { <nl> ERR_FAIL_COND_MSG ( dl - > validation . pipeline_set_hashes . size ( ) > dl - > validation . set_hashes . size ( ) , <nl> mmm a / drivers / vulkan / rendering_device_vulkan . h <nl> ppp b / drivers / vulkan / rendering_device_vulkan . 
h <nl> class RenderingDeviceVulkan : public RenderingDevice { <nl> Vector < int > vertex_input_locations ; / / inputs used , this is mostly for validation <nl> int fragment_outputs ; <nl> <nl> + struct PushConstant { <nl> + uint32_t push_constant_size ; <nl> + uint32_t push_constants_vk_stage ; <nl> + } ; <nl> + <nl> + PushConstant push_constant ; <nl> + <nl> int max_output ; <nl> Vector < Set > sets ; <nl> Vector < uint32_t > set_hashes ; <nl> class RenderingDeviceVulkan : public RenderingDevice { <nl> VkPipelineLayout pipeline_layout ; <nl> } ; <nl> <nl> - bool _uniform_add_binding ( Vector < Vector < VkDescriptorSetLayoutBinding > > & bindings , Vector < Vector < Shader : : UniformInfo > > & uniform_infos , const glslang : : TObjectReflection & reflection , RenderingDevice : : ShaderStage p_stage , String * r_error ) ; <nl> + bool _uniform_add_binding ( Vector < Vector < VkDescriptorSetLayoutBinding > > & bindings , Vector < Vector < Shader : : UniformInfo > > & uniform_infos , const glslang : : TObjectReflection & reflection , RenderingDevice : : ShaderStage p_stage , Shader : : PushConstant & push_constant , String * r_error ) ; <nl> <nl> ID_Pool < Shader , ID_TYPE_SHADER > shader_owner ; <nl> <nl> class RenderingDeviceVulkan : public RenderingDevice { <nl> uint32_t primitive_minimum ; <nl> uint32_t primitive_divisor ; <nl> Vector < uint32_t > set_hashes ; <nl> + uint32_t push_constant_size ; <nl> + uint32_t push_constant_stages ; <nl> / / Actual pipeline <nl> + VkPipelineLayout pipeline_layout ; / / not owned , needed for push constants <nl> VkPipeline pipeline ; <nl> } ; <nl> <nl> class RenderingDeviceVulkan : public RenderingDevice { <nl> uint32_t pipeline_primitive_divisor ; <nl> uint32_t pipeline_primitive_minimum ; <nl> Vector < uint32_t > pipeline_set_hashes ; <nl> + VkPipelineLayout pipeline_push_constant_layout ; <nl> + uint32_t pipeline_push_constant_size ; <nl> + uint32_t pipeline_push_constant_stages ; <nl> + bool pipeline_push_constant_suppplied ; <nl> <nl> Validation ( ) { <nl> active = true ; <nl> class RenderingDeviceVulkan : public RenderingDevice { <nl> pipeline_dynamic_state = 0 ; <nl> pipeline_vertex_format = INVALID_ID ; <nl> pipeline_uses_restart_indices = false ; <nl> + pipeline_push_constant_size = 0 ; <nl> + pipeline_push_constant_stages = 0 ; <nl> + pipeline_push_constant_suppplied = false ; <nl> } <nl> } validation ; <nl> } ; <nl> class RenderingDeviceVulkan : public RenderingDevice { <nl> virtual void draw_list_bind_uniform_set ( ID p_list , ID p_uniform_set , uint32_t p_index ) ; <nl> virtual void draw_list_bind_vertex_array ( ID p_list , ID p_vertex_array ) ; <nl> virtual void draw_list_bind_index_array ( ID p_list , ID p_index_array ) ; <nl> + virtual void draw_list_set_push_constant ( ID p_list , void * p_data , uint32_t p_data_size ) ; <nl> <nl> virtual void draw_list_draw ( ID p_list , bool p_use_indices , uint32_t p_instances = 1 ) ; <nl> <nl> mmm a / platform / x11 / os_x11 . cpp <nl> ppp b / platform / x11 / os_x11 . 
cpp <nl> Error OS_X11 : : initialize ( const VideoMode & p_desired , int p_video_driver , int p_a <nl> " layout ( location = 0 ) in vec2 uv_interp ; \ n " <nl> " layout ( location = 0 ) out vec4 uFragColor ; \ n " <nl> " layout ( binding = 0 ) uniform sampler2D t ; \ n " <nl> - " void main ( ) { uFragColor = texture ( t , uv_interp ) ; } \ n " ; <nl> + " layout ( push_constant , binding = 1 ) uniform ColorMultiplier { vec4 color_mult ; } color_multiplier ; \ n " <nl> + " void main ( ) { uFragColor = texture ( t , uv_interp ) * color_multiplier . color_mult ; } \ n " ; <nl> <nl> Vector < RenderingDevice : : ShaderStageSource > source ; <nl> source . push_back ( vert ) ; <nl> void OS_X11 : : swap_buffers ( ) { <nl> # endif <nl> <nl> Vector < Color > clear ; <nl> + float color [ 4 ] = { 1 , 0 , 1 , 1 } ; <nl> clear . push_back ( Color ( 0 . 5 , 0 . 8 , 0 . 2 ) ) ; <nl> RenderingDevice : : ID cmd_list = rendering_device - > draw_list_begin ( test_framebuffer , RenderingDevice : : INITIAL_ACTION_CLEAR , RenderingDevice : : FINAL_ACTION_READ_COLOR_DISCARD_DEPTH , clear ) ; <nl> rendering_device - > draw_list_bind_render_pipeline ( cmd_list , test_pipeline ) ; <nl> rendering_device - > draw_list_bind_index_array ( cmd_list , test_index_array ) ; <nl> rendering_device - > draw_list_bind_vertex_array ( cmd_list , test_vertex_array ) ; <nl> rendering_device - > draw_list_bind_uniform_set ( cmd_list , test_uniform_set , 0 ) ; <nl> + rendering_device - > draw_list_set_push_constant ( cmd_list , color , 4 * 4 ) ; <nl> rendering_device - > draw_list_draw ( cmd_list , true ) ; <nl> rendering_device - > draw_list_end ( ) ; <nl> <nl> void OS_X11 : : swap_buffers ( ) { <nl> rendering_device - > draw_list_bind_index_array ( cmd_list , test_index_array ) ; <nl> rendering_device - > draw_list_bind_vertex_array ( cmd_list , test_vertex_array ) ; <nl> rendering_device - > draw_list_bind_uniform_set ( cmd_list , test_framebuffer_uniform_set , 0 ) ; <nl> + rendering_device - > draw_list_set_push_constant ( cmd_list , color , 4 * 4 ) ; <nl> rendering_device - > draw_list_draw ( cmd_list , true ) ; <nl> rendering_device - > draw_list_end ( ) ; <nl> rendering_device - > finalize_frame ( ) ; <nl> mmm a / servers / visual / rendering_device . h <nl> ppp b / servers / visual / rendering_device . h <nl> class RenderingDevice : public Object { <nl> virtual void draw_list_bind_uniform_set ( ID p_list , ID p_uniform_set , uint32_t p_index ) = 0 ; <nl> virtual void draw_list_bind_vertex_array ( ID p_list , ID p_vertex_array ) = 0 ; <nl> virtual void draw_list_bind_index_array ( ID p_list , ID p_index_array ) = 0 ; <nl> + virtual void draw_list_set_push_constant ( ID p_list , void * p_data , uint32_t p_data_size ) = 0 ; <nl> <nl> virtual void draw_list_draw ( ID p_list , bool p_use_indices , uint32_t p_instances = 1 ) = 0 ; <nl> <nl>
Added support for push constants
godotengine/godot
1522d8c3ee6ddf43267f124940f4e43612058407
2020-02-11T10:53:26Z
mmm a / hphp / hack / src / errors / errors . ml <nl> ppp b / hphp / hack / src / errors / errors . ml <nl> let mutable_in_nonreactive_context pos = <nl> let invalid_argument_of_rx_mutable_function pos = <nl> add ( Typing . err_code Typing . InvalidArgumentOfRxMutableFunction ) pos ( <nl> " Single argument to \ \ HH \ \ Rx \ \ mutable should be an expression that yields new \ <nl> - mutably - owned value , like ' new A ( ) ' or ' f ( ) ' where f is function \ <nl> + mutably - owned value , like ' new A ( ) ' , Hack collection literal or ' f ( ) ' where f is function \ <nl> annotated with < < __MutableReturn > > attribute . " <nl> ) <nl> <nl> mmm a / hphp / hack / src / typing / typing_mutability . ml <nl> ppp b / hphp / hack / src / typing / typing_mutability . ml <nl> let rec expr_returns_owned_mutable <nl> ( env : Typing_env . env ) ( e : T . expr ) <nl> : bool = <nl> match snd e with <nl> - | T . New _ - > true <nl> + | T . New _ <nl> + | T . KeyValCollection ( ( ` Map | ` ImmMap ) , _ ) <nl> + | T . ValCollection ( ( ` Vector | ` ImmVector | ` Set | ` ImmSet ) , _ ) <nl> + | T . Pair _ - > <nl> + true <nl> ( * Function call * ) <nl> | T . Call ( _ , ( _ , T . Id id ) , _ , _ , _ ) <nl> | T . Call ( _ , ( _ , T . Fun_id id ) , _ , _ , _ ) - > <nl> mmm a / hphp / hack / test / typecheck / reactive / rx_mutable1 . php . exp <nl> ppp b / hphp / hack / test / typecheck / reactive / rx_mutable1 . php . exp <nl> <nl> File " rx_mutable1 . php " , line 6 , characters 8 - 25 : <nl> - Single argument to \ HH \ Rx \ mutable should be an expression that yields new mutably - owned value , like ' new A ( ) ' or ' f ( ) ' where f is function annotated with < < __MutableReturn > > attribute . ( Typing [ 4243 ] ) <nl> + Single argument to \ HH \ Rx \ mutable should be an expression that yields new mutably - owned value , like ' new A ( ) ' , Hack collection literal or ' f ( ) ' where f is function annotated with < < __MutableReturn > > attribute . ( Typing [ 4243 ] ) <nl> mmm a / hphp / hack / test / typecheck / reactive / rx_mutable2 . php . exp <nl> ppp b / hphp / hack / test / typecheck / reactive / rx_mutable2 . php . exp <nl> <nl> File " rx_mutable2 . php " , line 14 , characters 8 - 26 : <nl> - Single argument to \ HH \ Rx \ mutable should be an expression that yields new mutably - owned value , like ' new A ( ) ' or ' f ( ) ' where f is function annotated with < < __MutableReturn > > attribute . ( Typing [ 4243 ] ) <nl> + Single argument to \ HH \ Rx \ mutable should be an expression that yields new mutably - owned value , like ' new A ( ) ' , Hack collection literal or ' f ( ) ' where f is function annotated with < < __MutableReturn > > attribute . ( Typing [ 4243 ] ) <nl> new file mode 100644 <nl> index 00000000000 . . 06c2b7de53e <nl> mmm / dev / null <nl> ppp b / hphp / hack / test / typecheck / reactive / rx_mutable8 . php <nl> <nl> + < ? hh / / strict <nl> + <nl> + < < __Rx > > <nl> + function f ( ) : void { <nl> + $ a = HH \ Rx \ mutable ( Map { } ) ; <nl> + $ b = HH \ Rx \ mutable ( ImmMap { } ) ; <nl> + $ c = HH \ Rx \ mutable ( Set { } ) ; <nl> + $ d = HH \ Rx \ mutable ( Vector { } ) ; <nl> + $ e = HH \ Rx \ mutable ( ImmVector { } ) ; <nl> + $ f = HH \ Rx \ mutable ( ImmSet { } ) ; <nl> + $ g = HH \ Rx \ mutable ( Pair { 1 , 2 } ) ; <nl> + } <nl> new file mode 100644 <nl> index 00000000000 . . 4269126fceb <nl> mmm / dev / null <nl> ppp b / hphp / hack / test / typecheck / reactive / rx_mutable8 . php . exp <nl> @ @ - 0 , 0 + 1 @ @ <nl> + No errors <nl>
Allow collection literals as arguments to HH \ Rx \ mutable
facebook/hhvm
39faf1528e93958e777fc837bd1edd0a00796d5b
2018-06-14T21:06:41Z
mmm a / tensorflow / python / ops / numpy_ops / np_array_ops . py <nl> ppp b / tensorflow / python / ops / numpy_ops / np_array_ops . py <nl> <nl> from tensorflow . python . ops import sort_ops <nl> from tensorflow . python . ops . numpy_ops import np_arrays <nl> from tensorflow . python . ops . numpy_ops import np_dtypes <nl> + from tensorflow . python . ops . numpy_ops import np_export <nl> from tensorflow . python . ops . numpy_ops import np_utils <nl> from tensorflow . python . util import nest <nl> <nl> <nl> + newaxis = np_export . np_export_constant ( __name__ , ' newaxis ' , np . newaxis ) <nl> + <nl> + <nl> @ np_utils . np_doc ( ' empty ' ) <nl> def empty ( shape , dtype = float ) : # pylint : disable = redefined - outer - name <nl> return zeros ( shape , dtype ) <nl> mmm a / third_party / py / numpy / tf_numpy_api / tensorflow . experimental . numpy . pbtxt <nl> ppp b / third_party / py / numpy / tf_numpy_api / tensorflow . experimental . numpy . pbtxt <nl> tf_module { <nl> name : " ndarray " <nl> mtype : " < type \ ' type \ ' > " <nl> } <nl> + member { <nl> + name : " newaxis " <nl> + mtype : " < type \ ' NoneType \ ' > " <nl> + } <nl> member { <nl> name : " object_ " <nl> mtype : " < type \ ' type \ ' > " <nl>
[ TF - numpy ] Exports ` np . newaxis ` .
tensorflow/tensorflow
90da05cd1c07b0c84e102944a9a634127ecdc52b
2020-07-17T01:19:35Z
mmm a / android / sdk / src / main / java / com / taobao / weex / WXSDKInstance . java <nl> ppp b / android / sdk / src / main / java / com / taobao / weex / WXSDKInstance . java <nl> public void run ( ) { <nl> if ( mRenderListener ! = null & & mContext ! = null ) { <nl> mGodCom = component ; <nl> onViewAppear ( ) ; <nl> - View wxView = component . getView ( ) ; <nl> + View wxView = component . getHostView ( ) ; <nl> if ( WXEnvironment . isApkDebugable ( ) & & WXSDKManager . getInstance ( ) . getIWXDebugAdapter ( ) ! = null ) { <nl> wxView = WXSDKManager . getInstance ( ) . getIWXDebugAdapter ( ) . wrapContainer ( WXSDKInstance . this , wxView ) ; <nl> } <nl> private void destroyView ( View rootView ) { <nl> public void destroy ( ) { <nl> WXSDKManager . getInstance ( ) . destroyInstance ( mInstanceId ) ; <nl> <nl> - if ( mGodCom ! = null & & mGodCom . getView ( ) ! = null ) { <nl> + if ( mGodCom ! = null & & mGodCom . getHostView ( ) ! = null ) { <nl> mGodCom . destroy ( ) ; <nl> - destroyView ( mGodCom . getView ( ) ) ; <nl> + destroyView ( mGodCom . getHostView ( ) ) ; <nl> mGodCom = null ; <nl> } <nl> <nl> mmm a / android / sdk / src / main / java / com / taobao / weex / ui / WXRenderStatement . java <nl> ppp b / android / sdk / src / main / java / com / taobao / weex / ui / WXRenderStatement . java <nl> WXComponent createBodyOnDomThread ( WXDomObject dom ) { <nl> / / TODO error callback <nl> return null ; <nl> } <nl> - FrameLayout frameLayout = ( FrameLayout ) mGodComponent . getView ( ) ; <nl> + FrameLayout frameLayout = ( FrameLayout ) mGodComponent . getHostView ( ) ; <nl> ViewGroup . LayoutParams layoutParams = new LayoutParams ( LayoutParams . WRAP_CONTENT , LayoutParams . WRAP_CONTENT ) ; <nl> frameLayout . setLayoutParams ( layoutParams ) ; <nl> frameLayout . setBackgroundColor ( Color . TRANSPARENT ) ; <nl> mmm a / android / sdk / src / main / java / com / taobao / weex / ui / animation / WXAnimationModule . java <nl> ppp b / android / sdk / src / main / java / com / taobao / weex / ui / animation / WXAnimationModule . java <nl> public void transition ( @ Nullable String ref , @ Nullable String animation , @ Nullab <nl> public static void startAnimation ( WXSDKInstance mWXSDKInstance , WXComponent component , <nl> @ NonNull WXAnimationBean animationBean , @ Nullable String callback ) { <nl> try { <nl> - Animator animator = createAnimator ( animationBean , component . getView ( ) ) ; <nl> + Animator animator = createAnimator ( animationBean , component . getHostView ( ) ) ; <nl> if ( animator ! = null ) { <nl> Animator . AnimatorListener animatorCallback = createAnimatorListener ( mWXSDKInstance , callback ) ; <nl> - if ( Build . VERSION . SDK_INT < Build . VERSION_CODES . JELLY_BEAN_MR2 ) { <nl> - component . getView ( ) . setLayerType ( View . LAYER_TYPE_HARDWARE , null ) ; <nl> - } <nl> + component . getHostView ( ) . setLayerType ( View . LAYER_TYPE_HARDWARE , null ) ; <nl> Interpolator interpolator = createTimeInterpolator ( animationBean ) ; <nl> if ( animatorCallback ! = null ) { <nl> animator . addListener ( animatorCallback ) ; <nl> mmm a / android / sdk / src / main / java / com / taobao / weex / ui / component / AbstractEditComponent . java <nl> ppp b / android / sdk / src / main / java / com / taobao / weex / ui / component / AbstractEditComponent . java <nl> public void setSingleLine ( boolean singleLine ) { <nl> <nl> @ WXComponentProp ( name = WXDomPropConstant . 
WX_ATTR_INPUT_LINES ) <nl> public void setLines ( int lines ) { <nl> - if ( getView ( ) = = null ) { <nl> + if ( getHostView ( ) = = null ) { <nl> return ; <nl> } <nl> - getView ( ) . setLines ( lines ) ; <nl> + getHostView ( ) . setLines ( lines ) ; <nl> } <nl> <nl> @ WXComponentProp ( name = WXDomPropConstant . WX_ATTR_INPUT_MAXLENGTH ) <nl> public void setMaxLength ( int maxLength ) { <nl> - if ( getView ( ) = = null ) { <nl> + if ( getHostView ( ) = = null ) { <nl> return ; <nl> } <nl> - getView ( ) . setFilters ( new InputFilter [ ] { new InputFilter . LengthFilter ( maxLength ) } ) ; <nl> + getHostView ( ) . setFilters ( new InputFilter [ ] { new InputFilter . LengthFilter ( maxLength ) } ) ; <nl> } <nl> <nl> @ Override <nl> private int getInputType ( String type ) { <nl> break ; <nl> case WXDomPropConstant . WX_ATTR_INPUT_TYPE_PASSWORD : <nl> inputType = InputType . TYPE_CLASS_TEXT | InputType . TYPE_TEXT_VARIATION_PASSWORD ; <nl> - getView ( ) . setTransformationMethod ( PasswordTransformationMethod . getInstance ( ) ) ; <nl> + getHostView ( ) . setTransformationMethod ( PasswordTransformationMethod . getInstance ( ) ) ; <nl> break ; <nl> case WXDomPropConstant . WX_ATTR_INPUT_TYPE_TEL : <nl> inputType = InputType . TYPE_CLASS_PHONE ; <nl> mmm a / android / sdk / src / main / java / com / taobao / weex / ui / component / Textarea . java <nl> ppp b / android / sdk / src / main / java / com / taobao / weex / ui / component / Textarea . java <nl> <nl> package com . taobao . weex . ui . component ; <nl> <nl> import android . text . TextUtils ; <nl> - import android . widget . EditText ; <nl> import com . taobao . weex . WXSDKInstance ; <nl> import com . taobao . weex . common . WXDomPropConstant ; <nl> import com . taobao . weex . dom . TextAreaEditTextDomObject ; <nl> protected void appleStyleAfterCreated ( WXEditText editText ) { <nl> <nl> @ WXComponentProp ( name = WXDomPropConstant . WX_ATTR_TEXTAREA_ROWS ) <nl> public void setRows ( int rows ) { <nl> - WXEditText text = getView ( ) ; <nl> + WXEditText text = getHostView ( ) ; <nl> if ( text = = null | | rows < = 0 ) { <nl> return ; <nl> } <nl> mmm a / android / sdk / src / main / java / com / taobao / weex / ui / component / WXComponent . java <nl> ppp b / android / sdk / src / main / java / com / taobao / weex / ui / component / WXComponent . java <nl> protected void onHostViewInitialized ( T host ) { <nl> <nl> } <nl> <nl> - public T getView ( ) { <nl> + public T getHostView ( ) { <nl> + return mHost ; <nl> + } <nl> + <nl> + / * * <nl> + * use { @ link # getHostView ( ) } instead <nl> + * @ return <nl> + * / <nl> + @ Deprecated <nl> + public View getView ( ) { <nl> return mHost ; <nl> } <nl> <nl> public void destroy ( ) { <nl> <nl> / * * <nl> * Detach view from its component . Components , <nl> - * which have difference between getView and getRealView or have temp calculation results , <nl> + * which have difference between getHostView and getRealView or have temp calculation results , <nl> * must < strong > override < / strong > this method with their own implementation . <nl> * <nl> * @ return the original View <nl> mmm a / android / sdk / src / main / java / com / taobao / weex / ui / component / WXEmbed . java <nl> ppp b / android / sdk / src / main / java / com / taobao / weex / ui / component / WXEmbed . java <nl> private WXSDKInstance createInstance ( ) { <nl> sdkInstance . 
registerRenderListener ( new IWXRenderListener ( ) { <nl> @ Override <nl> public void onViewCreated ( WXSDKInstance instance , View view ) { <nl> - getView ( ) . removeAllViews ( ) ; <nl> - getView ( ) . addView ( view ) ; <nl> + getHostView ( ) . removeAllViews ( ) ; <nl> + getHostView ( ) . addView ( view ) ; <nl> } <nl> <nl> @ Override <nl> public void onClick ( View v ) { <nl> WXEmbed . this . instance = createInstance ( ) ; <nl> } <nl> } ) ; <nl> - getView ( ) . removeAllViews ( ) ; <nl> - getView ( ) . addView ( imageView ) ; <nl> + getHostView ( ) . removeAllViews ( ) ; <nl> + getHostView ( ) . addView ( imageView ) ; <nl> WXLogUtils . e ( " WXEmbed " , " NetWork failure : " + errCode + " , \ n error message : " + msg ) ; <nl> } <nl> } <nl> } ) ; <nl> - ViewGroup . LayoutParams layoutParams = getView ( ) . getLayoutParams ( ) ; <nl> + ViewGroup . LayoutParams layoutParams = getHostView ( ) . getLayoutParams ( ) ; <nl> sdkInstance . renderByUrl ( WXPerformance . DEFAULT , <nl> src , <nl> null , null , layoutParams . width , <nl> mmm a / android / sdk / src / main / java / com / taobao / weex / ui / component / WXImage . java <nl> ppp b / android / sdk / src / main / java / com / taobao / weex / ui / component / WXImage . java <nl> <nl> <nl> import android . content . Context ; <nl> import android . text . TextUtils ; <nl> - import android . view . View ; <nl> import android . widget . ImageView ; <nl> import android . widget . ImageView . ScaleType ; <nl> <nl> public void setBorderColor ( String borderColor ) { <nl> <nl> @ WXComponentProp ( name = WXDomPropConstant . WX_RESIZE_MODE ) <nl> public void setResizeMode ( String resizeMode ) { <nl> - ( ( ImageView ) getView ( ) ) . setScaleType ( getResizeMode ( resizeMode ) ) ; <nl> + ( ( ImageView ) getHostView ( ) ) . setScaleType ( getResizeMode ( resizeMode ) ) ; <nl> } <nl> <nl> private ScaleType getResizeMode ( String resizeMode ) { <nl> private ScaleType getResizeMode ( String resizeMode ) { <nl> <nl> @ WXComponentProp ( name = WXDomPropConstant . WX_RESIZE ) <nl> public void setResize ( String resize ) { <nl> - ( ( ImageView ) getView ( ) ) . setScaleType ( getResizeMode ( resize ) ) ; <nl> + ( ( ImageView ) getHostView ( ) ) . setScaleType ( getResizeMode ( resize ) ) ; <nl> } <nl> <nl> @ WXComponentProp ( name = WXDomPropConstant . WX_ATTR_SRC ) <nl> public void setSrc ( String src ) { <nl> <nl> IWXImgLoaderAdapter imgLoaderAdapter = mInstance . getImgLoaderAdapter ( ) ; <nl> if ( imgLoaderAdapter ! = null ) { <nl> - imgLoaderAdapter . setImage ( src , ( ( ImageView ) getView ( ) ) , <nl> + imgLoaderAdapter . setImage ( src , ( ( ImageView ) getHostView ( ) ) , <nl> mDomObj . attr . getImageQuality ( ) , imageStrategy ) ; <nl> } <nl> } <nl> mmm a / android / sdk / src / main / java / com / taobao / weex / ui / component / WXIndicator . java <nl> ppp b / android / sdk / src / main / java / com / taobao / weex / ui / component / WXIndicator . java <nl> public void setItemColor ( String itemColor ) { <nl> if ( ! TextUtils . isEmpty ( itemColor ) ) { <nl> int colorInt = WXResourceUtils . getColor ( itemColor ) ; <nl> if ( colorInt ! = Integer . MIN_VALUE ) { <nl> - getView ( ) . setPageColor ( colorInt ) ; <nl> - getView ( ) . forceLayout ( ) ; <nl> - getView ( ) . requestLayout ( ) ; <nl> + getHostView ( ) . setPageColor ( colorInt ) ; <nl> + getHostView ( ) . forceLayout ( ) ; <nl> + getHostView ( ) . requestLayout ( ) ; <nl> } <nl> } <nl> } <nl> public void setItemSelectedColor ( String itemSelectedColor ) { <nl> if ( ! 
TextUtils . isEmpty ( itemSelectedColor ) ) { <nl> int colorInt = WXResourceUtils . getColor ( itemSelectedColor ) ; <nl> if ( colorInt ! = Integer . MIN_VALUE ) { <nl> - getView ( ) . setFillColor ( colorInt ) ; <nl> - getView ( ) . forceLayout ( ) ; <nl> - getView ( ) . requestLayout ( ) ; <nl> + getHostView ( ) . setFillColor ( colorInt ) ; <nl> + getHostView ( ) . forceLayout ( ) ; <nl> + getHostView ( ) . requestLayout ( ) ; <nl> } <nl> } <nl> } <nl> public void setItemSize ( int itemSize ) { <nl> if ( itemSize < 0 ) { <nl> return ; <nl> } <nl> - getView ( ) . setRadius ( WXViewUtils . getRealPxByWidth ( itemSize ) / 2 . 0f ) ; <nl> - getView ( ) . forceLayout ( ) ; <nl> - getView ( ) . requestLayout ( ) ; <nl> + getHostView ( ) . setRadius ( WXViewUtils . getRealPxByWidth ( itemSize ) / 2 . 0f ) ; <nl> + getHostView ( ) . forceLayout ( ) ; <nl> + getHostView ( ) . requestLayout ( ) ; <nl> } <nl> <nl> public void setShowIndicators ( boolean show ) { <nl> mmm a / android / sdk / src / main / java / com / taobao / weex / ui / component / WXLoading . java <nl> ppp b / android / sdk / src / main / java / com / taobao / weex / ui / component / WXLoading . java <nl> public void setDisplay ( String display ) { <nl> if ( ! TextUtils . isEmpty ( display ) ) { <nl> if ( display . equals ( " hide " ) ) { <nl> if ( getParent ( ) instanceof WXListComponent | | getParent ( ) instanceof WXScroller ) { <nl> - if ( ( ( BaseBounceView ) getParent ( ) . getView ( ) ) . getSwipeLayout ( ) . isRefreshing ( ) ) { <nl> - ( ( BaseBounceView ) getParent ( ) . getView ( ) ) . finishPullLoad ( ) ; <nl> - ( ( BaseBounceView ) getParent ( ) . getView ( ) ) . onLoadmoreComplete ( ) ; <nl> + if ( ( ( BaseBounceView ) getParent ( ) . getHostView ( ) ) . getSwipeLayout ( ) . isRefreshing ( ) ) { <nl> + ( ( BaseBounceView ) getParent ( ) . getHostView ( ) ) . finishPullLoad ( ) ; <nl> + ( ( BaseBounceView ) getParent ( ) . getHostView ( ) ) . onLoadmoreComplete ( ) ; <nl> } <nl> } <nl> } <nl> mmm a / android / sdk / src / main / java / com / taobao / weex / ui / component / WXLoadingIndicator . java <nl> ppp b / android / sdk / src / main / java / com / taobao / weex / ui / component / WXLoadingIndicator . java <nl> protected CircleProgressBar initComponentHostView ( Context context ) { <nl> public void setColor ( String color ) { <nl> if ( color ! = null & & ! color . equals ( " " ) ) { <nl> int parseColor = WXResourceUtils . getColor ( color , Color . RED ) ; <nl> - getView ( ) . setColorSchemeColors ( parseColor ) ; <nl> + getHostView ( ) . setColorSchemeColors ( parseColor ) ; <nl> } <nl> } <nl> } <nl> mmm a / android / sdk / src / main / java / com / taobao / weex / ui / component / WXRefresh . java <nl> ppp b / android / sdk / src / main / java / com / taobao / weex / ui / component / WXRefresh . java <nl> public void setDisplay ( String display ) { <nl> if ( ! TextUtils . isEmpty ( display ) ) { <nl> if ( display . equals ( " hide " ) ) { <nl> if ( getParent ( ) instanceof WXListComponent | | getParent ( ) instanceof WXScroller ) { <nl> - if ( ( ( BaseBounceView ) getParent ( ) . getView ( ) ) . getSwipeLayout ( ) . isRefreshing ( ) ) { <nl> - ( ( BaseBounceView ) getParent ( ) . getView ( ) ) . finishPullRefresh ( ) ; <nl> - ( ( BaseBounceView ) getParent ( ) . getView ( ) ) . onRefreshingComplete ( ) ; <nl> + if ( ( ( BaseBounceView ) getParent ( ) . getHostView ( ) ) . getSwipeLayout ( ) . isRefreshing ( ) ) { <nl> + ( ( BaseBounceView ) getParent ( ) . getHostView ( ) ) . 
finishPullRefresh ( ) ; <nl> + ( ( BaseBounceView ) getParent ( ) . getHostView ( ) ) . onRefreshingComplete ( ) ; <nl> } <nl> } <nl> } <nl> mmm a / android / sdk / src / main / java / com / taobao / weex / ui / component / WXScroller . java <nl> ppp b / android / sdk / src / main / java / com / taobao / weex / ui / component / WXScroller . java <nl> public ViewGroup getInnerView ( ) { <nl> if ( mHost instanceof BounceScrollerView ) { <nl> return ( ( BounceScrollerView ) mHost ) . getInnerView ( ) ; <nl> } else { <nl> - return getView ( ) ; <nl> + return getHostView ( ) ; <nl> } <nl> } <nl> <nl> private void checkRefreshOrLoading ( WXComponent child ) { <nl> Runnable runnable = new Runnable ( ) { <nl> @ Override <nl> public void run ( ) { <nl> - ( ( BaseBounceView ) mHost ) . setHeaderView ( temp . getView ( ) ) ; <nl> + ( ( BaseBounceView ) mHost ) . setHeaderView ( temp . getHostView ( ) ) ; <nl> } <nl> } ; <nl> handler . postDelayed ( runnable , 100 ) ; <nl> public void run ( ) { <nl> Runnable runnable = new Runnable ( ) { <nl> @ Override <nl> public void run ( ) { <nl> - ( ( BaseBounceView ) mHost ) . setFooterView ( temp . getView ( ) ) ; <nl> + ( ( BaseBounceView ) mHost ) . setFooterView ( temp . getHostView ( ) ) ; <nl> } <nl> } ; <nl> handler . postDelayed ( runnable , 100 ) ; <nl> private void procAppear ( View scrollView , int x , int y , int oldx , <nl> while ( iterator . hasNext ( ) ) { <nl> entry = iterator . next ( ) ; <nl> appearData = entry . getValue ( ) ; <nl> - if ( ! appearData . mAppear & & appearData . mAppearComponent . getView ( ) . getLocalVisibleRect ( mScrollRect ) ) { <nl> + if ( ! appearData . mAppear & & appearData . mAppearComponent . getHostView ( ) . getLocalVisibleRect ( mScrollRect ) ) { <nl> appearData . mAppear = true ; <nl> if ( appearData . hasAppear ) { <nl> Map < String , Object > params = new HashMap < > ( ) ; <nl> private void procAppear ( View scrollView , int x , int y , int oldx , <nl> WXSDKManager . getInstance ( ) . fireEvent ( mInstanceId , appearData . mAppearComponent . getRef ( ) , WXEventType . APPEAR , params ) ; <nl> } <nl> <nl> - } else if ( appearData . mAppear & & ! appearData . mAppearComponent . getView ( ) . getLocalVisibleRect ( mScrollRect ) ) { <nl> + } else if ( appearData . mAppear & & ! appearData . mAppearComponent . getHostView ( ) . getLocalVisibleRect ( mScrollRect ) ) { <nl> appearData . mAppear = false ; <nl> if ( appearData . hasDisappear ) { <nl> Map < String , Object > params = new HashMap < > ( ) ; <nl> mmm a / android / sdk / src / main / java / com / taobao / weex / ui / component / WXSlider . java <nl> ppp b / android / sdk / src / main / java / com / taobao / weex / ui / component / WXSlider . java <nl> <nl> import com . taobao . weex . ui . view . WXCircleIndicator ; <nl> import com . taobao . weex . ui . view . WXCirclePageAdapter ; <nl> import com . taobao . weex . ui . view . WXCircleViewPager ; <nl> - import com . taobao . weex . ui . view . WXEditText ; <nl> import com . taobao . weex . utils . WXLogUtils ; <nl> import com . taobao . weex . utils . WXViewUtils ; <nl> <nl> protected void addSubView ( View view , int index ) { <nl> mAdapter . addPageView ( view ) ; <nl> mAdapter . notifyDataSetChanged ( ) ; <nl> if ( mIndicator ! = null ) { <nl> - mIndicator . getView ( ) . forceLayout ( ) ; <nl> - mIndicator . getView ( ) . requestLayout ( ) ; <nl> + mIndicator . getHostView ( ) . forceLayout ( ) ; <nl> + mIndicator . getHostView ( ) . 
requestLayout ( ) ; <nl> } <nl> } <nl> <nl> public void remove ( WXComponent child ) { <nl> <nl> @ Override <nl> public void remove ( WXComponent child , boolean destroy ) { <nl> - if ( child = = null | | child . getView ( ) = = null | | mAdapter = = null ) { <nl> + if ( child = = null | | child . getHostView ( ) = = null | | mAdapter = = null ) { <nl> return ; <nl> } <nl> <nl> - mAdapter . removePageView ( child . getView ( ) ) ; <nl> + mAdapter . removePageView ( child . getHostView ( ) ) ; <nl> mAdapter . notifyDataSetChanged ( ) ; <nl> } <nl> <nl> public void onActivityStop ( ) { <nl> } <nl> <nl> public void addIndicator ( WXIndicator indicator ) { <nl> - FrameLayout root = getView ( ) ; <nl> + FrameLayout root = getHostView ( ) ; <nl> if ( root = = null ) { <nl> return ; <nl> } <nl> mIndicator = indicator ; <nl> - WXCircleIndicator indicatorView = indicator . getView ( ) ; <nl> + WXCircleIndicator indicatorView = indicator . getHostView ( ) ; <nl> if ( indicatorView ! = null ) { <nl> indicatorView . setCircleViewPager ( mViewPager ) ; <nl> indicatorView . setOnPageChangeListener ( this ) ; <nl> mmm a / android / sdk / src / main / java / com / taobao / weex / ui / component / WXSwitch . java <nl> ppp b / android / sdk / src / main / java / com / taobao / weex / ui / component / WXSwitch . java <nl> protected WXSwitchView initComponentHostView ( Context context ) { <nl> @ Override <nl> public void addEvent ( String type ) { <nl> super . addEvent ( type ) ; <nl> - if ( type ! = null & & type . equals ( WXEventType . CHANGE ) & & getView ( ) ! = null ) { <nl> - getView ( ) . setOnCheckedChangeListener ( new CompoundButton . OnCheckedChangeListener ( ) { <nl> + if ( type ! = null & & type . equals ( WXEventType . CHANGE ) & & getHostView ( ) ! = null ) { <nl> + getHostView ( ) . setOnCheckedChangeListener ( new CompoundButton . OnCheckedChangeListener ( ) { <nl> @ Override <nl> public void onCheckedChanged ( CompoundButton buttonView , boolean isChecked ) { <nl> Map < String , Object > params = new HashMap < > ( 2 ) ; <nl> public void onCheckedChanged ( CompoundButton buttonView , boolean isChecked ) { <nl> @ Override <nl> protected void removeEventFromView ( String type ) { <nl> super . removeEventFromView ( type ) ; <nl> - if ( getView ( ) ! = null ) { <nl> - getView ( ) . setOnCheckedChangeListener ( null ) ; <nl> + if ( getHostView ( ) ! = null ) { <nl> + getHostView ( ) . setOnCheckedChangeListener ( null ) ; <nl> } <nl> } <nl> <nl> @ WXComponentProp ( name = WXDomPropConstant . WX_ATTR_SWITCH_CHECKED ) <nl> public void setChecked ( boolean checked ) { <nl> - getView ( ) . setChecked ( checked ) ; <nl> + getHostView ( ) . setChecked ( checked ) ; <nl> } <nl> } <nl> mmm a / android / sdk / src / main / java / com / taobao / weex / ui / component / WXText . java <nl> ppp b / android / sdk / src / main / java / com / taobao / weex / ui / component / WXText . java <nl> <nl> <nl> import android . content . Context ; <nl> import android . text . Layout ; <nl> - import android . view . View ; <nl> import android . view . ViewGroup ; <nl> <nl> import com . taobao . weex . WXSDKInstance ; <nl> protected WXTextView initComponentHostView ( Context context ) { <nl> @ Override <nl> public void updateExtra ( Object extra ) { <nl> if ( extra instanceof Layout & & <nl> - getView ( ) ! = null & & ! extra . equals ( getView ( ) . getTextLayout ( ) ) ) { <nl> + getHostView ( ) ! = null & & ! extra . equals ( getHostView ( ) . 
getTextLayout ( ) ) ) { <nl> final Layout layout = ( Layout ) extra ; <nl> - getView ( ) . setTextLayout ( layout ) ; <nl> - getView ( ) . invalidate ( ) ; <nl> + getHostView ( ) . setTextLayout ( layout ) ; <nl> + getHostView ( ) . invalidate ( ) ; <nl> } <nl> } <nl> <nl> public void updateExtra ( Object extra ) { <nl> * / <nl> private void flushView ( Object extra ) { <nl> if ( extra instanceof Layout & & <nl> - getView ( ) ! = null & & ! extra . equals ( getView ( ) . getTextLayout ( ) ) ) { <nl> + getHostView ( ) ! = null & & ! extra . equals ( getHostView ( ) . getTextLayout ( ) ) ) { <nl> final Layout layout = ( Layout ) extra ; <nl> / * * The following if block change the height of the width of the textView . <nl> * other part of the code is the same to updateExtra <nl> * / <nl> - ViewGroup . LayoutParams layoutParams = getView ( ) . getLayoutParams ( ) ; <nl> + ViewGroup . LayoutParams layoutParams = getHostView ( ) . getLayoutParams ( ) ; <nl> if ( layoutParams ! = null ) { <nl> layoutParams . height = layout . getHeight ( ) ; <nl> layoutParams . width = layout . getWidth ( ) ; <nl> - getView ( ) . setLayoutParams ( layoutParams ) ; <nl> + getHostView ( ) . setLayoutParams ( layoutParams ) ; <nl> } <nl> - getView ( ) . setTextLayout ( layout ) ; <nl> - getView ( ) . invalidate ( ) ; <nl> + getHostView ( ) . setTextLayout ( layout ) ; <nl> + getHostView ( ) . invalidate ( ) ; <nl> } <nl> } <nl> } <nl> mmm a / android / sdk / src / main / java / com / taobao / weex / ui / component / WXVContainer . java <nl> ppp b / android / sdk / src / main / java / com / taobao / weex / ui / component / WXVContainer . java <nl> <nl> <nl> import com . taobao . weex . WXSDKInstance ; <nl> import com . taobao . weex . dom . WXDomObject ; <nl> - import com . taobao . weex . ui . view . WXFrameLayout ; <nl> <nl> import java . util . ArrayList ; <nl> <nl> public WXVContainer ( WXSDKInstance instance , WXDomObject node , WXVContainer paren <nl> super ( instance , node , parent , lazy ) ; <nl> } <nl> <nl> + / * * <nl> + * use { @ link # getHostView ( ) } instead <nl> + * @ return <nl> + * / <nl> + @ Deprecated <nl> + public ViewGroup getView ( ) { <nl> + return mHost ; <nl> + } <nl> <nl> @ Override <nl> public void applyLayoutAndEvent ( WXComponent component ) { <nl> public void createViewImpl ( WXVContainer parent , int index ) { <nl> for ( int i = 0 ; i < count ; + + i ) { <nl> getChild ( i ) . createViewImpl ( this , i ) ; <nl> } <nl> - if ( getView ( ) ! = null ) { <nl> - getView ( ) . setClipToPadding ( false ) ; <nl> + if ( getHostView ( ) ! = null ) { <nl> + getHostView ( ) . setClipToPadding ( false ) ; <nl> } <nl> } <nl> <nl> public void remove ( WXComponent child , boolean destroy ) { <nl> if ( mInstance ! = null <nl> & & mInstance . getRootView ( ) ! = null <nl> & & child . mDomObj . isFixed ( ) ) { <nl> - mInstance . getRootView ( ) . removeView ( child . getView ( ) ) ; <nl> + mInstance . getRootView ( ) . removeView ( child . getHostView ( ) ) ; <nl> } else if ( getRealView ( ) ! = null ) { <nl> - getRealView ( ) . removeView ( child . getView ( ) ) ; <nl> + getRealView ( ) . removeView ( child . getHostView ( ) ) ; <nl> } <nl> if ( destroy ) { <nl> child . destroy ( ) ; <nl> public void remove ( WXComponent child , boolean destroy ) { <nl> @ Override <nl> public void notifyAppearStateChange ( String wxEventType , String direction ) { <nl> super . 
notifyAppearStateChange ( wxEventType , direction ) ; <nl> - if ( getView ( ) = = null | | mChildren = = null ) { <nl> + if ( getHostView ( ) = = null | | mChildren = = null ) { <nl> return ; <nl> } <nl> for ( WXComponent component : mChildren ) { <nl> - if ( component . getView ( ) ! = null & & ! ( component . getView ( ) . getVisibility ( ) = = View . VISIBLE ) ) { <nl> + if ( component . getHostView ( ) ! = null & & ! ( component . getHostView ( ) . getVisibility ( ) = = View . VISIBLE ) ) { <nl> wxEventType = WXEventType . DISAPPEAR ; <nl> } <nl> component . notifyAppearStateChange ( wxEventType , direction ) ; <nl> mmm a / android / sdk / src / main / java / com / taobao / weex / ui / component / WXVideo . java <nl> ppp b / android / sdk / src / main / java / com / taobao / weex / ui / component / WXVideo . java <nl> public void destroy ( ) { <nl> <nl> @ WXComponentProp ( name = " src " ) <nl> public void setSrc ( String src ) { <nl> - if ( TextUtils . isEmpty ( src ) | | getView ( ) = = null ) { <nl> + if ( TextUtils . isEmpty ( src ) | | getHostView ( ) = = null ) { <nl> return ; <nl> } <nl> <nl> mmm a / android / sdk / src / main / java / com / taobao / weex / ui / component / list / WXCell . java <nl> ppp b / android / sdk / src / main / java / com / taobao / weex / ui / component / list / WXCell . java <nl> public ViewGroup getRealView ( ) { <nl> } <nl> <nl> public void removeSticky ( ) { <nl> - mHeadView = getView ( ) . getChildAt ( 0 ) ; <nl> + mHeadView = getHostView ( ) . getChildAt ( 0 ) ; <nl> int [ ] location = new int [ 2 ] ; <nl> int [ ] parentLocation = new int [ 2 ] ; <nl> - getView ( ) . getLocationOnScreen ( location ) ; <nl> + getHostView ( ) . getLocationOnScreen ( location ) ; <nl> getParentScroller ( ) . getView ( ) . getLocationOnScreen ( parentLocation ) ; <nl> int headerViewOffsetX = location [ 0 ] - parentLocation [ 0 ] ; <nl> - int headerViewOffsetY = getParent ( ) . getView ( ) . getTop ( ) ; <nl> - getView ( ) . removeView ( mHeadView ) ; <nl> + int headerViewOffsetY = getParent ( ) . getHostView ( ) . getTop ( ) ; <nl> + getHostView ( ) . removeView ( mHeadView ) ; <nl> mRealView = ( ViewGroup ) mHeadView ; <nl> mTempStickyView = new FrameLayout ( mContext ) ; <nl> FrameLayout . LayoutParams lp = new FrameLayout . LayoutParams ( ( int ) getDomObject ( ) . csslayout . dimensions [ CSSLayout . DIMENSION_WIDTH ] , <nl> ( int ) getDomObject ( ) . csslayout . dimensions [ CSSLayout . DIMENSION_HEIGHT ] ) ; <nl> - getView ( ) . addView ( mTempStickyView , lp ) ; <nl> + getHostView ( ) . addView ( mTempStickyView , lp ) ; <nl> mHeadView . setTranslationX ( headerViewOffsetX ) ; <nl> mHeadView . setTranslationY ( headerViewOffsetY ) ; <nl> } <nl> <nl> public void recoverySticky ( ) { <nl> - getView ( ) . removeView ( mTempStickyView ) ; <nl> - getView ( ) . addView ( mHeadView ) ; <nl> + getHostView ( ) . removeView ( mTempStickyView ) ; <nl> + getHostView ( ) . addView ( mHeadView ) ; <nl> mHeadView . setTranslationX ( 0 ) ; <nl> mHeadView . setTranslationY ( 0 ) ; <nl> } <nl> mmm a / android / sdk / src / main / java / com / taobao / weex / ui / component / list / WXListComponent . java <nl> ppp b / android / sdk / src / main / java / com / taobao / weex / ui / component / list / WXListComponent . java <nl> public void scrollTo ( WXComponent component , final int offset ) { <nl> @ Override <nl> public void run ( ) { <nl> if ( mOrientation = = VERTICAL ) { <nl> - int scrollY = cellComp . getView ( ) . getTop ( ) + offset ; <nl> + int scrollY = cellComp . 
getHostView ( ) . getTop ( ) + offset ; <nl> view . smoothScrollBy ( 0 , scrollY ) ; <nl> } else { <nl> - int scrollX = cellComp . getView ( ) . getLeft ( ) + offset ; <nl> + int scrollX = cellComp . getHostView ( ) . getLeft ( ) + offset ; <nl> view . smoothScrollBy ( scrollX , 0 ) ; <nl> } <nl> } <nl> public void checkLastSticky ( final int position ) { <nl> for ( int i = 0 ; i < = position ; i + + ) { <nl> WXComponent component = getChild ( i ) ; <nl> if ( component . isSticky ( ) & & component instanceof WXCell ) { <nl> - if ( component . getView ( ) = = null ) { <nl> + if ( component . getHostView ( ) = = null ) { <nl> return ; <nl> } <nl> bounceRecyclerView . notifyStickyShow ( ( WXCell ) component ) ; <nl> public void onBeforeScroll ( int dx , int dy ) { <nl> <nl> if ( stickyComponent ! = null & & stickyComponent . getDomObject ( ) ! = null <nl> & & stickyComponent instanceof WXCell ) { <nl> - if ( stickyComponent . getView ( ) = = null ) { <nl> + if ( stickyComponent . getHostView ( ) = = null ) { <nl> return ; <nl> } <nl> <nl> int [ ] location = new int [ 2 ] ; <nl> - stickyComponent . getView ( ) . getLocationOnScreen ( location ) ; <nl> + stickyComponent . getHostView ( ) . getLocationOnScreen ( location ) ; <nl> int [ ] parentLocation = new int [ 2 ] ; <nl> stickyComponent . getParentScroller ( ) . getView ( ) . getLocationOnScreen ( parentLocation ) ; <nl> <nl> public void addChild ( WXComponent child , int index ) { <nl> super . addChild ( child , index ) ; <nl> <nl> int adapterPosition = index = = - 1 ? mChildren . size ( ) - 1 : index ; <nl> - BounceRecyclerView view = getView ( ) ; <nl> + BounceRecyclerView view = getHostView ( ) ; <nl> if ( view ! = null ) { <nl> view . getAdapter ( ) . notifyItemInserted ( adapterPosition ) ; <nl> } <nl> private boolean hasAppearAndDisAppearEvent ( WXComponent child ) { <nl> * / <nl> @ Override <nl> protected void addSubView ( View child , int index ) { <nl> - BounceRecyclerView view = getView ( ) ; <nl> + BounceRecyclerView view = getHostView ( ) ; <nl> if ( view = = null ) { <nl> return ; <nl> } <nl> public void remove ( WXComponent child , boolean destroy ) { <nl> if ( destroy ) { <nl> child . detachViewAndClearPreInfo ( ) ; <nl> } <nl> - getView ( ) . getAdapter ( ) . notifyItemRemoved ( index ) ; <nl> + getHostView ( ) . getAdapter ( ) . notifyItemRemoved ( index ) ; <nl> if ( WXEnvironment . isApkDebugable ( ) ) { <nl> WXLogUtils . d ( TAG , " removeChild child at " + index ) ; <nl> } <nl> public void remove ( WXComponent child , boolean destroy ) { <nl> <nl> @ Override <nl> public void computeVisiblePointInViewCoordinate ( PointF pointF ) { <nl> - RecyclerView view = getView ( ) . getInnerView ( ) ; <nl> + RecyclerView view = getHostView ( ) . getInnerView ( ) ; <nl> pointF . set ( view . computeHorizontalScrollOffset ( ) , view . computeVerticalScrollOffset ( ) ) ; <nl> } <nl> <nl> public ListBaseViewHolder onCreateViewHolder ( ViewGroup parent , int viewType ) { <nl> continue ; <nl> if ( component instanceof WXRefresh ) { <nl> if ( getOrientation ( ) = = VERTICAL ) <nl> - bounceRecyclerView . setHeaderView ( component . getView ( ) ) ; <nl> + bounceRecyclerView . setHeaderView ( component . getHostView ( ) ) ; <nl> return createVHForWXRefresh ( component , viewType ) ; <nl> } else if ( component instanceof WXLoading ) { <nl> if ( getOrientation ( ) = = VERTICAL ) <nl> - bounceRecyclerView . setFooterView ( component . getView ( ) ) ; <nl> + bounceRecyclerView . setFooterView ( component . 
getHostView ( ) ) ; <nl> return createVHForWXLoading ( component , viewType ) ; <nl> } else if ( component . mDomObj ! = null & & component . mDomObj . isFixed ( ) ) { <nl> return createVHForFakeComponent ( viewType ) ; <nl> mmm a / android / sdk / src / main / java / com / taobao / weex / ui / view / WXScrollView . java <nl> ppp b / android / sdk / src / main / java / com / taobao / weex / ui / view / WXScrollView . java <nl> private View procSticky ( Map < String , HashMap < String , WXComponent > > mStickyMap ) { <nl> stickyData = entry . getValue ( ) ; <nl> <nl> getLocationOnScreen ( stickyScrollerP ) ; <nl> - stickyData . getView ( ) . getLocationOnScreen ( stickyViewP ) ; <nl> + stickyData . getHostView ( ) . getLocationOnScreen ( stickyViewP ) ; <nl> int parentH = 0 ; <nl> if ( stickyData . getParent ( ) ! = null & & stickyData . getParent ( ) . getRealView ( ) ! = null ) { <nl> parentH = stickyData . getParent ( ) . getRealView ( ) . getHeight ( ) ; <nl> } <nl> - int stickyViewH = stickyData . getView ( ) . getHeight ( ) ; <nl> + int stickyViewH = stickyData . getHostView ( ) . getHeight ( ) ; <nl> int stickyShowPos = stickyScrollerP [ 1 ] ; <nl> int stickyStartHidePos = - parentH + stickyScrollerP [ 1 ] + stickyViewH ; <nl> if ( stickyViewP [ 1 ] < = stickyShowPos & & stickyViewP [ 1 ] > = ( stickyStartHidePos - stickyViewH ) ) { <nl> mStickyOffset = stickyViewP [ 1 ] - stickyStartHidePos ; <nl> - return stickyData . getView ( ) ; <nl> + return stickyData . getHostView ( ) ; <nl> } <nl> } <nl> return null ; <nl> mmm a / android / sdk / src / main / java / com / taobao / weex / ui / view / listview / adapter / ListBaseViewHolder . java <nl> ppp b / android / sdk / src / main / java / com / taobao / weex / ui / view / listview / adapter / ListBaseViewHolder . java <nl> <nl> <nl> import com . taobao . weex . ui . component . WXComponent ; <nl> <nl> - import java . lang . ref . WeakReference ; <nl> - <nl> <nl> / * * <nl> * There are bi - directional association between ViewHolder and View . <nl> <nl> private WXComponent mComponent ; <nl> <nl> public ListBaseViewHolder ( WXComponent component , int viewType ) { <nl> - super ( component . getView ( ) ) ; <nl> + super ( component . getHostView ( ) ) ; <nl> mViewType = viewType ; <nl> mComponent = component ; <nl> } <nl> mmm a / android / sdk / src / test / java / com / taobao / weex / ui / component / WXTextTest . java <nl> ppp b / android / sdk / src / test / java / com / taobao / weex / ui / component / WXTextTest . java <nl> public void setUp ( ) throws Exception { <nl> @ Test <nl> public void testCreateView ( ) { <nl> mWXText . createView ( mParent , - 1 ) ; <nl> - assertNotNull ( mWXText . getView ( ) ) ; <nl> + assertNotNull ( mWXText . getHostView ( ) ) ; <nl> } <nl> <nl> @ Test <nl> public void testSetLayout ( ) { <nl> testCreateView ( ) ; <nl> mWXText . setLayout ( mDomObject ) ; <nl> - assertNotNull ( mWXText . getView ( ) . getLayoutParams ( ) ) ; <nl> - assertEquals ( 100 , mWXText . getView ( ) . getLayoutParams ( ) . height ) ; <nl> - assertEquals ( 100 , mWXText . getView ( ) . getLayoutParams ( ) . width ) ; <nl> + assertNotNull ( mWXText . getHostView ( ) . getLayoutParams ( ) ) ; <nl> + assertEquals ( 100 , mWXText . getHostView ( ) . getLayoutParams ( ) . height ) ; <nl> + assertEquals ( 100 , mWXText . getHostView ( ) . getLayoutParams ( ) . width ) ; <nl> } <nl> <nl> @ Test <nl> public void testSetPadding ( ) { <nl> testCreateView ( ) ; <nl> mWXText . setPadding ( mDomObject . getPadding ( ) , mDomObject . 
getBorder ( ) ) ; <nl> - assertEquals ( 0 , mWXText . getView ( ) . getPaddingLeft ( ) ) ; <nl> - assertEquals ( 0 , mWXText . getView ( ) . getPaddingTop ( ) ) ; <nl> - assertEquals ( 0 , mWXText . getView ( ) . getPaddingRight ( ) ) ; <nl> - assertEquals ( 0 , mWXText . getView ( ) . getPaddingBottom ( ) ) ; <nl> + assertEquals ( 0 , mWXText . getHostView ( ) . getPaddingLeft ( ) ) ; <nl> + assertEquals ( 0 , mWXText . getHostView ( ) . getPaddingTop ( ) ) ; <nl> + assertEquals ( 0 , mWXText . getHostView ( ) . getPaddingRight ( ) ) ; <nl> + assertEquals ( 0 , mWXText . getHostView ( ) . getPaddingBottom ( ) ) ; <nl> } <nl> <nl> @ Test <nl> public void testBind ( ) { <nl> mWXText . applyLayoutAndEvent ( mWXText ) ; <nl> mWXText . bindData ( mWXText ) ; <nl> <nl> - assertNotNull ( mWXText . getView ( ) . getLayoutParams ( ) ) ; <nl> - assertEquals ( 100 , mWXText . getView ( ) . getLayoutParams ( ) . height ) ; <nl> - assertEquals ( 100 , mWXText . getView ( ) . getLayoutParams ( ) . width ) ; <nl> + assertNotNull ( mWXText . getHostView ( ) . getLayoutParams ( ) ) ; <nl> + assertEquals ( 100 , mWXText . getHostView ( ) . getLayoutParams ( ) . height ) ; <nl> + assertEquals ( 100 , mWXText . getHostView ( ) . getLayoutParams ( ) . width ) ; <nl> <nl> - assertEquals ( 0 , mWXText . getView ( ) . getPaddingLeft ( ) ) ; <nl> - assertEquals ( 0 , mWXText . getView ( ) . getPaddingTop ( ) ) ; <nl> - assertEquals ( 0 , mWXText . getView ( ) . getPaddingRight ( ) ) ; <nl> - assertEquals ( 0 , mWXText . getView ( ) . getPaddingBottom ( ) ) ; <nl> + assertEquals ( 0 , mWXText . getHostView ( ) . getPaddingLeft ( ) ) ; <nl> + assertEquals ( 0 , mWXText . getHostView ( ) . getPaddingTop ( ) ) ; <nl> + assertEquals ( 0 , mWXText . getHostView ( ) . getPaddingRight ( ) ) ; <nl> + assertEquals ( 0 , mWXText . getHostView ( ) . getPaddingBottom ( ) ) ; <nl> } <nl> <nl> @ Test <nl> public void testAddEvent ( ) { <nl> testBind ( ) ; <nl> mWXText . addEvent ( WXEventType . CLICK ) ; <nl> - assertTrue ( mWXText . getView ( ) . isEnabled ( ) ) ; <nl> - mWXText . getView ( ) . performClick ( ) ; <nl> + assertTrue ( mWXText . getHostView ( ) . isEnabled ( ) ) ; <nl> + mWXText . getHostView ( ) . performClick ( ) ; <nl> } <nl> <nl> @ Test <nl> public void testUpdateProperties ( ) { <nl> prop . put ( WXDomPropConstant . WX_ATTR_DISABLED , " false " ) ; <nl> prop . put ( WXDomPropConstant . WX_OPACITY , 0 . 8f ) ; <nl> mWXText . updateProperties ( prop ) ; <nl> - assertTrue ( mWXText . getView ( ) . isEnabled ( ) ) ; <nl> - assertTrue ( mWXText . getView ( ) . getAlpha ( ) > = 0 . 799 & & mWXText . getView ( ) . getAlpha ( ) < = 0 . 811 ) ; <nl> + assertTrue ( mWXText . getHostView ( ) . isEnabled ( ) ) ; <nl> + assertTrue ( mWXText . getHostView ( ) . getAlpha ( ) > = 0 . 799 & & mWXText . getHostView ( ) . getAlpha ( ) < = 0 . 811 ) ; <nl> <nl> prop . put ( WXDomPropConstant . WX_ATTR_DISABLED , " true " ) ; <nl> mWXText . updateProperties ( prop ) ; <nl> - assertFalse ( mWXText . getView ( ) . isEnabled ( ) ) ; <nl> + assertFalse ( mWXText . getHostView ( ) . isEnabled ( ) ) ; <nl> } <nl> } <nl> \ No newline at end of file <nl>
* [ android ] deprecated getView of Component
apache/incubator-weex
3ac1a3fe1bc58ad71cbdb90ee66ca2fecf1251ac
2016-07-21T07:02:17Z
new file mode 100644 <nl> index 00000000000 . . cc4c416ff6e <nl> mmm / dev / null <nl> ppp b / . travis . yml <nl> <nl> + language : cpp <nl> + script : make static_c <nl> + notifications : <nl> + email : false <nl>
Adding a basic travis . yml file .
grpc/grpc
9f1baf7660a24d8f8afb40f7600820c3105f645a
2015-02-26T20:26:01Z
mmm a / tensorflow / core / profiler / utils / BUILD <nl> ppp b / tensorflow / core / profiler / utils / BUILD <nl> <nl> load ( " / / tensorflow : tensorflow . bzl " , " tf_cc_test " ) <nl> <nl> package ( <nl> - default_visibility = [ " / / visibility : public " ] , <nl> + default_visibility = [ " / / tensorflow / core / profiler : internal " ] , <nl> licenses = [ " notice " ] , # Apache 2 . 0 <nl> ) <nl> <nl> + package_group ( <nl> + name = " friends " , <nl> + includes = [ <nl> + " / / tensorflow / core / profiler : friends " , <nl> + ] , <nl> + ) <nl> + <nl> cc_library ( <nl> name = " timespan " , <nl> hdrs = [ " timespan . h " ] , <nl> cc_library ( <nl> name = " xplane_builder " , <nl> srcs = [ " xplane_builder . cc " ] , <nl> hdrs = [ " xplane_builder . h " ] , <nl> + visibility = [ " : friends " ] , <nl> deps = [ <nl> " / / tensorflow / core / platform : logging " , <nl> " / / tensorflow / core / platform : types " , <nl> mmm a / tensorflow / core / profiler / utils / timespan . h <nl> ppp b / tensorflow / core / profiler / utils / timespan . h <nl> limitations under the License . <nl> <nl> namespace tensorflow { <nl> namespace profiler { <nl> - namespace utils { <nl> <nl> / / A Timespan is the time extent of an event : a pair of ( begin , duration ) . <nl> / / Events may have duration 0 ( " instant events " ) but duration can ' t be negative . <nl> class Timespan { <nl> uint64 duration_ps_ ; / / 0 for an instant event . <nl> } ; <nl> <nl> - } / / namespace utils <nl> } / / namespace profiler <nl> } / / namespace tensorflow <nl> <nl> mmm a / tensorflow / core / profiler / utils / timespan_test . cc <nl> ppp b / tensorflow / core / profiler / utils / timespan_test . cc <nl> limitations under the License . <nl> <nl> namespace tensorflow { <nl> namespace profiler { <nl> - namespace utils { <nl> <nl> TEST ( TimespanTests , NonInstantSpanIncludesSingleTimeTests ) { <nl> EXPECT_TRUE ( Timespan ( 10 , 2 ) . Includes ( 12 ) ) ; <nl> TEST ( TimespanTests , InstantSpanNonInstantSpanOverlappedDuration ) { <nl> EXPECT_EQ ( 0 , Timespan ( 12 , 0 ) . OverlappedDurationPs ( Timespan ( 8 , 16 ) ) ) ; <nl> } <nl> <nl> - } / / namespace utils <nl> } / / namespace profiler <nl> } / / namespace tensorflow <nl>
Make Timespan namespace shorter .
tensorflow/tensorflow
9b00382f637b84da5f1a1f51c2b91909b1fcfaa8
2019-11-12T01:41:40Z
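Editorial note on the commit above: the change only drops the inner utils namespace, so call sites now qualify Timespan one level up. The snippet below is a hedged sketch, not code from the commit; the main wrapper and the concrete begin/duration values are assumptions, while the Timespan(begin, duration), Includes() and OverlappedDurationPs() calls are taken from the test file shown in the diff.

    #include "tensorflow/core/profiler/utils/timespan.h"

    // After this commit the class lives directly in tensorflow::profiler,
    // so the former ::utils qualifier disappears at every call site.
    int main() {
      tensorflow::profiler::Timespan span(/*begin_ps=*/10, /*duration_ps=*/2);
      bool covered = span.Includes(12);  // true per the test above: [10, 12] contains 12
      auto overlap = span.OverlappedDurationPs(
          tensorflow::profiler::Timespan(8, 16));  // expected 2 ps here (assumption, not asserted in the diff)
      return (covered && overlap > 0) ? 0 : 1;
    }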
mmm a / lib / ClangImporter / ImportDecl . cpp <nl> ppp b / lib / ClangImporter / ImportDecl . cpp <nl> namespace { <nl> DeclName name = importedName . Imported ; <nl> bool hasCustomName = importedName . HasCustomName ; <nl> <nl> - / / TODO : refactor into separate function and share with other kinds of <nl> - / / import - as - member <nl> - if ( name . getBaseName ( ) . str ( ) = = " init " ) { <nl> - bool allowNSUIntegerAsInt = <nl> - Impl . shouldAllowNSUIntegerAsInt ( isInSystemModule ( dc ) , decl ) ; <nl> - <nl> - ArrayRef < Identifier > argNames = name . getArgumentNames ( ) ; <nl> - auto parameterList = Impl . importFunctionParameterList ( <nl> - decl , { decl - > param_begin ( ) , decl - > param_end ( ) } , decl - > isVariadic ( ) , <nl> - allowNSUIntegerAsInt , argNames ) ; <nl> - <nl> - if ( ! parameterList ) <nl> - return nullptr ; <nl> - <nl> - SourceLoc noLoc { } ; <nl> - auto selfParam = ParamDecl : : createSelf ( noLoc , dc , false ) ; <nl> + if ( dc - > isTypeContext ( ) ) { <nl> + / / Import as member <nl> <nl> - auto & SwiftCtx = Impl . SwiftContext ; <nl> - name = { SwiftCtx , SwiftCtx . Id_init , parameterList } ; <nl> + / / TODO : refactor into separate function and share with other kinds of <nl> + / / import - as - member <nl> + if ( name . getBaseName ( ) . str ( ) = = " init " ) { <nl> + bool allowNSUIntegerAsInt = <nl> + Impl . shouldAllowNSUIntegerAsInt ( isInSystemModule ( dc ) , decl ) ; <nl> <nl> - OptionalTypeKind initOptionality ; <nl> - { <nl> + ArrayRef < Identifier > argNames = name . getArgumentNames ( ) ; <nl> + auto parameterList = Impl . importFunctionParameterList ( <nl> + decl , { decl - > param_begin ( ) , decl - > param_end ( ) } , <nl> + decl - > isVariadic ( ) , allowNSUIntegerAsInt , argNames ) ; <nl> <nl> - bool isAuditedResult = <nl> - ( decl & & ( decl - > hasAttr < clang : : CFAuditedTransferAttr > ( ) | | <nl> - decl - > hasAttr < clang : : CFReturnsRetainedAttr > ( ) | | <nl> - decl - > hasAttr < clang : : CFReturnsNotRetainedAttr > ( ) ) ) ; <nl> - / / Check if we know more about the type from our whitelists . <nl> - OptionalTypeKind returnOptKind ; <nl> - if ( decl - > hasAttr < clang : : ReturnsNonNullAttr > ( ) ) { <nl> - returnOptKind = OTK_None ; <nl> - } else { <nl> - returnOptKind = OTK_ImplicitlyUnwrappedOptional ; <nl> - } <nl> + if ( ! parameterList ) <nl> + return nullptr ; <nl> <nl> - / / Import the result type . <nl> - auto swiftResultTy = <nl> - Impl . importType ( decl - > getReturnType ( ) , <nl> - ( isAuditedResult ? ImportTypeKind : : AuditedResult <nl> - : ImportTypeKind : : Result ) , <nl> - allowNSUIntegerAsInt , <nl> - / * isFullyBridgeable * / true , returnOptKind ) ; <nl> + SourceLoc noLoc { } ; <nl> + auto selfParam = ParamDecl : : createSelf ( noLoc , dc , false ) ; <nl> <nl> + OptionalTypeKind initOptionality ; <nl> + auto swiftResultTy = Impl . importFunctionReturnType ( <nl> + decl , decl - > getReturnType ( ) , allowNSUIntegerAsInt ) ; <nl> swiftResultTy - > getAnyOptionalObjectType ( initOptionality ) ; <nl> - } <nl> <nl> - auto result = Impl . createDeclWithClangNode < ConstructorDecl > ( <nl> - decl , name , noLoc , initOptionality , noLoc , selfParam , parameterList , <nl> - / * GenericParams = * / nullptr , noLoc , dc ) ; <nl> + auto result = Impl . 
createDeclWithClangNode < ConstructorDecl > ( <nl> + decl , name , noLoc , initOptionality , noLoc , selfParam , <nl> + parameterList , <nl> + / * GenericParams = * / nullptr , noLoc , dc ) ; <nl> <nl> - finishFuncDecl ( decl , result ) ; <nl> - return result ; <nl> - } <nl> + finishFuncDecl ( decl , result ) ; <nl> + return result ; <nl> + } <nl> <nl> - / / FIXME : Cannot import anything into a type context from here on . <nl> - if ( dc - > isTypeContext ( ) ) <nl> + / / TODO : properties and methods <nl> return nullptr ; <nl> + } <nl> <nl> / / Import the function type . If we have parameters , make sure their names <nl> / / get into the resulting function type . <nl> mmm a / lib / ClangImporter / ImportType . cpp <nl> ppp b / lib / ClangImporter / ImportType . cpp <nl> bool ClangImporter : : Implementation : : shouldAllowNSUIntegerAsInt ( <nl> bool isFromSystemModule , const clang : : NamedDecl * decl ) { <nl> if ( isFromSystemModule ) <nl> if ( auto identInfo = decl - > getIdentifier ( ) ) <nl> - return ! nameContainsUnsigned ( decl - > getName ( ) ) ; <nl> + return ! nameContainsUnsigned ( identInfo - > getName ( ) ) ; <nl> return false ; <nl> } <nl> <nl> static OptionalTypeKind getParamOptionality ( <nl> return OTK_ImplicitlyUnwrappedOptional ; <nl> } <nl> <nl> - Type ClangImporter : : Implementation : : <nl> - importFunctionType ( const clang : : FunctionDecl * clangDecl , <nl> - clang : : QualType resultType , <nl> - ArrayRef < const clang : : ParmVarDecl * > params , <nl> - bool isVariadic , bool isNoReturn , <nl> - bool isFromSystemModule , bool hasCustomName , <nl> - ParameterList * & parameterList , DeclName & name ) { <nl> - <nl> - bool allowNSUIntegerAsInt = <nl> - shouldAllowNSUIntegerAsInt ( isFromSystemModule , clangDecl ) ; <nl> - <nl> - / / CF function results can be managed if they are audited or <nl> + Type ClangImporter : : Implementation : : importFunctionReturnType ( <nl> + const clang : : FunctionDecl * clangDecl , clang : : QualType resultType , <nl> + bool allowNSUIntegerAsInt ) { <nl> + / / CF function results can be managed if they are audited or <nl> / / the ownership convention is explicitly declared . <nl> bool isAuditedResult = <nl> ( clangDecl & & <nl> importFunctionType ( const clang : : FunctionDecl * clangDecl , <nl> } <nl> <nl> / / Import the result type . <nl> - auto swiftResultTy = importType ( resultType , <nl> - ( isAuditedResult <nl> - ? ImportTypeKind : : AuditedResult <nl> - : ImportTypeKind : : Result ) , <nl> - allowNSUIntegerAsInt , <nl> - / * isFullyBridgeable * / true , <nl> - OptionalityOfReturn ) ; <nl> + return importType ( resultType , ( isAuditedResult ? ImportTypeKind : : AuditedResult <nl> + : ImportTypeKind : : Result ) , <nl> + allowNSUIntegerAsInt , <nl> + / * isFullyBridgeable * / true , OptionalityOfReturn ) ; <nl> + } <nl> + <nl> + Type ClangImporter : : Implementation : : <nl> + importFunctionType ( const clang : : FunctionDecl * clangDecl , <nl> + clang : : QualType resultType , <nl> + ArrayRef < const clang : : ParmVarDecl * > params , <nl> + bool isVariadic , bool isNoReturn , <nl> + bool isFromSystemModule , bool hasCustomName , <nl> + ParameterList * & parameterList , DeclName & name ) { <nl> + <nl> + bool allowNSUIntegerAsInt = <nl> + shouldAllowNSUIntegerAsInt ( isFromSystemModule , clangDecl ) ; <nl> + <nl> + auto swiftResultTy = <nl> + importFunctionReturnType ( clangDecl , resultType , allowNSUIntegerAsInt ) ; <nl> if ( ! swiftResultTy ) <nl> return Type ( ) ; <nl> <nl> mmm a / lib / ClangImporter / ImporterImpl . 
h <nl> ppp b / lib / ClangImporter / ImporterImpl . h <nl> class LLVM_LIBRARY_VISIBILITY ClangImporter : : Implementation <nl> ParameterList * & parameterList , <nl> DeclName & name ) ; <nl> <nl> + / / / \ brief Import the given function return type . <nl> + / / / <nl> + / / / \ param clangDecl The underlying declaration , if any ; should only be <nl> + / / / considered for any attributes it might carry . <nl> + / / / \ param resultType The result type of the function . <nl> + / / / \ param allowNSUIntegerAsInt If true , NSUInteger will be imported as Int <nl> + / / / in certain contexts . If false , it will always be imported as UInt . <nl> + / / / <nl> + / / / \ returns the imported function return type , or null if the type cannot be <nl> + / / / imported . <nl> + Type importFunctionReturnType ( const clang : : FunctionDecl * clangDecl , <nl> + clang : : QualType resultType , <nl> + bool allowNSUIntegerAsInt ) ; <nl> + <nl> / / / \ brief Import the parameter list for a function <nl> / / / <nl> / / / \ param clangDecl The underlying declaration , if any ; should only be <nl> mmm a / test / IDE / import_as_member_silgen . swift <nl> ppp b / test / IDE / import_as_member_silgen . swift <nl> <nl> / / RUN : % target - swift - frontend - emit - sil - I % S / Inputs / custom - modules % s 2 > & 1 | FileCheck - - check - prefix = SIL % s <nl> import ImportAsMember <nl> <nl> - public func returnGlobalVar ( ) - > Int32 { <nl> + public func returnGlobalVar ( ) - > Double { <nl> return Struct1 . globalVar <nl> } <nl> / / SIL - LABEL : sil { { . * } } returnGlobalVar { { . * } } ( ) - > Int32 { <nl>
[ Clang Importer ] cleanup import - as - init code
apple/swift
98e1d2d1ae41872e23faa9992284ffbf0fa74a46
2016-03-04T02:13:53Z
mmm a / dlib / image_transforms / fhog . h <nl> ppp b / dlib / image_transforms / fhog . h <nl> namespace dlib <nl> const int cells_nr = ( int ) ( ( double ) img . nr ( ) / ( double ) cell_size + 0 . 5 ) ; <nl> const int cells_nc = ( int ) ( ( double ) img . nc ( ) / ( double ) cell_size + 0 . 5 ) ; <nl> <nl> + if ( cells_nr = = 0 | | cells_nc = = 0 ) <nl> + { <nl> + hog . clear ( ) ; <nl> + return ; <nl> + } <nl> + <nl> array2d < matrix < float , 18 , 1 > > hist ( cells_nr , cells_nc ) ; <nl> for ( long r = 0 ; r < hist . nr ( ) ; + + r ) <nl> { <nl> namespace dlib <nl> / / memory for HOG features <nl> const int hog_nr = std : : max ( cells_nr - 2 , 0 ) ; <nl> const int hog_nc = std : : max ( cells_nc - 2 , 0 ) ; <nl> + if ( hog_nr = = 0 | | hog_nc = = 0 ) <nl> + { <nl> + hog . clear ( ) ; <nl> + return ; <nl> + } <nl> init_hog ( hog , hog_nr , hog_nc ) ; <nl> <nl> const int visible_nr = cells_nr * cell_size ; <nl> mmm a / dlib / test / fhog . cpp <nl> ppp b / dlib / test / fhog . cpp <nl> namespace <nl> } <nl> } <nl> <nl> + void test_on_small ( ) <nl> + { <nl> + print_spinner ( ) ; <nl> + array2d < unsigned char > img ; <nl> + dlib : : array < array2d < float > > hog ; <nl> + <nl> + / / do this just to make sure it doesn ' t crash on small images <nl> + for ( int i = 0 ; i < 10 ; + + i ) <nl> + { <nl> + img . set_size ( i , i ) ; <nl> + assign_all_pixels ( img , i ) ; <nl> + extract_fhog_features ( img , hog ) ; <nl> + } <nl> + for ( int i = 1 ; i < 10 ; + + i ) <nl> + { <nl> + img . set_size ( i , i + 1 ) ; <nl> + assign_all_pixels ( img , i ) ; <nl> + extract_fhog_features ( img , hog ) ; <nl> + } <nl> + for ( int i = 1 ; i < 10 ; + + i ) <nl> + { <nl> + img . set_size ( i + 1 , i ) ; <nl> + assign_all_pixels ( img , i ) ; <nl> + extract_fhog_features ( img , hog ) ; <nl> + } <nl> + } <nl> + <nl> void perform_test ( <nl> ) <nl> { <nl> + test_on_small ( ) ; <nl> + <nl> print_spinner ( ) ; <nl> / / load the testing data <nl> array2d < rgb_pixel > img ; <nl>
Fixed a bug in extract_fhog_features ( ) that happened when very small images were given to it .
davisking/dlib
0bdcb98c2c6dcd5e09692d506a4029cde0e14a09
2013-10-04T02:17:58Z
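Editorial note on the commit above: the new test in the diff adapts easily into a standalone check that the crash on tiny inputs is gone. This is a hedged sketch assuming the usual dlib headers and the default 8-pixel cell size; the 4x4 image size and the pixel value 100 are invented for the demo, the API calls mirror the test code in the diff.

    #include <dlib/array.h>
    #include <dlib/array2d.h>
    #include <dlib/pixel.h>
    #include <dlib/image_transforms.h>

    int main() {
      dlib::array2d<unsigned char> img;
      img.set_size(4, 4);                     // far fewer than the 3 cells per dimension FHOG needs
      dlib::assign_all_pixels(img, 100);
      dlib::array<dlib::array2d<float>> hog;
      dlib::extract_fhog_features(img, hog);  // with this fix: no crash, hog is simply left empty
      return hog.size() == 0 ? 0 : 1;
    }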
mmm a / table / table_test . cc <nl> ppp b / table / table_test . cc <nl> class HarnessTest : public testing : : Test { <nl> std : : vector < TestArgs > args = GenerateArgList ( ) ; <nl> assert ( part ) ; <nl> assert ( part < = total ) ; <nl> - size_t start_i = ( part - 1 ) * args . size ( ) / total ; <nl> - size_t end_i = part * args . size ( ) / total ; <nl> - for ( unsigned int i = static_cast < unsigned int > ( start_i ) ; i < end_i ; i + + ) { <nl> + for ( unsigned int i = 0 ; i < args . size ( ) ; i + + ) { <nl> + if ( ( i % total ) + 1 ! = part ) { <nl> + continue ; <nl> + } <nl> Init ( args [ i ] ) ; <nl> Random rnd ( test : : RandomSeed ( ) + 5 ) ; <nl> for ( int num_entries = 0 ; num_entries < 2000 ; <nl> num_entries + = ( num_entries < 50 ? 1 : 200 ) ) { <nl> - if ( ( num_entries % 10 ) = = 0 ) { <nl> - fprintf ( stderr , " case % d of % d : num_entries = % d \ n " , ( i + 1 ) , <nl> - static_cast < int > ( args . size ( ) ) , num_entries ) ; <nl> - } <nl> for ( int e = 0 ; e < num_entries ; e + + ) { <nl> std : : string v ; <nl> Add ( test : : RandomKey ( & rnd , rnd . Skewed ( 4 ) ) , <nl> TEST_F ( GeneralTableTest , ApproximateOffsetOfCompressed ) { <nl> <nl> / / RandomizedHarnessTest is very slow for certain combination of arguments <nl> / / Split into 8 pieces to reduce the time individual tests take . <nl> - TEST_F ( HarnessTest , Randomized1n2 ) { <nl> - / / part 1 , 2 out of 8 <nl> + TEST_F ( HarnessTest , Randomized1 ) { <nl> + / / part 1 out of 8 <nl> const size_t part = 1 ; <nl> const size_t total = 8 ; <nl> RandomizedHarnessTest ( part , total ) ; <nl> - RandomizedHarnessTest ( part + 1 , total ) ; <nl> } <nl> <nl> - TEST_F ( HarnessTest , Randomized3n4 ) { <nl> - / / part 3 , 4 out of 8 <nl> + TEST_F ( HarnessTest , Randomized2 ) { <nl> + / / part 2 out of 8 <nl> + const size_t part = 2 ; <nl> + const size_t total = 8 ; <nl> + RandomizedHarnessTest ( part , total ) ; <nl> + } <nl> + <nl> + TEST_F ( HarnessTest , Randomized3 ) { <nl> + / / part 3 out of 8 <nl> const size_t part = 3 ; <nl> const size_t total = 8 ; <nl> RandomizedHarnessTest ( part , total ) ; <nl> - RandomizedHarnessTest ( part + 1 , total ) ; <nl> + } <nl> + <nl> + TEST_F ( HarnessTest , Randomized4 ) { <nl> + / / part 4 out of 8 <nl> + const size_t part = 4 ; <nl> + const size_t total = 8 ; <nl> + RandomizedHarnessTest ( part , total ) ; <nl> } <nl> <nl> TEST_F ( HarnessTest , Randomized5 ) { <nl>
Evenly split HarnessTest . Randomized
facebook/rocksdb
fc522bdb3ebbcc9354407717733e1f886504a2b8
2018-05-04T22:28:06Z
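Editorial note on the commit above: parts used to get contiguous slices of the generated arg list, which bunched the slow configurations into a few shards; after this change indices are dealt out round-robin. A hedged toy illustration of the scheduling effect; the arg count of 20 is invented, only the ( i % total ) + 1 == part condition comes from the diff.

    #include <cstdio>

    int main() {
      const unsigned total = 8;        // number of Randomized* test shards
      const unsigned num_args = 20;    // invented for the demo
      for (unsigned part = 1; part <= total; ++part) {
        std::printf("part %u runs args:", part);
        for (unsigned i = 0; i < num_args; ++i)
          if ((i % total) + 1 == part) std::printf(" %u", i);  // round-robin assignment
        std::printf("\n");
      }
      return 0;
    }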
mmm a / src / flag - definitions . h <nl> ppp b / src / flag - definitions . h <nl> DEFINE_BOOL ( trace_turbo_types , true , " trace TurboFan ' s types " ) <nl> DEFINE_BOOL ( trace_turbo_scheduler , false , " trace TurboFan ' s scheduler " ) <nl> DEFINE_BOOL ( trace_turbo_reduction , false , " trace TurboFan ' s various reducers " ) <nl> DEFINE_BOOL ( trace_turbo_jt , false , " trace TurboFan ' s jump threading " ) <nl> - DEFINE_BOOL ( turbo_asm , false , " enable TurboFan for asm . js code " ) <nl> + DEFINE_BOOL ( turbo_asm , true , " enable TurboFan for asm . js code " ) <nl> DEFINE_BOOL ( turbo_verify , false , " verify TurboFan graphs at each phase " ) <nl> DEFINE_BOOL ( turbo_stats , false , " print TurboFan statistics " ) <nl> DEFINE_BOOL ( turbo_types , true , " use typed lowering in TurboFan " ) <nl>
[ turbofan ] Enable stage 1 .
v8/v8
10750f0e80b0cdf5751ae259277f2890ce5631ee
2014-12-02T11:07:26Z
mmm a / documentation / sphinx / source / mr - status - json - schemas . rst . inc <nl> ppp b / documentation / sphinx / source / mr - status - json - schemas . rst . inc <nl> <nl> ] , <nl> " command_line " : " - r simulation " , <nl> " memory " : { <nl> - " available_bytes " : 0 , / / an estimate of the process ' fair share of the memory available to fdbservers <nl> + " available_bytes " : 0 , / / an estimate of the process ' fair share of the memory available to fdbservers <nl> " limit_bytes " : 0 , / / memory limit per process <nl> " unused_allocated_memory " : 0 , <nl> - " used_bytes " : 0 <nl> + " used_bytes " : 0 / / virtual memory size of the process <nl> } , <nl> " messages " : [ <nl> { <nl> <nl> " counter " : 0 , <nl> " roughness " : 0 . 0 <nl> } , <nl> - " reads " : { <nl> + " reads " : { / / measures number of completed read requests <nl> " hz " : 0 . 0 , <nl> " counter " : 0 , <nl> " roughness " : 0 . 0 <nl> } , <nl> - " read_requests " : { <nl> + " read_requests " : { / / measures number of incoming read requests <nl> " hz " : 0 . 0 , <nl> " counter " : 0 , <nl> " roughness " : 0 . 0 <nl> } <nl> } , <nl> - " bytes " : { / / of operations ( independent of hz ) . Perfectly spaced operations will have a roughness of 1 . 0 . Randomly spaced ( Poisson - distributed ) operations will have a roughness of 2 . 0 , with increased bunching resulting in increased values . Higher roughness can result in increased latency due to increased queuing . <nl> + " bytes " : { / / measures number of logical bytes read / written ( ignoring replication factor and overhead on disk ) . Perfectly spaced operations will have a roughness of 1 . 0 . Randomly spaced ( Poisson - distributed ) operations will have a roughness of 2 . 0 , with increased bunching resulting in increased values . Higher roughness can result in increased latency due to increased queuing . <nl> " written " : { <nl> " hz " : 0 . 0 , <nl> " counter " : 0 , <nl> <nl> " system_kv_size_bytes " : 0 , / / estimated <nl> " partitions_count " : 2 , <nl> " moving_data " : { <nl> - " total_written_bytes " : 0 , <nl> - " in_flight_bytes " : 0 , <nl> - " in_queue_bytes " : 0 , <nl> + " total_written_bytes " : 0 , / / reset whenever data distributor is re - recruited <nl> + " in_flight_bytes " : 0 , / / number of bytes currently being moved between storage servers <nl> + " in_queue_bytes " : 0 , / / number of bytes in the data distributor queue that should be moved ( but are not yet being transferred between storage servers ) <nl> " highest_priority " : 0 <nl> } , <nl> " team_trackers " : [ <nl> <nl> } , <nl> " memory " : { <nl> " free_bytes " : 0 , / / an estimate of how many bytes are free to allocate to fdbservers without swapping <nl> - " committed_bytes " : 0 , <nl> + " committed_bytes " : 0 , / / an estimate of the number of bytes of memory on the machine that are not free for allocation <nl> " total_bytes " : 0 / / an estimate of total physical RAM <nl> } , <nl> " contributing_workers " : 4 , <nl>
Merge pull request from tclinken / add - status - doc - comments - to - 6 . 2
apple/foundationdb
d12dd1b6a2cc83fbe9e856348aa51a7909d96345
2019-12-19T16:47:51Z
mmm a / imgui . cpp <nl> ppp b / imgui . cpp <nl> <nl> - / / ImGui library v1 . 15 wip <nl> + / / ImGui library v1 . 15 <nl> / / See ImGui : : ShowTestWindow ( ) for sample code . <nl> / / Read ' Programmer guide ' below for notes on how to setup ImGui in your codebase . <nl> / / Get latest version at https : / / github . com / ocornut / imgui <nl> mmm a / imgui . h <nl> ppp b / imgui . h <nl> <nl> - / / ImGui library v1 . 15 wip <nl> + / / ImGui library v1 . 15 <nl> / / See . cpp file for commentary . <nl> / / See ImGui : : ShowTestWindow ( ) for sample code . <nl> / / Read ' Programmer guide ' in . cpp for notes on how to setup ImGui in your codebase . <nl>
Version number
ocornut/imgui
ee3355fe8e2d51c8e88b169c7275dcf3048734f8
2014-11-07T08:43:00Z
mmm a / folly / futures / Future - inl . h <nl> ppp b / folly / futures / Future - inl . h <nl> std : : shared_ptr < Timekeeper > getTimekeeperSingleton ( ) ; <nl> <nl> namespace futures { <nl> namespace detail { <nl> + <nl> + / / InvokeResultWrapper and wrapInvoke enable wrapping a result value in its <nl> + / / nearest Future - type counterpart capable of also carrying an exception . <nl> + / / e . g . <nl> + / / ( semi ) Future < T > - > ( semi ) Future < T > ( no change ) <nl> + / / Try < T > - > Try < T > ( no change ) <nl> + / / void - > Try < folly : : Unit > <nl> + / / T - > Try < T > <nl> + template < typename T > <nl> + struct InvokeResultWrapperBase { <nl> + template < typename F > <nl> + static T wrapResult ( F fn ) { <nl> + return T ( fn ( ) ) ; <nl> + } <nl> + static T wrapException ( exception_wrapper & & e ) { <nl> + return T ( std : : move ( e ) ) ; <nl> + } <nl> + } ; <nl> + template < typename T > <nl> + struct InvokeResultWrapper : InvokeResultWrapperBase < Try < T > > { } ; <nl> + template < typename T > <nl> + struct InvokeResultWrapper < Try < T > > : InvokeResultWrapperBase < Try < T > > { } ; <nl> + template < typename T > <nl> + struct InvokeResultWrapper < SemiFuture < T > > <nl> + : InvokeResultWrapperBase < SemiFuture < T > > { } ; <nl> + template < typename T > <nl> + struct InvokeResultWrapper < Future < T > > : InvokeResultWrapperBase < Future < T > > { } ; <nl> + template < > <nl> + struct InvokeResultWrapper < void > : InvokeResultWrapperBase < Try < Unit > > { <nl> + template < typename F > <nl> + static Try < Unit > wrapResult ( F fn ) { <nl> + fn ( ) ; <nl> + return Try < Unit > ( unit ) ; <nl> + } <nl> + } ; <nl> + <nl> + template < typename T , typename F > <nl> + auto wrapInvoke ( folly : : Try < T > & & t , F & & f ) { <nl> + auto fn = [ & ] ( ) { <nl> + return std : : forward < F > ( f ) ( <nl> + t . template get < <nl> + false , <nl> + typename futures : : detail : : valueCallableResult < T , F > : : FirstArg > ( ) ) ; <nl> + } ; <nl> + using FnResult = decltype ( fn ( ) ) ; <nl> + using Wrapper = InvokeResultWrapper < FnResult > ; <nl> + if ( t . hasException ( ) ) { <nl> + return Wrapper : : wrapException ( std : : move ( t ) . exception ( ) ) ; <nl> + } <nl> + return Wrapper : : wrapResult ( fn ) ; <nl> + } <nl> + <nl> / / Guarantees that the stored functor is destructed before the stored promise <nl> / / may be fulfilled . Assumes the stored functor to be noexcept - destructible . <nl> template < typename T , typename F > <nl> template < class T > <nl> template < typename F > <nl> SemiFuture < typename futures : : detail : : valueCallableResult < T , F > : : value_type > <nl> SemiFuture < T > : : deferValue ( F & & func ) & & { <nl> - return std : : move ( * this ) . defer ( [ f = std : : forward < F > ( func ) ] ( <nl> - folly : : Try < T > & & t ) mutable { <nl> - return std : : forward < F > ( f ) ( <nl> - t . template get < <nl> - false , <nl> - typename futures : : detail : : valueCallableResult < T , F > : : FirstArg > ( ) ) ; <nl> - } ) ; <nl> + return std : : move ( * this ) . 
defer ( <nl> + [ f = std : : forward < F > ( func ) ] ( folly : : Try < T > & & t ) mutable { <nl> + return futures : : detail : : wrapInvoke ( std : : move ( t ) , std : : forward < F > ( f ) ) ; <nl> + } ) ; <nl> } <nl> <nl> template < class T > <nl> template < typename F > <nl> Future < typename futures : : detail : : valueCallableResult < T , F > : : value_type > <nl> Future < T > : : thenValue ( F & & func ) & & { <nl> auto lambdaFunc = [ f = std : : forward < F > ( func ) ] ( folly : : Try < T > & & t ) mutable { <nl> - return std : : forward < F > ( f ) ( <nl> - t . template get < <nl> - false , <nl> - typename futures : : detail : : valueCallableResult < T , F > : : FirstArg > ( ) ) ; <nl> + return futures : : detail : : wrapInvoke ( std : : move ( t ) , std : : forward < F > ( f ) ) ; <nl> } ; <nl> using R = futures : : detail : : tryCallableResult < T , decltype ( lambdaFunc ) > ; <nl> return this - > thenImplementation ( std : : move ( lambdaFunc ) , R { } ) ; <nl> mmm a / folly / futures / test / FutureTest . cpp <nl> ppp b / folly / futures / test / FutureTest . cpp <nl> TEST ( Future , ThenRecursion ) { <nl> EXPECT_EQ ( 42 , recursion ( & executor , 100000 ) . getVia ( & executor ) ) ; <nl> } <nl> <nl> + / / We want to detect if the Try value is being dereferenced before being <nl> + / / checked for validity . The only way to do that is with a custom Try impl . <nl> + struct NoThrowTestResult { } ; <nl> + namespace folly { <nl> + / / Forward all methods except throwIfFailed ( ) . <nl> + template < > <nl> + class Try < NoThrowTestResult > : public Try < void > { <nl> + public : <nl> + using Try < void > : : Try ; <nl> + <nl> + explicit Try ( const NoThrowTestResult & ) : Try < void > ( ) { } <nl> + <nl> + NoThrowTestResult value ( ) const { <nl> + throwIfFailed ( ) ; <nl> + return NoThrowTestResult ( ) ; <nl> + } <nl> + NoThrowTestResult operator * ( ) const { <nl> + return value ( ) ; <nl> + } <nl> + <nl> + / / If the Try contains an exception , throws it <nl> + inline void throwIfFailed ( ) const { <nl> + EXPECT_FALSE ( this - > hasException ( ) ) <nl> + < < " throwIfFailed ( ) should never have been invoked . " ; <nl> + Try < void > : : throwIfFailed ( ) ; <nl> + } <nl> + <nl> + template < bool isTry , typename R > <nl> + typename std : : enable_if < isTry , R > : : type get ( ) { <nl> + return std : : forward < R > ( * this ) ; <nl> + } <nl> + <nl> + template < bool isTry , typename R > <nl> + typename std : : enable_if < ! isTry , R > : : type get ( ) { <nl> + return std : : forward < R > ( value ( ) ) ; <nl> + } <nl> + } ; <nl> + } / / namespace folly <nl> + <nl> + TEST ( Future , NoThrow ) { <nl> + / / Test that the Futures implementation never invokes c + + throw , by <nl> + / / accessing the value without first checking whether the value exists . <nl> + const std : : string kErrorMessage = " NoThrow test " ; <nl> + / / Test thenValue <nl> + { <nl> + Try < NoThrowTestResult > t = <nl> + Future < NoThrowTestResult > ( std : : runtime_error ( kErrorMessage ) ) <nl> + . thenValue ( [ ] ( NoThrowTestResult & & value ) { <nl> + ADD_FAILURE ( ) < < " This code should be unreachable " ; <nl> + return std : : move ( value ) ; <nl> + } ) <nl> + . getTry ( ) ; <nl> + <nl> + EXPECT_TRUE ( t . hasException ( ) ) ; <nl> + EXPECT_EQ ( t . exception ( ) . get_exception ( ) - > what ( ) , kErrorMessage ) ; <nl> + } <nl> + <nl> + / / Test deferValue <nl> + { <nl> + Try < NoThrowTestResult > t = <nl> + SemiFuture < NoThrowTestResult > ( std : : runtime_error ( kErrorMessage ) ) <nl> + . 
deferValue ( [ ] ( NoThrowTestResult & & value ) { <nl> + ADD_FAILURE ( ) < < " This code should be unreachable " ; <nl> + return std : : move ( value ) ; <nl> + } ) <nl> + . via ( & InlineExecutor : : instance ( ) ) <nl> + . getTry ( ) ; <nl> + <nl> + EXPECT_TRUE ( t . hasException ( ) ) ; <nl> + EXPECT_EQ ( t . exception ( ) . get_exception ( ) - > what ( ) , kErrorMessage ) ; <nl> + } <nl> + } <nl> + <nl> # if FOLLY_FUTURE_USING_FIBER <nl> <nl> TEST ( Future , BatonWait ) { <nl>
avoid throwing in thenValue
facebook/folly
514a49f56395ba0c5dffbe58559cff525f848503
2019-04-24T22:05:46Z
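A minimal standalone sketch (not part of the commit above) of the caller-visible behavior the folly change and its new test exercise: a Future that already holds an exception skips the thenValue continuation and the exception comes back in the resulting Try, without the framework dereferencing the failed value internally. It only uses calls that appear in the diff itself (the exception constructor, thenValue, getTry, hasException).

    #include <folly/futures/Future.h>
    #include <iostream>
    #include <stdexcept>

    int main() {
      // A Future that is already failed; the continuation must not run.
      folly::Future<int> f(std::runtime_error("boom"));

      folly::Try<int> t = std::move(f)
                              .thenValue([](int v) { return v + 1; })  // skipped
                              .getTry();

      std::cout << (t.hasException() ? "exception propagated" : "got value")
                << std::endl;
      return 0;
    }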
mmm a / Code / CryPlugins / CryGamePlatform / Module / Steam / SteamPlatform . h <nl> ppp b / Code / CryPlugins / CryGamePlatform / Module / Steam / SteamPlatform . h <nl> namespace Cry <nl> virtual ApplicationIdentifier GetApplicationIdentifier ( ) const override ; <nl> <nl> virtual bool OpenDialog ( EDialog dialog ) const override ; <nl> - virtual bool OpenDialogWithTargetUser ( EUserTargetedDialog dialog , IUser : : Identifier targetUserId ) const ; <nl> + virtual bool OpenDialogWithTargetUser ( EUserTargetedDialog dialog , IUser : : Identifier targetUserId ) const override ; <nl> virtual bool OpenDialog ( const char * szPage ) const override ; <nl> - virtual bool OpenDialogWithTargetUser ( const char * szPage , IUser : : Identifier otherUserId ) const ; <nl> + virtual bool OpenDialogWithTargetUser ( const char * szPage , IUser : : Identifier otherUserId ) const override ; <nl> virtual bool OpenBrowser ( const char * szURL ) const override ; <nl> <nl> virtual bool CanOpenPurchaseOverlay ( ) const override ; <nl>
! XB ( GamePlatform ) Missing override keyword
CRYTEK/CRYENGINE
c6ccf282ba67a5bdcdad19bd55b6d84bdf0317dc
2018-06-08T12:09:11Z
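A small self-contained sketch (hypothetical interface names, unrelated to the CryEngine code) of the class of bug the added override keywords guard against: without override, a signature mismatch silently declares an unrelated function that hides the base-class virtual instead of overriding it; with override the compiler rejects the mismatch at compile time.

    struct IDialogHost {
      virtual bool OpenDialog(const char* page) const = 0;
      virtual ~IDialogHost() = default;
    };

    struct SteamHost : IDialogHost {
      // bool OpenDialog(const char* page) override;       // error: marked 'override'
      //                                                   // but does not override
      //                                                   // (missing const)
      bool OpenDialog(const char* page) const override {   // OK: matches the base
        return true;
      }
    };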
mmm a / tensorflow / compiler / mlir / lite / tests / lower - static - tensor - list . mlir <nl> ppp b / tensorflow / compiler / mlir / lite / tests / lower - static - tensor - list . mlir <nl> func @ tensorlistGetItemWithUnknownRank ( % arg0 : tensor < * xf32 > , % arg1 : tensor < 1xi32 <nl> / / CHECK : return % 0 , % arg0 : tensor < * xf32 > , tensor < * xf32 > <nl> } <nl> <nl> - func @ tensorlistStackWithConstantElementShape ( % arg0 : tensor < 2x3xf32 > ) - > ( tensor < 2x3xf32 > ) { <nl> + func @ tensorlistStackWithConstantElementShape ( % arg0 : tensor < ? x3xf32 > ) - > ( tensor < 2x3xf32 > ) { <nl> % cst = constant dense < 3 > : tensor < 1xi32 > <nl> - % 0 = " tf . TensorListFromTensor " ( % arg0 , % cst ) : ( tensor < 2x3xf32 > , tensor < 1xi32 > ) - > tensor < ! tf . variant < tensor < 3xf32 > > > <nl> + % 0 = " tf . TensorListFromTensor " ( % arg0 , % cst ) : ( tensor < ? x3xf32 > , tensor < 1xi32 > ) - > tensor < ! tf . variant < tensor < 3xf32 > > > <nl> % 1 = " tf . TensorListStack " ( % 0 , % cst ) { num_elements = 2 : i64 } : ( tensor < ! tf . variant < tensor < 3xf32 > > > , tensor < 1xi32 > ) - > tensor < 2x3xf32 > <nl> return % 1 : tensor < 2x3xf32 > <nl> <nl> / / CHECK - LABEL : tensorlistStackWithConstantElementShape <nl> / / CHECK : [ [ ELEM_SHAPE : % cst . * ] ] = constant dense < 3 > : tensor < 1xi32 > <nl> - / / CHECK - NEXT : [ [ SHAPE : % . * ] ] = " tf . Shape " ( % arg0 ) : ( tensor < 2x3xf32 > ) - > tensor < ? xi32 > <nl> - / / CHECK - NEXT : [ [ RESHAPE : % . * ] ] = " tf . Reshape " ( % arg0 , [ [ SHAPE ] ] ) : ( tensor < 2x3xf32 > , tensor < ? xi32 > ) - > tensor < 2x3xf32 > <nl> + / / CHECK - NEXT : [ [ SHAPE : % . * ] ] = " tf . Shape " ( % arg0 ) : ( tensor < ? x3xf32 > ) - > tensor < ? xi32 > <nl> + / / CHECK - NEXT : [ [ RESHAPE : % . * ] ] = " tf . Reshape " ( % arg0 , [ [ SHAPE ] ] ) : ( tensor < ? x3xf32 > , tensor < ? xi32 > ) - > tensor < 2x3xf32 > <nl> / / CHECK - NEXT : return [ [ RESHAPE ] ] : tensor < 2x3xf32 > <nl> } <nl> <nl> mmm a / tensorflow / compiler / mlir / lite / transforms / lower_static_tensor_list . cc <nl> ppp b / tensorflow / compiler / mlir / lite / transforms / lower_static_tensor_list . cc <nl> struct ConvertTensorListStack : public ConversionPattern { <nl> Value * input = operands [ 0 ] ; <nl> Value * element_shape = operands [ 1 ] ; <nl> <nl> - auto shape_type = rewriter . getTensorType ( { - 1 } , rewriter . getIntegerType ( 32 ) ) ; <nl> / / If the ` element_shape ` is a known constant ( which is defined when calling <nl> / / ` tensor_list_stack ` ) and also valid ( not scalar ) , we rewrite this op to a <nl> / / trivial Reshape op ( that doesn ' t actually change the input ' s shape ) and <nl> struct ConvertTensorListStack : public ConversionPattern { <nl> rewriter . replaceOp ( op , { input } , llvm : : None ) ; <nl> return matchSuccess ( ) ; <nl> } <nl> + <nl> + auto shape_type = rewriter . getTensorType ( { - 1 } , rewriter . getIntegerType ( 32 ) ) ; <nl> auto new_shape = rewriter . create < TF : : ShapeOp > ( loc , shape_type , input ) ; <nl> SmallVector < int64_t , 8 > output_shape = { op . num_elements ( ) . getSExtValue ( ) } ; <nl> for ( auto dim : dense_elem_attr . getIntValues ( ) ) <nl>
Minor code style fix for TensorListStack conversion
tensorflow/tensorflow
204a0ce4eba255c8546445da83f9a194d46a2694
2019-09-24T21:14:04Z
mmm a / src / mongo / db / database . cpp <nl> ppp b / src / mongo / db / database . cpp <nl> <nl> # include " pdfile . h " <nl> # include " database . h " <nl> # include " instance . h " <nl> + # include " introspect . h " <nl> # include " clientcursor . h " <nl> # include " databaseholder . h " <nl> <nl> namespace mongo { <nl> checkDuplicateUncasedNames ( true ) ; <nl> / / If already exists , open . Otherwise behave as if empty until <nl> / / there ' s a write , then open . <nl> - if ( ! newDb | | cmdLine . defaultProfile ) { <nl> + if ( ! newDb ) { <nl> namespaceIndex . init ( ) ; <nl> if ( _openAllFiles ) <nl> openAllFiles ( ) ; <nl> namespace mongo { <nl> <nl> verify ( cc ( ) . database ( ) = = this ) ; <nl> <nl> - if ( ! namespaceIndex . details ( profileName . c_str ( ) ) ) { <nl> - log ( ) < < " creating profile collection : " < < profileName < < endl ; <nl> - BSONObjBuilder spec ; <nl> - spec . appendBool ( " capped " , true ) ; <nl> - spec . append ( " size " , 1024 * 1024 ) ; <nl> - if ( ! userCreateNS ( profileName . c_str ( ) , spec . done ( ) , errmsg , false / * we don ' t replica profile messages * / ) ) { <nl> - return false ; <nl> - } <nl> - } <nl> + if ( ! getOrCreateProfileCollection ( this , true ) ) <nl> + return false ; <nl> + <nl> profile = newLevel ; <nl> return true ; <nl> } <nl> mmm a / src / mongo / db / introspect . cpp <nl> ppp b / src / mongo / db / introspect . cpp <nl> namespace mongo { <nl> } <nl> <nl> / / write : not replicated <nl> - NamespaceDetails * d = db - > namespaceIndex . details ( ns ) ; <nl> - if ( d ) { <nl> + / / get or create the profiling collection <nl> + NamespaceDetails * details = getOrCreateProfileCollection ( db ) ; <nl> + if ( details ) { <nl> int len = p . objsize ( ) ; <nl> - Record * r = theDataFileMgr . fast_oplog_insert ( d , ns , len ) ; <nl> + Record * r = theDataFileMgr . fast_oplog_insert ( details , ns , len ) ; <nl> memcpy ( getDur ( ) . writingPtr ( r - > data ( ) , len ) , p . objdata ( ) , len ) ; <nl> } <nl> - else { <nl> - static time_t last ; <nl> + } <nl> + <nl> + NamespaceDetails * getOrCreateProfileCollection ( Database * db , bool force ) { <nl> + fassert ( 16363 , db ) ; <nl> + const char * profileName = db - > profileName . c_str ( ) ; <nl> + NamespaceDetails * details = db - > namespaceIndex . details ( profileName ) ; <nl> + if ( ! details & & ( cmdLine . defaultProfile | | force ) ) { <nl> + / / system . profile namespace doesn ' t exist ; create it <nl> + log ( ) < < " creating profile collection : " < < profileName < < endl ; <nl> + string errmsg ; <nl> + if ( ! userCreateNS ( db - > profileName . c_str ( ) , <nl> + BSON ( " capped " < < true < < " size " < < 1024 * 1024 ) , errmsg , false ) ) { <nl> + log ( ) < < " could not create ns " < < db - > profileName < < " : " < < errmsg < < endl ; <nl> + return NULL ; <nl> + } <nl> + details = db - > namespaceIndex . details ( profileName ) ; <nl> + } <nl> + if ( ! details ) { <nl> + / / failed to get or create profile collection <nl> + static time_t last = time ( 0 ) - 10 ; / / warn the first time <nl> if ( time ( 0 ) > last + 10 ) { <nl> - log ( ) < < " profile : warning ns " < < ns < < " does not exist " < < endl ; <nl> + log ( ) < < " profile : warning ns " < < profileName < < " does not exist " < < endl ; <nl> last = time ( 0 ) ; <nl> } <nl> } <nl> + return details ; <nl> } <nl> <nl> } / / namespace mongo <nl> mmm a / src / mongo / db / introspect . h <nl> ppp b / src / mongo / db / introspect . 
h <nl> namespace mongo { <nl> <nl> void profile ( const Client & c , CurOp & currentOp ) ; <nl> <nl> + / * * <nl> + * Get ( or create ) the profile collection <nl> + * <nl> + * @ param db Database in which to create the profile collection <nl> + * @ param force Always create the collection if it does not exist <nl> + * @ return NamespaceDetails for the newly created collection , or NULL on error <nl> + * * / <nl> + NamespaceDetails * getOrCreateProfileCollection ( Database * db , bool force = false ) ; <nl> + <nl> } / / namespace mongo <nl>
SERVER - 6222 : lazy init profile collection
mongodb/mongo
1d58eed0e23bb446388205a1df7f3eebef1f29a2
2012-07-02T19:16:13Z
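The commit replaces eager creation of system.profile at database-open time with creation on first use. Below is a rough, self-contained sketch of that lazy get-or-create shape; Database, Collection, lookup and createCapped are toy stand-ins, not the mongod API.

    #include <cstddef>
    #include <map>
    #include <string>

    struct Collection { bool capped; std::size_t maxBytes; };

    struct Database {
      std::map<std::string, Collection> colls;
      Collection* lookup(const std::string& ns) {
        auto it = colls.find(ns);
        return it == colls.end() ? nullptr : &it->second;
      }
      Collection* createCapped(const std::string& ns, std::size_t bytes) {
        colls[ns] = Collection{true, bytes};
        return &colls[ns];
      }
    };

    // Look the profile collection up first; create it only when it is missing
    // and either profiling is enabled by default or the caller forces creation.
    Collection* getOrCreateProfileCollection(Database* db, bool defaultProfile,
                                             bool force = false) {
      Collection* coll = db->lookup("system.profile");
      if (!coll && (defaultProfile || force)) {
        coll = db->createCapped("system.profile", 1024 * 1024);  // 1 MB capped
      }
      return coll;  // may still be null; callers must handle that
    }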
mmm a / Documentation / Books / AQL / Operations / With . md <nl> ppp b / Documentation / Books / AQL / Operations / With . md <nl> However , if client applications specify the list of used collections for all <nl> their queries using * WITH * , then no deadlocks will happen and no queries will <nl> be aborted due to deadlock situations . <nl> <nl> + From ArangoDB 3 . 1 onwards ` WITH ` is required for traversals in a <nl> + clustered environment in order to avoid deadlocks . <nl> + <nl> Note that for queries that access only a single collection or that have all <nl> collection names specified somewhere else in the query string , there is no <nl> need to use * WITH * . * WITH * is only useful when the AQL query parser cannot <nl> mmm a / Documentation / Books / Manual / ReleaseNotes / UpgradingChanges31 . md <nl> ppp b / Documentation / Books / Manual / ReleaseNotes / UpgradingChanges31 . md <nl> The behavior of the AQL array comparison operators has changed for empty arrays : <nl> * ` [ 1 , 2 ] NONE = = 1 ` will return ` false ` <nl> * ` [ 2 , 2 ] NONE = = 1 ` will return ` true ` <nl> <nl> + * ` WITH ` in cluster traversals is now mandatory in order to avoid deadlocks . <nl> + <nl> Data format changes <nl> mmmmmmmmmmmmmmmmmm - <nl> <nl>
documented WITH change
arangodb/arangodb
5c464301b2f0692522e782c776dc597420ec2e84
2017-05-25T21:01:33Z
mmm a / lib / Sema / CSDiag . cpp <nl> ppp b / lib / Sema / CSDiag . cpp <nl> static bool tryRawRepresentableFixIts ( InFlightDiagnostic & diag , <nl> toType , fromType ) <nl> . highlight ( exprRange ) <nl> . fixItInsert ( exprRange . Start , fixItBefore ) <nl> - . fixItInsert ( exprRange . End , fixItAfter ) ; <nl> + . fixItInsertAfter ( exprRange . End , fixItAfter ) ; <nl> } <nl> } ; <nl> <nl> mmm a / test / Sema / enum_raw_representable . swift <nl> ppp b / test / Sema / enum_raw_representable . swift <nl> func rdar32431736 ( ) { <nl> <nl> let myE1 : E = items1 . first <nl> / / expected - error @ - 1 { { cannot convert value of type ' String ? ' to specified type ' E ' } } <nl> - / / expected - note @ - 2 { { construct ' E ' from unwrapped ' String ' value } } { { 17 - 17 = E ( rawValue : } } { { 24 - 24 = ! ) } } <nl> + / / expected - note @ - 2 { { construct ' E ' from unwrapped ' String ' value } } { { 17 - 17 = E ( rawValue : } } { { 29 - 29 = ! ) } } <nl> <nl> let myE2 : E = items2 ? . first <nl> / / expected - error @ - 1 { { cannot convert value of type ' String ? ' to specified type ' E ' } } <nl> - / / expected - note @ - 2 { { construct ' E ' from unwrapped ' String ' value } } { { 17 - 17 = E ( rawValue : ( } } { { 25 - 25 = ) ! ) } } <nl> + / / expected - note @ - 2 { { construct ' E ' from unwrapped ' String ' value } } { { 17 - 17 = E ( rawValue : ( } } { { 30 - 30 = ) ! ) } } <nl> } <nl> <nl> / / rdar : / / problem / 32431165 - improve diagnostic for raw representable argument mismatch <nl>
Merge pull request from xedin / rdar - 32431736 - fix
apple/swift
4d2ace6ff0c18817104557e2a0c92120a9a08bc5
2017-07-19T06:12:37Z
mmm a / dbms / src / Client / Connection . cpp <nl> ppp b / dbms / src / Client / Connection . cpp <nl> void Connection : : receiveHello ( ) <nl> { <nl> readStringBinary ( server_timezone , * in ) ; <nl> } <nl> - if ( server_revision > = DBMS_MIN_REVISION_WITH_SERVER_GROUP_NAME ) <nl> + if ( server_revision > = DBMS_MIN_REVISION_WITH_SERVER_DISPLAY_NAME ) <nl> { <nl> - readStringBinary ( server_group_name , * in ) ; <nl> + readStringBinary ( server_display_name , * in ) ; <nl> } <nl> } <nl> else if ( packet_type = = Protocol : : Server : : Exception ) <nl> const String & Connection : : getServerTimezone ( ) <nl> return server_timezone ; <nl> } <nl> <nl> - const String & Connection : : getServerGroupName ( ) <nl> + const String & Connection : : getServerDisplayName ( ) <nl> { <nl> if ( ! connected ) <nl> connect ( ) ; <nl> <nl> - return server_group_name ; <nl> + return server_display_name ; <nl> } <nl> <nl> void Connection : : forceConnected ( ) <nl> mmm a / dbms / src / Client / Connection . h <nl> ppp b / dbms / src / Client / Connection . h <nl> class Connection : private boost : : noncopyable <nl> void getServerVersion ( String & name , UInt64 & version_major , UInt64 & version_minor , UInt64 & revision ) ; <nl> <nl> const String & getServerTimezone ( ) ; <nl> - const String & getServerGroupName ( ) ; <nl> + const String & getServerDisplayName ( ) ; <nl> <nl> / / / For log and exception messages . <nl> const String & getDescription ( ) const ; <nl> class Connection : private boost : : noncopyable <nl> UInt64 server_version_minor = 0 ; <nl> UInt64 server_revision = 0 ; <nl> String server_timezone ; <nl> - String server_group_name ; <nl> + String server_display_name ; <nl> <nl> std : : unique_ptr < Poco : : Net : : StreamSocket > socket ; <nl> std : : shared_ptr < ReadBuffer > in ; <nl> mmm a / dbms / src / Core / Defines . h <nl> ppp b / dbms / src / Core / Defines . h <nl> <nl> # define DBMS_MIN_REVISION_WITH_QUOTA_KEY_IN_CLIENT_INFO 54060 <nl> # define DBMS_MIN_REVISION_WITH_TABLES_STATUS 54226 <nl> # define DBMS_MIN_REVISION_WITH_TIME_ZONE_PARAMETER_IN_DATETIME_DATA_TYPE 54337 <nl> - # define DBMS_MIN_REVISION_WITH_SERVER_GROUP_NAME 54355 <nl> + # define DBMS_MIN_REVISION_WITH_SERVER_DISPLAY_NAME 54355 <nl> <nl> / / / Version of ClickHouse TCP protocol . Set to git tag with latest protocol change . <nl> # define DBMS_TCP_PROTOCOL_VERSION 54226 <nl> mmm a / dbms / src / Server / Client . cpp <nl> ppp b / dbms / src / Server / Client . cpp <nl> class Client : public Poco : : Util : : Application <nl> / / / If the last query resulted in exception . <nl> bool got_exception = false ; <nl> String server_version ; <nl> - String server_group_name ; <nl> + String server_display_name ; <nl> <nl> Stopwatch watch ; <nl> <nl> class Client : public Poco : : Util : : Application <nl> connection - > getServerVersion ( server_name , server_version_major , server_version_minor , server_revision ) ; <nl> <nl> server_version = toString ( server_version_major ) + " . " + toString ( server_version_minor ) + " . " + toString ( server_revision ) ; <nl> - server_group_name = connection - > getServerGroupName ( ) ; <nl> + server_display_name = connection - > getServerDisplayName ( ) ; <nl> if ( is_interactive ) <nl> { <nl> std : : cout < < " Connected to " < < server_name <nl> class Client : public Poco : : Util : : Application <nl> { <nl> String query ; <nl> String prev_query ; <nl> - String prompt = server_group_name . length ( ) ? 
" [ " + server_group_name + " ] : ) " : " : ) " ; <nl> + String prompt = server_display_name . length ( ) ? " [ " + server_display_name + " ] : ) " : " : ) " ; <nl> while ( char * line_ = readline ( query . empty ( ) ? prompt . c_str ( ) : " : - ] " ) ) <nl> { <nl> String line = line_ ; <nl> mmm a / dbms / src / Server / TCPHandler . cpp <nl> ppp b / dbms / src / Server / TCPHandler . cpp <nl> <nl> # include " TCPHandler . h " <nl> <nl> # include < Common / NetException . h > <nl> + # include < Common / getFQDNOrHostName . h > <nl> <nl> <nl> namespace DB <nl> void TCPHandler : : sendHello ( ) <nl> { <nl> writeStringBinary ( DateLUT : : instance ( ) . getTimeZone ( ) , * out ) ; <nl> } <nl> - if ( client_revision > = DBMS_MIN_REVISION_WITH_SERVER_GROUP_NAME ) <nl> + if ( client_revision > = DBMS_MIN_REVISION_WITH_SERVER_DISPLAY_NAME ) <nl> { <nl> - writeStringBinary ( server . config ( ) . getString ( " group_name " ) , * out ) ; <nl> + writeStringBinary ( server . config ( ) . getString ( " display_name " , getFQDNOrHostName ( ) ) , * out ) ; <nl> } <nl> out - > next ( ) ; <nl> } <nl> mmm a / dbms / src / Server / config . xml <nl> ppp b / dbms / src / Server / config . xml <nl> <nl> < count > 10 < / count > <nl> < ! - - < console > 1 < / console > - - > < ! - - Default behavior is autodetection ( log to console if not daemon mode and is tty ) - - > <nl> < / logger > <nl> - < group_name > staging < / group_name > <nl> + < ! - - display_name > production < / display_name - - > < ! - - It is the name that will be shown in the client - - > <nl> < http_port > 8123 < / http_port > <nl> < tcp_port > 9000 < / tcp_port > <nl> <nl>
Rename group_name to display_name
ClickHouse/ClickHouse
d3b6aafa097c3c772531ea30b07bd9348bf43856
2018-02-26T06:49:17Z
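Both sides of the ClickHouse handshake gate the new display_name field on the peer's protocol revision, so older clients and servers keep interoperating. A tiny runnable sketch of that compatibility pattern; the text "wire format" here is a toy stand-in, only the revision constant is taken from the diff.

    #include <iostream>
    #include <sstream>
    #include <string>

    const unsigned MIN_REVISION_WITH_DISPLAY_NAME = 54355;  // from the diff above

    void sendHello(std::ostream& out, unsigned peer_revision,
                   const std::string& display_name) {
      out << "timezone=UTC\n";                                // field old peers know
      if (peer_revision >= MIN_REVISION_WITH_DISPLAY_NAME)
        out << "display_name=" << display_name << "\n";       // new, revision-gated
    }

    int main() {
      std::ostringstream old_peer, new_peer;
      sendHello(old_peer, 54226, "production");   // old client: field omitted
      sendHello(new_peer, 54355, "production");   // new client: field included
      std::cout << old_peer.str() << "---\n" << new_peer.str();
      return 0;
    }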
mmm a / hphp / idl / network . idl . php <nl> ppp b / hphp / idl / network . idl . php <nl> <nl> ) , <nl> ) ) ; <nl> <nl> + DefineFunction ( <nl> + array ( <nl> + ' name ' = > " http_response_code " , <nl> + ' desc ' = > " Get and / or Set the HTTP response code . " , <nl> + ' return ' = > array ( <nl> + ' type ' = > Variant , <nl> + ' desc ' = > " The current response code . " , <nl> + ) , <nl> + ' args ' = > array ( <nl> + array ( <nl> + ' name ' = > " response_code " , <nl> + ' type ' = > Int32 , <nl> + ' value ' = > " 0 " , <nl> + ' desc ' = > " New response code to set . " , <nl> + ) , <nl> + ) , <nl> + ) <nl> + ) ; <nl> + <nl> DefineFunction ( <nl> array ( <nl> ' name ' = > " headers_list " , <nl> mmm a / hphp / runtime / ext / ext_network . cpp <nl> ppp b / hphp / runtime / ext / ext_network . cpp <nl> void f_header ( CStrRef str , bool replace / * = true * / , <nl> } <nl> } <nl> <nl> + Variant f_http_response_code ( int response_code / * = 0 * / ) { <nl> + Transport * transport = g_context - > getTransport ( ) ; <nl> + if ( ! transport ) { <nl> + raise_warning ( " Unable to access response code , no transport " ) ; <nl> + return false ; <nl> + } <nl> + <nl> + int old_code = transport - > getResponseCode ( ) ; <nl> + if ( response_code ) { <nl> + transport - > setResponse ( response_code , " explicit_header_response_code " ) ; <nl> + } <nl> + <nl> + if ( old_code ) { <nl> + return old_code ; <nl> + } <nl> + <nl> + return response_code ? true : false ; <nl> + } <nl> + <nl> Array f_headers_list ( ) { <nl> Transport * transport = g_context - > getTransport ( ) ; <nl> if ( transport ) { <nl> mmm a / hphp / runtime / ext / ext_network . ext_hhvm . cpp <nl> ppp b / hphp / runtime / ext / ext_network . ext_hhvm . cpp <nl> TypedValue * fg_header ( HPHP : : VM : : ActRec * ar ) { <nl> <nl> <nl> <nl> + / * <nl> + HPHP : : Variant HPHP : : f_http_response_code ( int ) <nl> + _ZN4HPHP20f_http_response_codeEi <nl> + <nl> + ( return value ) = > rax <nl> + _rv = > rdi <nl> + response_code = > rsi <nl> + * / <nl> + <nl> + TypedValue * fh_http_response_code ( TypedValue * _rv , int response_code ) asm ( " _ZN4HPHP20f_http_response_codeEi " ) ; <nl> + <nl> + TypedValue * fg1_http_response_code ( TypedValue * rv , HPHP : : VM : : ActRec * ar , int64_t count ) __attribute__ ( ( noinline , cold ) ) ; <nl> + TypedValue * fg1_http_response_code ( TypedValue * rv , HPHP : : VM : : ActRec * ar , int64_t count ) { <nl> + TypedValue * args UNUSED = ( ( TypedValue * ) ar ) - 1 ; <nl> + tvCastToInt64InPlace ( args - 0 ) ; <nl> + fh_http_response_code ( ( rv ) , ( count > 0 ) ? ( int ) ( args [ - 0 ] . m_data . num ) : ( int ) ( 0 ) ) ; <nl> + if ( rv - > m_type = = KindOfUninit ) rv - > m_type = KindOfNull ; <nl> + return rv ; <nl> + } <nl> + <nl> + TypedValue * fg_http_response_code ( HPHP : : VM : : ActRec * ar ) { <nl> + TypedValue rv ; <nl> + int64_t count = ar - > numArgs ( ) ; <nl> + TypedValue * args UNUSED = ( ( TypedValue * ) ar ) - 1 ; <nl> + if ( count < = 1LL ) { <nl> + if ( ( count < = 0 | | ( args - 0 ) - > m_type = = KindOfInt64 ) ) { <nl> + fh_http_response_code ( ( & ( rv ) ) , ( count > 0 ) ? ( int ) ( args [ - 0 ] . m_data . num ) : ( int ) ( 0 ) ) ; <nl> + if ( rv . m_type = = KindOfUninit ) rv . 
m_type = KindOfNull ; <nl> + frame_free_locals_no_this_inl ( ar , 1 ) ; <nl> + memcpy ( & ar - > m_r , & rv , sizeof ( TypedValue ) ) ; <nl> + return & ar - > m_r ; <nl> + } else { <nl> + fg1_http_response_code ( & rv , ar , count ) ; <nl> + frame_free_locals_no_this_inl ( ar , 1 ) ; <nl> + memcpy ( & ar - > m_r , & rv , sizeof ( TypedValue ) ) ; <nl> + return & ar - > m_r ; <nl> + } <nl> + } else { <nl> + throw_toomany_arguments_nr ( " http_response_code " , 1 , 1 ) ; <nl> + } <nl> + rv . m_data . num = 0LL ; <nl> + rv . m_type = KindOfNull ; <nl> + frame_free_locals_no_this_inl ( ar , 1 ) ; <nl> + memcpy ( & ar - > m_r , & rv , sizeof ( TypedValue ) ) ; <nl> + return & ar - > m_r ; <nl> + return & ar - > m_r ; <nl> + } <nl> + <nl> + <nl> + <nl> / * <nl> HPHP : : Array HPHP : : f_headers_list ( ) <nl> _ZN4HPHP14f_headers_listEv <nl> mmm a / hphp / runtime / ext / ext_network . ext_hhvm . h <nl> ppp b / hphp / runtime / ext / ext_network . ext_hhvm . h <nl> http_response_code = > rdx <nl> <nl> void fh_header ( Value * str , bool replace , int http_response_code ) asm ( " _ZN4HPHP8f_headerERKNS_6StringEbi " ) ; <nl> <nl> + / * <nl> + HPHP : : Variant HPHP : : f_http_response_code ( int ) <nl> + _ZN4HPHP20f_http_response_codeEi <nl> + <nl> + ( return value ) = > rax <nl> + _rv = > rdi <nl> + response_code = > rsi <nl> + * / <nl> + <nl> + TypedValue * fh_http_response_code ( TypedValue * _rv , int response_code ) asm ( " _ZN4HPHP20f_http_response_codeEi " ) ; <nl> + <nl> / * <nl> HPHP : : Array HPHP : : f_headers_list ( ) <nl> _ZN4HPHP14f_headers_listEv <nl> mmm a / hphp / runtime / ext / ext_network . h <nl> ppp b / hphp / runtime / ext / ext_network . h <nl> inline bool f_socket_set_timeout ( CObjRef stream , int seconds , <nl> <nl> void f_header ( CStrRef str , bool replace = true , int http_response_code = 0 ) ; <nl> <nl> + Variant f_http_response_code ( int response_code = 0 ) ; <nl> + <nl> Array f_headers_list ( ) ; <nl> <nl> bool f_headers_sent ( VRefParam file = null , VRefParam line = null ) ; <nl> mmm a / hphp / runtime / ext_hhvm / ext_hhvm_infotabs . cpp <nl> ppp b / hphp / runtime / ext_hhvm / ext_hhvm_infotabs . 
cpp <nl> TypedValue * fg_socket_get_status ( VM : : ActRec * ar ) ; <nl> TypedValue * fg_socket_set_blocking ( VM : : ActRec * ar ) ; <nl> TypedValue * fg_socket_set_timeout ( VM : : ActRec * ar ) ; <nl> TypedValue * fg_header ( VM : : ActRec * ar ) ; <nl> + TypedValue * fg_http_response_code ( VM : : ActRec * ar ) ; <nl> TypedValue * fg_headers_list ( VM : : ActRec * ar ) ; <nl> TypedValue * fg_get_http_request_size ( VM : : ActRec * ar ) ; <nl> TypedValue * fg_headers_sent ( VM : : ActRec * ar ) ; <nl> TypedValue * tg_9XMLWriter_endDTD ( VM : : ActRec * ar ) ; <nl> TypedValue * tg_9XMLWriter_flush ( VM : : ActRec * ar ) ; <nl> TypedValue * tg_9XMLWriter_outputMemory ( VM : : ActRec * ar ) ; <nl> <nl> - const long long hhbc_ext_funcs_count = 2207 ; <nl> + const long long hhbc_ext_funcs_count = 2208 ; <nl> const HhbcExtFuncInfo hhbc_ext_funcs [ ] = { <nl> { " apache_note " , fg_apache_note , ( void * ) & fh_apache_note } , <nl> { " apache_request_headers " , fg_apache_request_headers , ( void * ) & fh_apache_request_headers } , <nl> const HhbcExtFuncInfo hhbc_ext_funcs [ ] = { <nl> { " socket_set_blocking " , fg_socket_set_blocking , ( void * ) & fh_socket_set_blocking } , <nl> { " socket_set_timeout " , fg_socket_set_timeout , ( void * ) & fh_socket_set_timeout } , <nl> { " header " , fg_header , ( void * ) & fh_header } , <nl> + { " http_response_code " , fg_http_response_code , ( void * ) & fh_http_response_code } , <nl> { " headers_list " , fg_headers_list , ( void * ) & fh_headers_list } , <nl> { " get_http_request_size " , fg_get_http_request_size , ( void * ) & fh_get_http_request_size } , <nl> { " headers_sent " , fg_headers_sent , ( void * ) & fh_headers_sent } , <nl> mmm a / hphp / system / class_map . cpp <nl> ppp b / hphp / system / class_map . cpp <nl> const char * g_class_map [ ] = { <nl> NULL , <nl> NULL , <nl> NULL , <nl> + ( const char * ) 0x10006040 , " http_response_code " , " " , ( const char * ) 0 , ( const char * ) 0 , <nl> + " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . http - response - code . php \ n * ) \ n * \ n * Get and / or Set the HTTP response code . \ n * \ n * @ response_code \ n * int New response code to set . \ n * \ n * @ return mixed The current response code . \ n * / " , <nl> + ( const char * ) 0xffffffff , ( const char * ) 0x2000 , " response_code " , " " , ( const char * ) 0xa , " i : 0 ; " , " 0 " , NULL , <nl> + NULL , <nl> + NULL , <nl> + NULL , <nl> ( const char * ) 0x10006040 , " headers_list " , " " , ( const char * ) 0 , ( const char * ) 0 , <nl> " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . headers - list . php ) \ n * \ n * headers_list ( ) will return a list of headers to be sent to the browser \ n * / client . To determine whether or not these headers have been sent yet , \ n * use headers_sent ( ) . \ n * \ n * @ return vector Returns a numerically indexed array of headers . \ n * / " , <nl> ( const char * ) 0x20 , NULL , <nl> mmm a / hphp / system / network . inc <nl> ppp b / hphp / system / network . inc <nl> <nl> <nl> # if EXT_TYPE = = 0 <nl> " gethostname " , T ( Variant ) , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . gethostname . php ) \ n * \ n * Gets the standard host name for the local machine . \ n * \ n * @ return mixed Returns a string with the hostname on success , \ n * otherwise FALSE is returned . 
\ n * / " , <nl> - " gethostbyaddr " , T ( Variant ) , S ( 0 ) , " ip_address " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . gethostbyaddr . php ) \ n * \ n * Returns the host name of the Internet host specified by ip_address . \ n * \ n * @ ip_address string The host IP address . \ n * \ n * @ return mixed Returns the host name or the unmodified ip_address \ n * on failure . \ n * / " , <nl> - " gethostbyname " , T ( String ) , S ( 0 ) , " hostname " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . gethostbyname . php ) \ n * \ n * Returns the IPv4 address of the Internet host specified by hostname . \ n * \ n * @ hostname string The host name . \ n * \ n * @ return string Returns the IPv4 address or a string containing the \ n * unmodified hostname on failure . \ n * / " , <nl> - " gethostbynamel " , T ( Variant ) , S ( 0 ) , " hostname " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . gethostbynamel . php ) \ n * \ n * Returns a list of IPv4 addresses to which the Internet host specified \ n * by hostname resolves . \ n * \ n * @ hostname string The host name . \ n * \ n * @ return mixed Returns an array of IPv4 addresses or FALSE if \ n * hostname could not be resolved . \ n * / " , <nl> - " getprotobyname " , T ( Variant ) , S ( 0 ) , " name " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . getprotobyname . php ) \ n * \ n * getprotobyname ( ) returns the protocol number associated with the \ n * protocol name as per / etc / protocols . \ n * \ n * @ name string The protocol name . \ n * \ n * @ return mixed Returns the protocol number or - 1 if the protocol is \ n * not found . \ n * / " , <nl> - " getprotobynumber " , T ( Variant ) , S ( 0 ) , " number " , T ( Int32 ) , NULL , S ( 0 ) , NULL , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . getprotobynumber . php ) \ n * \ n * getprotobynumber ( ) returns the protocol name associated with protocol \ n * number as per / etc / protocols . \ n * \ n * @ number int The protocol number . \ n * \ n * @ return mixed Returns the protocol name as a string , or FALSE on \ n * failure . \ n * / " , <nl> - " getservbyname " , T ( Variant ) , S ( 0 ) , " service " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " protocol " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . getservbyname . php ) \ n * \ n * getservbyname ( ) returns the Internet port which corresponds to service \ n * for the specified protocol as per / etc / services . \ n * \ n * @ service string The Internet service name , as a string . \ n * @ protocol string protocol is either \ " tcp \ " or \ " udp \ " ( in lowercase ) . \ n * \ n * @ return mixed Returns the port number , or FALSE if service or \ n * protocol is not found . \ n * / " , <nl> - " getservbyport " , T ( Variant ) , S ( 0 ) , " port " , T ( Int32 ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " protocol " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . 
getservbyport . php ) \ n * \ n * getservbyport ( ) returns the Internet service associated with port for \ n * the specified protocol as per / etc / services . \ n * \ n * @ port int The port number . \ n * @ protocol string protocol is either \ " tcp \ " or \ " udp \ " ( in lowercase ) . \ n * \ n * @ return mixed Returns the Internet service name as a string . \ n * / " , <nl> - " inet_ntop " , T ( Variant ) , S ( 0 ) , " in_addr " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . inet - ntop . php ) \ n * \ n * \ n * @ in_addr string A 32bit IPv4 , or 128bit IPv6 address . \ n * \ n * @ return mixed Returns a string representation of the address or \ n * FALSE on failure . \ n * / " , <nl> - " inet_pton " , T ( Variant ) , S ( 0 ) , " address " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . inet - pton . php ) \ n * \ n * This function converts a human readable IPv4 or IPv6 address ( if PHP \ n * was built with IPv6 support enabled ) into an address family appropriate \ n * 32bit or 128bit binary structure . \ n * \ n * @ address string A human readable IPv4 or IPv6 address . \ n * \ n * @ return mixed Returns the in_addr representation of the given \ n * address \ n * / " , <nl> - " ip2long " , T ( Variant ) , S ( 0 ) , " ip_address " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . ip2long . php ) \ n * \ n * The function ip2long ( ) generates an IPv4 Internet network address from \ n * its Internet standard format ( dotted string ) representation . \ n * \ n * ip2long ( ) will also work with non - complete IP addresses . Read \ 273 \ n * http : / / publibn . boulder . ibm . com / doc_link / en_US / a_doc_lib / libs / commtrf2 / inet_addr . htm \ n * for more info . \ n * \ n * @ ip_address string A standard format address . \ n * \ n * @ return mixed Returns the IPv4 address or FALSE if ip_address is \ n * invalid . \ n * / " , <nl> - " long2ip " , T ( String ) , S ( 0 ) , " proper_address " , T ( Int32 ) , NULL , S ( 0 ) , NULL , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . long2ip . php ) \ n * \ n * The function long2ip ( ) generates an Internet address in dotted format \ n * ( i . e . : aaa . bbb . ccc . ddd ) from the proper address representation . \ n * \ n * @ proper_address \ n * int A proper address representation . \ n * \ n * @ return string Returns the Internet IP address as a string . \ n * / " , <nl> - " dns_check_record " , T ( Boolean ) , S ( 0 ) , " host " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " type " , T ( String ) , " N ; " , S ( 2 ) , " null " , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . dns - check - record . php ) \ n * \ n * \ n * @ host string \ n * @ type string \ n * \ n * @ return bool \ n * / " , <nl> - " checkdnsrr " , T ( Boolean ) , S ( 0 ) , " host " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " type " , T ( String ) , " N ; " , S ( 2 ) , " null " , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . checkdnsrr . php ) \ n * \ n * Searches DNS for records of type type corresponding to host . 
\ n * \ n * @ host string host may either be the IP address in dotted - quad \ n * notation or the host name . \ n * @ type string type may be any one of : A , MX , NS , SOA , PTR , CNAME , \ n * AAAA , A6 , SRV , NAPTR , TXT or ANY . \ n * \ n * @ return bool Returns TRUE if any records are found ; returns FALSE \ n * if no records were found or if an error occurred . \ n * / " , <nl> - " dns_get_record " , T ( Variant ) , S ( 0 ) , " hostname " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " type " , T ( Int32 ) , " i : - 1 ; " , S ( 5 ) , " - 1 " , S ( 0 ) , " authns " , T ( Variant ) , " N ; " , S ( 2 ) , " null " , S ( 1 ) , " addtl " , T ( Variant ) , " N ; " , S ( 2 ) , " null " , S ( 1 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . dns - get - record . php ) \ n * \ n * Fetch DNS Resource Records associated with the given hostname . \ n * \ n * @ hostname string hostname should be a valid DNS hostname such as \ n * \ " www . example . com \ " . Reverse lookups can be generated \ n * using in - addr . arpa notation , but gethostbyaddr ( ) is \ n * more suitable for the majority of reverse lookups . \ n * \ n * Per DNS standards , email addresses are given in \ n * user . host format ( for example : \ n * hostmaster . example . com as opposed to \ n * hostmaster @ example . com ) , be sure to check this value \ n * and modify if necessary before using it with a \ n * functions such as mail ( ) . \ n * @ type int By default , dns_get_record ( ) will search for any \ n * resource records associated with hostname . To limit \ n * the query , specify the optional type parameter . May \ n * be any one of the following : DNS_A , DNS_CNAME , \ n * DNS_HINFO , DNS_MX , DNS_NS , DNS_PTR , DNS_SOA , \ n * DNS_TXT , DNS_AAAA , DNS_SRV , DNS_NAPTR , DNS_A6 , \ n * DNS_ALL or DNS_ANY . \ n * \ n * Because of eccentricities in the performance of \ n * libresolv between platforms , DNS_ANY will not always \ n * return every record , the slower DNS_ALL will collect \ n * all records more reliably . \ n * @ authns mixed Passed by reference and , if given , will be populated \ n * with Resource Records for the Authoritative Name \ n * Servers . \ n * @ addtl mixed Passed by reference and , if given , will be populated \ n * with any Additional Records . \ n * \ n * @ return mixed \ n * / " , <nl> - " dns_get_mx " , T ( Boolean ) , S ( 0 ) , " hostname " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " mxhosts " , T ( Variant ) , NULL , S ( 0 ) , NULL , S ( 1 ) , " weights " , T ( Variant ) , " N ; " , S ( 2 ) , " null " , S ( 1 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . dns - get - mx . php ) \ n * \ n * \ n * @ hostname string \ n * @ mxhosts mixed \ n * @ weights mixed \ n * \ n * @ return bool \ n * / " , <nl> - " getmxrr " , T ( Boolean ) , S ( 0 ) , " hostname " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " mxhosts " , T ( Variant ) , NULL , S ( 0 ) , NULL , S ( 1 ) , " weight " , T ( Variant ) , " N ; " , S ( 2 ) , " null " , S ( 1 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . getmxrr . php ) \ n * \ n * Searches DNS for MX records corresponding to hostname . \ n * \ n * @ hostname string The Internet host name . \ n * @ mxhosts mixed A list of the MX records found is placed into the \ n * array mxhosts . 
\ n * @ weight mixed If the weight array is given , it will be filled with \ n * the weight information gathered . \ n * \ n * @ return bool Returns TRUE if any records are found ; returns FALSE \ n * if no records were found or if an error occurred . \ n * / " , <nl> - " fsockopen " , T ( Variant ) , S ( 0 ) , " hostname " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " port " , T ( Int32 ) , " i : - 1 ; " , S ( 5 ) , " - 1 " , S ( 0 ) , " errnum " , T ( Variant ) , " N ; " , S ( 2 ) , " null " , S ( 1 ) , " errstr " , T ( Variant ) , " N ; " , S ( 2 ) , " null " , S ( 1 ) , " timeout " , T ( Double ) , " d : 0 ; " , S ( 4 ) , " 0 . 0 " , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . fsockopen . php ) \ n * \ n * Initiates a socket connection to the resource specified by hostname . \ n * \ n * PHP supports targets in the Internet and Unix domains as described in \ n * List of Supported Socket Transports . A list of supported transports can \ n * also be retrieved using stream_get_transports ( ) . \ n * \ n * The socket will by default be opened in blocking mode . You can switch \ n * it to non - blocking mode by using stream_set_blocking ( ) . \ n * \ n * @ hostname string If you have compiled in OpenSSL support , you may \ n * prefix the hostname with either ssl : / / or tls : / / to \ n * use an SSL or TLS client connection over TCP / IP to \ n * connect to the remote host . \ n * @ port int The port number . \ n * @ errnum mixed If provided , holds the system level error number \ n * that occurred in the system - level connect ( ) call . \ n * \ n * If the value returned in errno is 0 and the \ n * function returned FALSE , it is an indication that \ n * the error occurred before the connect ( ) call . This \ n * is most likely due to a problem initializing the \ n * socket . \ n * @ errstr mixed The error message as a string . \ n * @ timeout float The connection timeout , in seconds . \ n * \ n * If you need to set a timeout for reading / writing \ n * data over the socket , use stream_set_timeout ( ) , as \ n * the timeout parameter to fsockopen ( ) only applies \ n * while connecting the socket . \ n * \ n * @ return mixed fsockopen ( ) returns a file pointer which may be used \ n * together with the other file functions ( such as \ n * fgets ( ) , fgetss ( ) , fwrite ( ) , fclose ( ) , and feof ( ) ) . \ n * If the call fails , it will return FALSE \ n * / " , <nl> - " pfsockopen " , T ( Variant ) , S ( 0 ) , " hostname " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " port " , T ( Int32 ) , " i : - 1 ; " , S ( 5 ) , " - 1 " , S ( 0 ) , " errnum " , T ( Variant ) , " N ; " , S ( 2 ) , " null " , S ( 1 ) , " errstr " , T ( Variant ) , " N ; " , S ( 2 ) , " null " , S ( 1 ) , " timeout " , T ( Double ) , " d : 0 ; " , S ( 4 ) , " 0 . 0 " , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . pfsockopen . php ) \ n * \ n * This function behaves exactly as fsockopen ( ) with the difference that \ n * the connection is not closed after the script finishes . It is the \ n * persistent version of fsockopen ( ) . \ n * For parameter information , see the fsockopen ( ) documentation . 
\ n * \ n * @ hostname string \ n * @ port int \ n * @ errnum mixed \ n * @ errstr mixed \ n * @ timeout float \ n * \ n * @ return mixed \ n * / " , <nl> - " socket_get_status " , T ( Variant ) , S ( 0 ) , " stream " , T ( Object ) , NULL , S ( 0 ) , NULL , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . socket - get - status . php ) \ n * \ n * \ n * @ stream resource \ n * \ n * \ n * @ return mixed \ n * / " , <nl> - " socket_set_blocking " , T ( Boolean ) , S ( 0 ) , " stream " , T ( Object ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " mode " , T ( Int32 ) , NULL , S ( 0 ) , NULL , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . socket - set - blocking . php \ n * ) \ n * \ n * \ n * @ stream resource \ n * \ n * @ mode int \ n * \ n * @ return bool \ n * / " , <nl> - " socket_set_timeout " , T ( Boolean ) , S ( 0 ) , " stream " , T ( Object ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " seconds " , T ( Int32 ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " microseconds " , T ( Int32 ) , " i : 0 ; " , S ( 4 ) , " 0 " , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . socket - set - timeout . php \ n * ) \ n * \ n * \ n * @ stream resource \ n * \ n * @ seconds int \ n * @ microseconds \ n * int \ n * \ n * @ return bool \ n * / " , <nl> - " header " , T ( Void ) , S ( 0 ) , " str " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " replace " , T ( Boolean ) , " b : 1 ; " , S ( 4 ) , " true " , S ( 0 ) , " http_response_code " , T ( Int32 ) , " i : 0 ; " , S ( 4 ) , " 0 " , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . header . php ) \ n * \ n * header ( ) is used to send a raw HTTP header . See the \ 273 HTTP / 1 . 1 \ n * specification for more information on HTTP headers . \ n * \ n * Remember that header ( ) must be called before any actual output is sent , \ n * either by normal HTML tags , blank lines in a file , or from PHP . It is a \ n * very common error to read code with include ( ) , or require ( ) , functions , \ n * or another file access function , and have spaces or empty lines that are \ n * output before header ( ) is called . The same problem exists when using a \ n * single PHP / HTML file . \ n * \ n * @ str string The header string . \ n * \ n * There are two special - case header calls . The first \ n * is a header that starts with the string \ " HTTP / \ " \ n * ( case is not significant ) , which will be used to \ n * figure out the HTTP status code to send . For \ n * example , if you have configured Apache to use a PHP \ n * script to handle requests for missing files ( using \ n * the ErrorDocument directive ) , you may want to make \ n * sure that your script generates the proper status \ n * code . \ n * \ n * \ n * \ n * \ n * \ n * The second special case is the \ " Location : \ " header . \ n * Not only does it send this header back to the \ n * browser , but it also returns a REDIRECT ( 302 ) status \ n * code to the browser unless the 201 or a 3xx status \ n * code has already been set . \ n * @ replace bool The optional replace parameter indicates whether the \ n * header should replace a previous similar header , or \ n * add a second header of the same type . By default it \ n * will replace , but if you pass in FALSE as the second \ n * argument you can force multiple headers of the same \ n * type . 
For example : \ n * @ http_response_code \ n * int Forces the HTTP response code to the specified \ n * value . \ n * \ n * @ return mixed No value is returned . \ n * / " , <nl> + " gethostbyaddr " , T ( Variant ) , S ( 0 ) , " ip_address " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . gethostbyaddr . php ) \ n * \ n * Returns the host name of the Internet host specified by ip_address . \ n * \ n * @ ip_address string The host IP address . \ n * \ n * @ return mixed Returns the host name or the unmodified ip_address \ n * on failure . \ n * / " , <nl> + " gethostbyname " , T ( String ) , S ( 0 ) , " hostname " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . gethostbyname . php ) \ n * \ n * Returns the IPv4 address of the Internet host specified by hostname . \ n * \ n * @ hostname string The host name . \ n * \ n * @ return string Returns the IPv4 address or a string containing the \ n * unmodified hostname on failure . \ n * / " , <nl> + " gethostbynamel " , T ( Variant ) , S ( 0 ) , " hostname " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . gethostbynamel . php ) \ n * \ n * Returns a list of IPv4 addresses to which the Internet host specified \ n * by hostname resolves . \ n * \ n * @ hostname string The host name . \ n * \ n * @ return mixed Returns an array of IPv4 addresses or FALSE if \ n * hostname could not be resolved . \ n * / " , <nl> + " getprotobyname " , T ( Variant ) , S ( 0 ) , " name " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . getprotobyname . php ) \ n * \ n * getprotobyname ( ) returns the protocol number associated with the \ n * protocol name as per / etc / protocols . \ n * \ n * @ name string The protocol name . \ n * \ n * @ return mixed Returns the protocol number or - 1 if the protocol is \ n * not found . \ n * / " , <nl> + " getprotobynumber " , T ( Variant ) , S ( 0 ) , " number " , T ( Int32 ) , NULL , S ( 0 ) , NULL , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . getprotobynumber . php ) \ n * \ n * getprotobynumber ( ) returns the protocol name associated with protocol \ n * number as per / etc / protocols . \ n * \ n * @ number int The protocol number . \ n * \ n * @ return mixed Returns the protocol name as a string , or FALSE on \ n * failure . \ n * / " , <nl> + " getservbyname " , T ( Variant ) , S ( 0 ) , " service " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " protocol " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . getservbyname . php ) \ n * \ n * getservbyname ( ) returns the Internet port which corresponds to service \ n * for the specified protocol as per / etc / services . \ n * \ n * @ service string The Internet service name , as a string . \ n * @ protocol string protocol is either \ " tcp \ " or \ " udp \ " ( in lowercase ) . \ n * \ n * @ return mixed Returns the port number , or FALSE if service or \ n * protocol is not found . 
\ n * / " , <nl> + " getservbyport " , T ( Variant ) , S ( 0 ) , " port " , T ( Int32 ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " protocol " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . getservbyport . php ) \ n * \ n * getservbyport ( ) returns the Internet service associated with port for \ n * the specified protocol as per / etc / services . \ n * \ n * @ port int The port number . \ n * @ protocol string protocol is either \ " tcp \ " or \ " udp \ " ( in lowercase ) . \ n * \ n * @ return mixed Returns the Internet service name as a string . \ n * / " , <nl> + " inet_ntop " , T ( Variant ) , S ( 0 ) , " in_addr " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . inet - ntop . php ) \ n * \ n * \ n * @ in_addr string A 32bit IPv4 , or 128bit IPv6 address . \ n * \ n * @ return mixed Returns a string representation of the address or \ n * FALSE on failure . \ n * / " , <nl> + " inet_pton " , T ( Variant ) , S ( 0 ) , " address " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . inet - pton . php ) \ n * \ n * This function converts a human readable IPv4 or IPv6 address ( if PHP \ n * was built with IPv6 support enabled ) into an address family appropriate \ n * 32bit or 128bit binary structure . \ n * \ n * @ address string A human readable IPv4 or IPv6 address . \ n * \ n * @ return mixed Returns the in_addr representation of the given \ n * address \ n * / " , <nl> + " ip2long " , T ( Variant ) , S ( 0 ) , " ip_address " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . ip2long . php ) \ n * \ n * The function ip2long ( ) generates an IPv4 Internet network address from \ n * its Internet standard format ( dotted string ) representation . \ n * \ n * ip2long ( ) will also work with non - complete IP addresses . Read \ 273 \ n * http : / / publibn . boulder . ibm . com / doc_link / en_US / a_doc_lib / libs / commtrf2 / inet_addr . htm \ n * for more info . \ n * \ n * @ ip_address string A standard format address . \ n * \ n * @ return mixed Returns the IPv4 address or FALSE if ip_address is \ n * invalid . \ n * / " , <nl> + " long2ip " , T ( String ) , S ( 0 ) , " proper_address " , T ( Int32 ) , NULL , S ( 0 ) , NULL , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . long2ip . php ) \ n * \ n * The function long2ip ( ) generates an Internet address in dotted format \ n * ( i . e . : aaa . bbb . ccc . ddd ) from the proper address representation . \ n * \ n * @ proper_address \ n * int A proper address representation . \ n * \ n * @ return string Returns the Internet IP address as a string . \ n * / " , <nl> + " dns_check_record " , T ( Boolean ) , S ( 0 ) , " host " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " type " , T ( String ) , " N ; " , S ( 2 ) , " null " , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . dns - check - record . 
php ) \ n * \ n * \ n * @ host string \ n * @ type string \ n * \ n * @ return bool \ n * / " , <nl> + " checkdnsrr " , T ( Boolean ) , S ( 0 ) , " host " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " type " , T ( String ) , " N ; " , S ( 2 ) , " null " , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . checkdnsrr . php ) \ n * \ n * Searches DNS for records of type type corresponding to host . \ n * \ n * @ host string host may either be the IP address in dotted - quad \ n * notation or the host name . \ n * @ type string type may be any one of : A , MX , NS , SOA , PTR , CNAME , \ n * AAAA , A6 , SRV , NAPTR , TXT or ANY . \ n * \ n * @ return bool Returns TRUE if any records are found ; returns FALSE \ n * if no records were found or if an error occurred . \ n * / " , <nl> + " dns_get_record " , T ( Variant ) , S ( 0 ) , " hostname " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " type " , T ( Int32 ) , " i : - 1 ; " , S ( 5 ) , " - 1 " , S ( 0 ) , " authns " , T ( Variant ) , " N ; " , S ( 2 ) , " null " , S ( 1 ) , " addtl " , T ( Variant ) , " N ; " , S ( 2 ) , " null " , S ( 1 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . dns - get - record . php ) \ n * \ n * Fetch DNS Resource Records associated with the given hostname . \ n * \ n * @ hostname string hostname should be a valid DNS hostname such as \ n * \ " www . example . com \ " . Reverse lookups can be generated \ n * using in - addr . arpa notation , but gethostbyaddr ( ) is \ n * more suitable for the majority of reverse lookups . \ n * \ n * Per DNS standards , email addresses are given in \ n * user . host format ( for example : \ n * hostmaster . example . com as opposed to \ n * hostmaster @ example . com ) , be sure to check this value \ n * and modify if necessary before using it with a \ n * functions such as mail ( ) . \ n * @ type int By default , dns_get_record ( ) will search for any \ n * resource records associated with hostname . To limit \ n * the query , specify the optional type parameter . May \ n * be any one of the following : DNS_A , DNS_CNAME , \ n * DNS_HINFO , DNS_MX , DNS_NS , DNS_PTR , DNS_SOA , \ n * DNS_TXT , DNS_AAAA , DNS_SRV , DNS_NAPTR , DNS_A6 , \ n * DNS_ALL or DNS_ANY . \ n * \ n * Because of eccentricities in the performance of \ n * libresolv between platforms , DNS_ANY will not always \ n * return every record , the slower DNS_ALL will collect \ n * all records more reliably . \ n * @ authns mixed Passed by reference and , if given , will be populated \ n * with Resource Records for the Authoritative Name \ n * Servers . \ n * @ addtl mixed Passed by reference and , if given , will be populated \ n * with any Additional Records . \ n * \ n * @ return mixed \ n * / " , <nl> + " dns_get_mx " , T ( Boolean ) , S ( 0 ) , " hostname " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " mxhosts " , T ( Variant ) , NULL , S ( 0 ) , NULL , S ( 1 ) , " weights " , T ( Variant ) , " N ; " , S ( 2 ) , " null " , S ( 1 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . dns - get - mx . 
php ) \ n * \ n * \ n * @ hostname string \ n * @ mxhosts mixed \ n * @ weights mixed \ n * \ n * @ return bool \ n * / " , <nl> + " getmxrr " , T ( Boolean ) , S ( 0 ) , " hostname " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " mxhosts " , T ( Variant ) , NULL , S ( 0 ) , NULL , S ( 1 ) , " weight " , T ( Variant ) , " N ; " , S ( 2 ) , " null " , S ( 1 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . getmxrr . php ) \ n * \ n * Searches DNS for MX records corresponding to hostname . \ n * \ n * @ hostname string The Internet host name . \ n * @ mxhosts mixed A list of the MX records found is placed into the \ n * array mxhosts . \ n * @ weight mixed If the weight array is given , it will be filled with \ n * the weight information gathered . \ n * \ n * @ return bool Returns TRUE if any records are found ; returns FALSE \ n * if no records were found or if an error occurred . \ n * / " , <nl> + " fsockopen " , T ( Variant ) , S ( 0 ) , " hostname " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " port " , T ( Int32 ) , " i : - 1 ; " , S ( 5 ) , " - 1 " , S ( 0 ) , " errnum " , T ( Variant ) , " N ; " , S ( 2 ) , " null " , S ( 1 ) , " errstr " , T ( Variant ) , " N ; " , S ( 2 ) , " null " , S ( 1 ) , " timeout " , T ( Double ) , " d : 0 ; " , S ( 4 ) , " 0 . 0 " , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . fsockopen . php ) \ n * \ n * Initiates a socket connection to the resource specified by hostname . \ n * \ n * PHP supports targets in the Internet and Unix domains as described in \ n * List of Supported Socket Transports . A list of supported transports can \ n * also be retrieved using stream_get_transports ( ) . \ n * \ n * The socket will by default be opened in blocking mode . You can switch \ n * it to non - blocking mode by using stream_set_blocking ( ) . \ n * \ n * @ hostname string If you have compiled in OpenSSL support , you may \ n * prefix the hostname with either ssl : / / or tls : / / to \ n * use an SSL or TLS client connection over TCP / IP to \ n * connect to the remote host . \ n * @ port int The port number . \ n * @ errnum mixed If provided , holds the system level error number \ n * that occurred in the system - level connect ( ) call . \ n * \ n * If the value returned in errno is 0 and the \ n * function returned FALSE , it is an indication that \ n * the error occurred before the connect ( ) call . This \ n * is most likely due to a problem initializing the \ n * socket . \ n * @ errstr mixed The error message as a string . \ n * @ timeout float The connection timeout , in seconds . \ n * \ n * If you need to set a timeout for reading / writing \ n * data over the socket , use stream_set_timeout ( ) , as \ n * the timeout parameter to fsockopen ( ) only applies \ n * while connecting the socket . \ n * \ n * @ return mixed fsockopen ( ) returns a file pointer which may be used \ n * together with the other file functions ( such as \ n * fgets ( ) , fgetss ( ) , fwrite ( ) , fclose ( ) , and feof ( ) ) . 
\ n * If the call fails , it will return FALSE \ n * / " , <nl> + " pfsockopen " , T ( Variant ) , S ( 0 ) , " hostname " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " port " , T ( Int32 ) , " i : - 1 ; " , S ( 5 ) , " - 1 " , S ( 0 ) , " errnum " , T ( Variant ) , " N ; " , S ( 2 ) , " null " , S ( 1 ) , " errstr " , T ( Variant ) , " N ; " , S ( 2 ) , " null " , S ( 1 ) , " timeout " , T ( Double ) , " d : 0 ; " , S ( 4 ) , " 0 . 0 " , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . pfsockopen . php ) \ n * \ n * This function behaves exactly as fsockopen ( ) with the difference that \ n * the connection is not closed after the script finishes . It is the \ n * persistent version of fsockopen ( ) . \ n * For parameter information , see the fsockopen ( ) documentation . \ n * \ n * @ hostname string \ n * @ port int \ n * @ errnum mixed \ n * @ errstr mixed \ n * @ timeout float \ n * \ n * @ return mixed \ n * / " , <nl> + " socket_get_status " , T ( Variant ) , S ( 0 ) , " stream " , T ( Object ) , NULL , S ( 0 ) , NULL , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . socket - get - status . php ) \ n * \ n * \ n * @ stream resource \ n * \ n * \ n * @ return mixed \ n * / " , <nl> + " socket_set_blocking " , T ( Boolean ) , S ( 0 ) , " stream " , T ( Object ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " mode " , T ( Int32 ) , NULL , S ( 0 ) , NULL , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . socket - set - blocking . php \ n * ) \ n * \ n * \ n * @ stream resource \ n * \ n * @ mode int \ n * \ n * @ return bool \ n * / " , <nl> + " socket_set_timeout " , T ( Boolean ) , S ( 0 ) , " stream " , T ( Object ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " seconds " , T ( Int32 ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " microseconds " , T ( Int32 ) , " i : 0 ; " , S ( 4 ) , " 0 " , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . socket - set - timeout . php \ n * ) \ n * \ n * \ n * @ stream resource \ n * \ n * @ seconds int \ n * @ microseconds \ n * int \ n * \ n * @ return bool \ n * / " , <nl> + " header " , T ( Void ) , S ( 0 ) , " str " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " replace " , T ( Boolean ) , " b : 1 ; " , S ( 4 ) , " true " , S ( 0 ) , " http_response_code " , T ( Int32 ) , " i : 0 ; " , S ( 4 ) , " 0 " , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . header . php ) \ n * \ n * header ( ) is used to send a raw HTTP header . See the \ 273 HTTP / 1 . 1 \ n * specification for more information on HTTP headers . \ n * \ n * Remember that header ( ) must be called before any actual output is sent , \ n * either by normal HTML tags , blank lines in a file , or from PHP . It is a \ n * very common error to read code with include ( ) , or require ( ) , functions , \ n * or another file access function , and have spaces or empty lines that are \ n * output before header ( ) is called . The same problem exists when using a \ n * single PHP / HTML file . \ n * \ n * @ str string The header string . \ n * \ n * There are two special - case header calls . The first \ n * is a header that starts with the string \ " HTTP / \ " \ n * ( case is not significant ) , which will be used to \ n * figure out the HTTP status code to send . 
For \ n * example , if you have configured Apache to use a PHP \ n * script to handle requests for missing files ( using \ n * the ErrorDocument directive ) , you may want to make \ n * sure that your script generates the proper status \ n * code . \ n * \ n * \ n * \ n * \ n * \ n * The second special case is the \ " Location : \ " header . \ n * Not only does it send this header back to the \ n * browser , but it also returns a REDIRECT ( 302 ) status \ n * code to the browser unless the 201 or a 3xx status \ n * code has already been set . \ n * @ replace bool The optional replace parameter indicates whether the \ n * header should replace a previous similar header , or \ n * add a second header of the same type . By default it \ n * will replace , but if you pass in FALSE as the second \ n * argument you can force multiple headers of the same \ n * type . For example : \ n * @ http_response_code \ n * int Forces the HTTP response code to the specified \ n * value . \ n * \ n * @ return mixed No value is returned . \ n * / " , <nl> + " http_response_code " , T ( Variant ) , S ( 0 ) , " response_code " , T ( Int32 ) , " i : 0 ; " , S ( 4 ) , " 0 " , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . http - response - code . php \ n * ) \ n * \ n * Get and / or Set the HTTP response code . \ n * \ n * @ response_code \ n * int New response code to set . \ n * \ n * @ return mixed The current response code . \ n * / " , <nl> " headers_list " , T ( Array ) , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . headers - list . php ) \ n * \ n * headers_list ( ) will return a list of headers to be sent to the browser \ n * / client . To determine whether or not these headers have been sent yet , \ n * use headers_sent ( ) . \ n * \ n * @ return vector Returns a numerically indexed array of headers . \ n * / " , <nl> " get_http_request_size " , T ( Int32 ) , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from \ n * http : / / php . net / manual / en / function . get - http - request - size . php ) \ n * \ n * get_http_request_size ( ) will return the size of the http request . \ n * \ n * @ return int Returns the size of the http request . \ n * / " , <nl> - " headers_sent " , T ( Boolean ) , S ( 0 ) , " file " , T ( Variant ) , " N ; " , S ( 2 ) , " null " , S ( 1 ) , " line " , T ( Variant ) , " N ; " , S ( 2 ) , " null " , S ( 1 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . headers - sent . php ) \ n * \ n * Checks if or where headers have been sent . \ n * \ n * You can ' t add any more header lines using the header ( ) function once \ n * the header block has already been sent . Using this function you can at \ n * least prevent getting HTTP header related error messages . Another option \ n * is to use Output Buffering . \ n * \ n * @ file mixed If the optional file and line parameters are set , \ n * headers_sent ( ) will put the PHP source file name and \ n * line number where output started in the file and \ n * line variables . \ n * @ line mixed The line number where the output started . \ n * \ n * @ return bool headers_sent ( ) will return FALSE if no HTTP headers \ n * have already been sent or TRUE otherwise . 
\ n * / " , <nl> - " header_register_callback " , T ( Boolean ) , S ( 0 ) , " callback " , T ( Variant ) , NULL , S ( 0 ) , NULL , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from \ n * http : / / php . net / manual / en / function . header - register - callback . php ) \ n * \ n * Registers a function that will be called when PHP starts sending \ n * output . \ n * \ n * @ callback mixed Function called just before the headers are sent . It \ n * gets no parameters and the return value is ignored . \ n * \ n * @ return bool Returns TRUE on success or FALSE on failure . \ n * / " , <nl> - " header_remove " , T ( Void ) , S ( 0 ) , " name " , T ( String ) , " N ; " , S ( 2 ) , " null " , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . header - remove . php ) \ n * \ n * Removes an HTTP header previously set using header ( ) . \ n * \ n * @ name string The header name to be removed . This parameter is \ n * case - insensitive . \ n * \ n * @ return mixed No value is returned . \ n * / " , <nl> - " setcookie " , T ( Boolean ) , S ( 0 ) , " name " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " value " , T ( String ) , " N ; " , S ( 2 ) , " null " , S ( 0 ) , " expire " , T ( Int64 ) , " i : 0 ; " , S ( 4 ) , " 0 " , S ( 0 ) , " path " , T ( String ) , " N ; " , S ( 2 ) , " null " , S ( 0 ) , " domain " , T ( String ) , " N ; " , S ( 2 ) , " null " , S ( 0 ) , " secure " , T ( Boolean ) , " b : 0 ; " , S ( 4 ) , " false " , S ( 0 ) , " httponly " , T ( Boolean ) , " b : 0 ; " , S ( 4 ) , " false " , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . setcookie . php ) \ n * \ n * setcookie ( ) defines a cookie to be sent along with the rest of the HTTP \ n * headers . Like other headers , cookies must be sent before any output from \ n * your script ( this is a protocol restriction ) . This requires that you \ n * place calls to this function prior to any output , including < html > and \ n * < head > tags as well as any whitespace . \ n * \ n * Once the cookies have been set , they can be accessed on the next page \ n * load with the $ _COOKIE or $ HTTP_COOKIE_VARS arrays . Note , superglobals \ n * such as $ _COOKIE became available in PHP 4 . 1 . 0 . Cookie values also exist \ n * in $ _REQUEST . \ n * All the arguments except the name argument are optional . You may also \ n * replace an argument with an empty string ( \ " \ " ) in order to skip that \ n * argument . Because the expire argument is integer , it cannot be skipped \ n * with an empty string , use a zero ( 0 ) instead . \ n * \ n * See \ 273 Netscape cookie specification for specifics on how each \ n * setcookie ( ) parameter works \ n * \ n * @ name string The name of the cookie . \ n * @ value string The value of the cookie . This value is stored on the \ n * clients computer ; do not store sensitive \ n * information . Assuming the name is ' cookiename ' , this \ n * value is retrieved through $ _COOKIE [ ' cookiename ' ] \ n * @ expire int The time the cookie expires . This is a Unix \ n * timestamp so is in number of seconds since the \ n * epoch . In other words , you ' ll most likely set this \ n * with the time ( ) function plus the number of seconds \ n * before you want it to expire . Or you might use \ n * mktime ( ) . time ( ) + 60 * 60 * 24 * 30 will set the cookie to \ n * expire in 30 days . 
If set to 0 , or omitted , the \ n * cookie will expire at the end of the session ( when \ n * the browser closes ) . \ n * \ n * \ n * \ n * You may notice the expire parameter takes on a Unix \ n * timestamp , as opposed to the date format Wdy , \ n * DD - Mon - YYYY HH : MM : SS GMT , this is because PHP does \ n * this conversion internally . \ n * \ n * expire is compared to the client ' s time which can \ n * differ from server ' s time . \ n * @ path string The path on the server in which the cookie will be \ n * available on . If set to ' / ' , the cookie will be \ n * available within the entire domain . If set to \ n * ' / foo / ' , the cookie will only be available within \ n * the / foo / directory and all sub - directories such as \ n * / foo / bar / of domain . The default value is the \ n * current directory that the cookie is being set in . \ n * @ domain string The domain that the cookie is available . To make the \ n * cookie available on all subdomains of example . com \ n * then you ' d set it to ' . example . com ' . The . is not \ n * required but makes it compatible with more browsers . \ n * Setting it to www . example . com will make the cookie \ n * only available in the www subdomain . Refer to tail \ n * matching in the \ 273 spec for details . \ n * @ secure bool Indicates that the cookie should only be transmitted \ n * over a secure HTTPS connection from the client . When \ n * set to TRUE , the cookie will only be set if a secure \ n * connection exists . On the server - side , it ' s on the \ n * programmer to send this kind of cookie only on \ n * secure connection ( e . g . with respect to \ n * $ _SERVER [ \ " HTTPS \ " ] ) . \ n * @ httponly bool When TRUE the cookie will be made accessible only \ n * through the HTTP protocol . This means that the \ n * cookie won ' t be accessible by scripting languages , \ n * such as JavaScript . This setting can effectively \ n * help to reduce identity theft through XSS attacks \ n * ( although it is not supported by all browsers ) . \ n * Added in PHP 5 . 2 . 0 . TRUE or FALSE \ n * \ n * @ return bool If output exists prior to calling this function , \ n * setcookie ( ) will fail and return FALSE . If \ n * setcookie ( ) successfully runs , it will return TRUE . \ n * This does not indicate whether the user accepted the \ n * cookie . \ n * / " , <nl> - " setrawcookie " , T ( Boolean ) , S ( 0 ) , " name " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " value " , T ( String ) , " N ; " , S ( 2 ) , " null " , S ( 0 ) , " expire " , T ( Int64 ) , " i : 0 ; " , S ( 4 ) , " 0 " , S ( 0 ) , " path " , T ( String ) , " N ; " , S ( 2 ) , " null " , S ( 0 ) , " domain " , T ( String ) , " N ; " , S ( 2 ) , " null " , S ( 0 ) , " secure " , T ( Boolean ) , " b : 0 ; " , S ( 4 ) , " false " , S ( 0 ) , " httponly " , T ( Boolean ) , " b : 0 ; " , S ( 4 ) , " false " , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . setrawcookie . php ) \ n * \ n * setrawcookie ( ) is exactly the same as setcookie ( ) except that the \ n * cookie value will not be automatically urlencoded when sent to the \ n * browser . \ n * For parameter information , see the setcookie ( ) documentation . \ n * \ n * @ name string \ n * @ value string \ n * @ expire int \ n * @ path string \ n * @ domain string \ n * @ secure bool \ n * @ httponly bool \ n * \ n * @ return bool Returns TRUE on success or FALSE on failure . 
\ n * / " , <nl> + " headers_sent " , T ( Boolean ) , S ( 0 ) , " file " , T ( Variant ) , " N ; " , S ( 2 ) , " null " , S ( 1 ) , " line " , T ( Variant ) , " N ; " , S ( 2 ) , " null " , S ( 1 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . headers - sent . php ) \ n * \ n * Checks if or where headers have been sent . \ n * \ n * You can ' t add any more header lines using the header ( ) function once \ n * the header block has already been sent . Using this function you can at \ n * least prevent getting HTTP header related error messages . Another option \ n * is to use Output Buffering . \ n * \ n * @ file mixed If the optional file and line parameters are set , \ n * headers_sent ( ) will put the PHP source file name and \ n * line number where output started in the file and \ n * line variables . \ n * @ line mixed The line number where the output started . \ n * \ n * @ return bool headers_sent ( ) will return FALSE if no HTTP headers \ n * have already been sent or TRUE otherwise . \ n * / " , <nl> + " header_register_callback " , T ( Boolean ) , S ( 0 ) , " callback " , T ( Variant ) , NULL , S ( 0 ) , NULL , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from \ n * http : / / php . net / manual / en / function . header - register - callback . php ) \ n * \ n * Registers a function that will be called when PHP starts sending \ n * output . \ n * \ n * @ callback mixed Function called just before the headers are sent . It \ n * gets no parameters and the return value is ignored . \ n * \ n * @ return bool Returns TRUE on success or FALSE on failure . \ n * / " , <nl> + " header_remove " , T ( Void ) , S ( 0 ) , " name " , T ( String ) , " N ; " , S ( 2 ) , " null " , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . header - remove . php ) \ n * \ n * Removes an HTTP header previously set using header ( ) . \ n * \ n * @ name string The header name to be removed . This parameter is \ n * case - insensitive . \ n * \ n * @ return mixed No value is returned . \ n * / " , <nl> + " setcookie " , T ( Boolean ) , S ( 0 ) , " name " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " value " , T ( String ) , " N ; " , S ( 2 ) , " null " , S ( 0 ) , " expire " , T ( Int64 ) , " i : 0 ; " , S ( 4 ) , " 0 " , S ( 0 ) , " path " , T ( String ) , " N ; " , S ( 2 ) , " null " , S ( 0 ) , " domain " , T ( String ) , " N ; " , S ( 2 ) , " null " , S ( 0 ) , " secure " , T ( Boolean ) , " b : 0 ; " , S ( 4 ) , " false " , S ( 0 ) , " httponly " , T ( Boolean ) , " b : 0 ; " , S ( 4 ) , " false " , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . setcookie . php ) \ n * \ n * setcookie ( ) defines a cookie to be sent along with the rest of the HTTP \ n * headers . Like other headers , cookies must be sent before any output from \ n * your script ( this is a protocol restriction ) . This requires that you \ n * place calls to this function prior to any output , including < html > and \ n * < head > tags as well as any whitespace . \ n * \ n * Once the cookies have been set , they can be accessed on the next page \ n * load with the $ _COOKIE or $ HTTP_COOKIE_VARS arrays . Note , superglobals \ n * such as $ _COOKIE became available in PHP 4 . 1 . 0 . Cookie values also exist \ n * in $ _REQUEST . \ n * All the arguments except the name argument are optional . 
You may also \ n * replace an argument with an empty string ( \ " \ " ) in order to skip that \ n * argument . Because the expire argument is integer , it cannot be skipped \ n * with an empty string , use a zero ( 0 ) instead . \ n * \ n * See \ 273 Netscape cookie specification for specifics on how each \ n * setcookie ( ) parameter works \ n * \ n * @ name string The name of the cookie . \ n * @ value string The value of the cookie . This value is stored on the \ n * clients computer ; do not store sensitive \ n * information . Assuming the name is ' cookiename ' , this \ n * value is retrieved through $ _COOKIE [ ' cookiename ' ] \ n * @ expire int The time the cookie expires . This is a Unix \ n * timestamp so is in number of seconds since the \ n * epoch . In other words , you ' ll most likely set this \ n * with the time ( ) function plus the number of seconds \ n * before you want it to expire . Or you might use \ n * mktime ( ) . time ( ) + 60 * 60 * 24 * 30 will set the cookie to \ n * expire in 30 days . If set to 0 , or omitted , the \ n * cookie will expire at the end of the session ( when \ n * the browser closes ) . \ n * \ n * \ n * \ n * You may notice the expire parameter takes on a Unix \ n * timestamp , as opposed to the date format Wdy , \ n * DD - Mon - YYYY HH : MM : SS GMT , this is because PHP does \ n * this conversion internally . \ n * \ n * expire is compared to the client ' s time which can \ n * differ from server ' s time . \ n * @ path string The path on the server in which the cookie will be \ n * available on . If set to ' / ' , the cookie will be \ n * available within the entire domain . If set to \ n * ' / foo / ' , the cookie will only be available within \ n * the / foo / directory and all sub - directories such as \ n * / foo / bar / of domain . The default value is the \ n * current directory that the cookie is being set in . \ n * @ domain string The domain that the cookie is available . To make the \ n * cookie available on all subdomains of example . com \ n * then you ' d set it to ' . example . com ' . The . is not \ n * required but makes it compatible with more browsers . \ n * Setting it to www . example . com will make the cookie \ n * only available in the www subdomain . Refer to tail \ n * matching in the \ 273 spec for details . \ n * @ secure bool Indicates that the cookie should only be transmitted \ n * over a secure HTTPS connection from the client . When \ n * set to TRUE , the cookie will only be set if a secure \ n * connection exists . On the server - side , it ' s on the \ n * programmer to send this kind of cookie only on \ n * secure connection ( e . g . with respect to \ n * $ _SERVER [ \ " HTTPS \ " ] ) . \ n * @ httponly bool When TRUE the cookie will be made accessible only \ n * through the HTTP protocol . This means that the \ n * cookie won ' t be accessible by scripting languages , \ n * such as JavaScript . This setting can effectively \ n * help to reduce identity theft through XSS attacks \ n * ( although it is not supported by all browsers ) . \ n * Added in PHP 5 . 2 . 0 . TRUE or FALSE \ n * \ n * @ return bool If output exists prior to calling this function , \ n * setcookie ( ) will fail and return FALSE . If \ n * setcookie ( ) successfully runs , it will return TRUE . \ n * This does not indicate whether the user accepted the \ n * cookie . 
\ n * / " , <nl> + " setrawcookie " , T ( Boolean ) , S ( 0 ) , " name " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " value " , T ( String ) , " N ; " , S ( 2 ) , " null " , S ( 0 ) , " expire " , T ( Int64 ) , " i : 0 ; " , S ( 4 ) , " 0 " , S ( 0 ) , " path " , T ( String ) , " N ; " , S ( 2 ) , " null " , S ( 0 ) , " domain " , T ( String ) , " N ; " , S ( 2 ) , " null " , S ( 0 ) , " secure " , T ( Boolean ) , " b : 0 ; " , S ( 4 ) , " false " , S ( 0 ) , " httponly " , T ( Boolean ) , " b : 0 ; " , S ( 4 ) , " false " , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . setrawcookie . php ) \ n * \ n * setrawcookie ( ) is exactly the same as setcookie ( ) except that the \ n * cookie value will not be automatically urlencoded when sent to the \ n * browser . \ n * For parameter information , see the setcookie ( ) documentation . \ n * \ n * @ name string \ n * @ value string \ n * @ expire int \ n * @ path string \ n * @ domain string \ n * @ secure bool \ n * @ httponly bool \ n * \ n * @ return bool Returns TRUE on success or FALSE on failure . \ n * / " , <nl> " define_syslog_variables " , T ( Void ) , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from \ n * http : / / php . net / manual / en / function . define - syslog - variables . php ) \ n * \ n * Initializes all variables used in the syslog functions . \ n * \ n * / " , <nl> - " openlog " , T ( Boolean ) , S ( 0 ) , " ident " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " option " , T ( Int32 ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " facility " , T ( Int32 ) , NULL , S ( 0 ) , NULL , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . openlog . php ) \ n * \ n * openlog ( ) opens a connection to the system logger for a program . \ n * \ n * The use of openlog ( ) is optional . It will automatically be called by \ n * syslog ( ) if necessary , in which case ident will default to FALSE . \ n * \ n * @ ident string The string ident is added to each message . \ n * @ option int The option argument is used to indicate what logging \ n * options will be used when generating a log message . \ n * openlog ( ) Options Constant Description LOG_CONS if \ n * there is an error while sending data to the system \ n * logger , write directly to the system console \ n * LOG_NDELAY open the connection to the logger \ n * immediately LOG_ODELAY ( default ) delay opening the \ n * connection until the first message is logged \ n * LOG_PERROR print log message also to standard error \ n * LOG_PID include PID with each message You can use \ n * one or more of this options . When using multiple \ n * options you need to OR them , i . e . to open the \ n * connection immediately , write to the console and \ n * include the PID in each message , you will use : \ n * LOG_CONS | LOG_NDELAY | LOG_PID \ n * @ facility int The facility argument is used to specify what type \ n * of program is logging the message . This allows you \ n * to specify ( in your machine ' s syslog configuration ) \ n * how messages coming from different facilities will \ n * be handled . openlog ( ) Facilities Constant \ n * Description LOG_AUTH security / authorization messages \ n * ( use LOG_AUTHPRIV instead in systems where that \ n * constant is defined ) LOG_AUTHPRIV \ n * security / authorization messages ( private ) LOG_CRON \ n * clock daemon ( cron and at ) LOG_DAEMON other system \ n * daemons LOG_KERN kernel messages LOG_LOCAL0 . . . 
\ n * LOG_LOCAL7 reserved for local use , these are not \ n * available in Windows LOG_LPR line printer subsystem \ n * LOG_MAIL mail subsystem LOG_NEWS USENET news \ n * subsystem LOG_SYSLOG messages generated internally \ n * by syslogd LOG_USER generic user - level messages \ n * LOG_UUCP UUCP subsystem \ n * \ n * LOG_USER is the only valid log type under Windows \ n * operating systems \ n * \ n * @ return bool Returns TRUE on success or FALSE on failure . \ n * / " , <nl> + " openlog " , T ( Boolean ) , S ( 0 ) , " ident " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " option " , T ( Int32 ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " facility " , T ( Int32 ) , NULL , S ( 0 ) , NULL , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . openlog . php ) \ n * \ n * openlog ( ) opens a connection to the system logger for a program . \ n * \ n * The use of openlog ( ) is optional . It will automatically be called by \ n * syslog ( ) if necessary , in which case ident will default to FALSE . \ n * \ n * @ ident string The string ident is added to each message . \ n * @ option int The option argument is used to indicate what logging \ n * options will be used when generating a log message . \ n * openlog ( ) Options Constant Description LOG_CONS if \ n * there is an error while sending data to the system \ n * logger , write directly to the system console \ n * LOG_NDELAY open the connection to the logger \ n * immediately LOG_ODELAY ( default ) delay opening the \ n * connection until the first message is logged \ n * LOG_PERROR print log message also to standard error \ n * LOG_PID include PID with each message You can use \ n * one or more of this options . When using multiple \ n * options you need to OR them , i . e . to open the \ n * connection immediately , write to the console and \ n * include the PID in each message , you will use : \ n * LOG_CONS | LOG_NDELAY | LOG_PID \ n * @ facility int The facility argument is used to specify what type \ n * of program is logging the message . This allows you \ n * to specify ( in your machine ' s syslog configuration ) \ n * how messages coming from different facilities will \ n * be handled . openlog ( ) Facilities Constant \ n * Description LOG_AUTH security / authorization messages \ n * ( use LOG_AUTHPRIV instead in systems where that \ n * constant is defined ) LOG_AUTHPRIV \ n * security / authorization messages ( private ) LOG_CRON \ n * clock daemon ( cron and at ) LOG_DAEMON other system \ n * daemons LOG_KERN kernel messages LOG_LOCAL0 . . . \ n * LOG_LOCAL7 reserved for local use , these are not \ n * available in Windows LOG_LPR line printer subsystem \ n * LOG_MAIL mail subsystem LOG_NEWS USENET news \ n * subsystem LOG_SYSLOG messages generated internally \ n * by syslogd LOG_USER generic user - level messages \ n * LOG_UUCP UUCP subsystem \ n * \ n * LOG_USER is the only valid log type under Windows \ n * operating systems \ n * \ n * @ return bool Returns TRUE on success or FALSE on failure . \ n * / " , <nl> " closelog " , T ( Boolean ) , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . closelog . php ) \ n * \ n * closelog ( ) closes the descriptor being used to write to the system \ n * logger . The use of closelog ( ) is optional . \ n * \ n * @ return bool Returns TRUE on success or FALSE on failure . 
\ n * / " , <nl> - " syslog " , T ( Boolean ) , S ( 0 ) , " priority " , T ( Int32 ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " message " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . syslog . php ) \ n * \ n * syslog ( ) generates a log message that will be distributed by the system \ n * logger . \ n * \ n * For information on setting up a user defined log handler , see the \ n * syslog . conf ( 5 ) Unix manual page . More information on the syslog \ n * facilities and option can be found in the man pages for syslog ( 3 ) on \ n * Unix machines . \ n * \ n * @ priority int priority is a combination of the facility and the \ n * level . Possible values are : syslog ( ) Priorities ( in \ n * descending order ) Constant Description LOG_EMERG \ n * system is unusable LOG_ALERT action must be taken \ n * immediately LOG_CRIT critical conditions LOG_ERR \ n * error conditions LOG_WARNING warning conditions \ n * LOG_NOTICE normal , but significant , condition \ n * LOG_INFO informational message LOG_DEBUG debug - level \ n * message \ n * @ message string The message to send , except that the two characters \ n * % m will be replaced by the error message string \ n * ( strerror ) corresponding to the present value of \ n * errno . \ n * \ n * @ return bool Returns TRUE on success or FALSE on failure . \ n * / " , <nl> + " syslog " , T ( Boolean ) , S ( 0 ) , " priority " , T ( Int32 ) , NULL , S ( 0 ) , NULL , S ( 0 ) , " message " , T ( String ) , NULL , S ( 0 ) , NULL , S ( 0 ) , NULL , S ( 16384 ) , " / * * \ n * ( excerpt from http : / / php . net / manual / en / function . syslog . php ) \ n * \ n * syslog ( ) generates a log message that will be distributed by the system \ n * logger . \ n * \ n * For information on setting up a user defined log handler , see the \ n * syslog . conf ( 5 ) Unix manual page . More information on the syslog \ n * facilities and option can be found in the man pages for syslog ( 3 ) on \ n * Unix machines . \ n * \ n * @ priority int priority is a combination of the facility and the \ n * level . Possible values are : syslog ( ) Priorities ( in \ n * descending order ) Constant Description LOG_EMERG \ n * system is unusable LOG_ALERT action must be taken \ n * immediately LOG_CRIT critical conditions LOG_ERR \ n * error conditions LOG_WARNING warning conditions \ n * LOG_NOTICE normal , but significant , condition \ n * LOG_INFO informational message LOG_DEBUG debug - level \ n * message \ n * @ message string The message to send , except that the two characters \ n * % m will be replaced by the error message string \ n * ( strerror ) corresponding to the present value of \ n * errno . \ n * \ n * @ return bool Returns TRUE on success or FALSE on failure . \ n * / " , <nl> <nl> # elif EXT_TYPE = = 1 <nl> <nl>
Add http_response_code ( ) , behavior matching Zend
facebook/hhvm
e0d098ef704efa1f46f02e1c687f08e8d9e17518
2013-03-10T04:23:28Z
mmm a / hphp / runtime / vm / jit / vasm - fold - imms . cpp <nl> ppp b / hphp / runtime / vm / jit / vasm - fold - imms . cpp <nl> void foldImms ( Vunit & unit ) { <nl> for ( auto & inst : unit . blocks [ b ] . code ) { <nl> switch ( inst . op ) { <nl> # define O ( name , imms , uses , defs ) \ <nl> - case Vinstr : : name : folder . fold ( inst . name # # _ , inst ) ; break ; <nl> + case Vinstr : : name : { \ <nl> + auto origin = inst . origin ; \ <nl> + folder . fold ( inst . name # # _ , inst ) ; \ <nl> + inst . origin = origin ; \ <nl> + break ; \ <nl> + } <nl> X64_OPCODES <nl> # undef O <nl> } <nl> mmm a / hphp / runtime / vm / jit / vasm - x64 . cpp <nl> ppp b / hphp / runtime / vm / jit / vasm - x64 . cpp <nl> void Vgen : : emit ( jit : : vector < Vlabel > & labels ) { <nl> } <nl> <nl> for ( auto i = 0 ; i < areas . size ( ) ; + + i ) { <nl> - const IRInstruction * currentOrigin = nullptr ; <nl> auto & blockInfos = areaToBlockInfos [ i ] ; <nl> for ( auto const blockID : labels ) { <nl> auto const & blockInfo = blockInfos [ static_cast < size_t > ( blockID ) ] ; <nl> if ( blockInfo . snippets . empty ( ) ) continue ; <nl> <nl> + const IRInstruction * currentOrigin = nullptr ; <nl> for ( auto const & snip : blockInfo . snippets ) { <nl> if ( currentOrigin ! = snip . origin & & snip . origin ) { <nl> currentOrigin = snip . origin ; <nl>
Fixup origins during vasm immediate folding
facebook/hhvm
8a35ba8ee0fd1e54658d07ab09c209d8e7d490a3
2014-09-26T15:00:22Z
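
The change above saves an instruction's origin before the fold and restores it afterwards, so the folding step cannot clobber it. A minimal C++ sketch of that save-restore pattern, using placeholder Inst/Origin types rather than HHVM's actual Vinstr definitions, might look like:

    #include <cassert>

    struct Origin { int id; };
    struct Inst {
      const Origin* origin;  // metadata that the folder may clobber
      int imm;
    };

    // Hypothetical folding step that rewrites the instruction in place and,
    // as a side effect, resets its metadata fields.
    void foldImmediate(Inst& inst) {
      inst = Inst{};   // rebuilds the instruction, losing inst.origin
      inst.imm = 42;
    }

    // Save the origin before folding and restore it afterwards, mirroring
    // the macro change in the diff.
    void foldPreservingOrigin(Inst& inst) {
      const Origin* origin = inst.origin;
      foldImmediate(inst);
      inst.origin = origin;
    }

    int main() {
      Origin o{7};
      Inst inst{&o, 0};
      foldPreservingOrigin(inst);
      assert(inst.origin == &o && inst.imm == 42);
    }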
mmm a / src / runtime / base / array / zend_array . cpp <nl> ppp b / src / runtime / base / array / zend_array . cpp <nl> void ZendArray : : iter_dirty_reset ( ) const { <nl> } <nl> <nl> void ZendArray : : iter_dirty_check ( ) const { <nl> - if ( RuntimeOption : : EnableHipHopErrors & & ( m_flag & IterationDirty ) ) { <nl> + if ( RuntimeOption : : EnableHipHopErrors & & ( m_flag & IterationDirty ) & & <nl> + ! isStatic ( ) ) { <nl> raise_notice ( " In PHP , mixing up foreach ( ) and functional style array " <nl> " iteration by calling current ( ) , each ( ) , key ( ) , value ( ) , " <nl> " prev ( ) , next ( ) may lead to undefined behavior . In HipHop , " <nl>
don ' t error on static array ' s iterations
facebook/hhvm
40299ae21c53945795af519f8b03a337a3f1a4b7
2010-11-25T13:50:43Z
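
The added condition suppresses the mixed-iteration notice for static arrays. A small C++ sketch of that guard, with placeholder names standing in for the HPHP ZendArray internals, could be:

    #include <cassert>

    struct Array {
      static constexpr unsigned kIterationDirty = 1u << 0;
      unsigned flags = 0;
      bool is_static = false;
      bool isStatic() const { return is_static; }
    };

    // Emit the "mixed iteration" notice only for dirty, non-static arrays.
    bool shouldWarn(const Array& a, bool hiphop_errors_enabled) {
      return hiphop_errors_enabled &&
             (a.flags & Array::kIterationDirty) &&
             !a.isStatic();
    }

    int main() {
      Array dirty;  dirty.flags = Array::kIterationDirty;
      Array shared; shared.flags = Array::kIterationDirty; shared.is_static = true;
      assert(shouldWarn(dirty, true));
      assert(!shouldWarn(shared, true));   // static arrays no longer warn
      assert(!shouldWarn(dirty, false));
    }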
mmm a / lib / ffmpeg / patches / 0028 - ffmpeg - 1 . 2 - fixed - dvd - still - frames - ended - up - in - intern . patch <nl> ppp b / lib / ffmpeg / patches / 0028 - ffmpeg - 1 . 2 - fixed - dvd - still - frames - ended - up - in - intern . patch <nl> Subject : [ PATCH 4 / 4 ] ffmpeg 1 . 2 - fixed dvd still frames ended up in internal <nl> lavf <nl> <nl> mmm <nl> - lib / ffmpeg / libavformat / utils . c | 2 + - <nl> + libavformat / utils . c | 2 + - <nl> 1 file changed , 1 insertion ( + ) , 1 deletion ( - ) <nl> <nl> - + mmm - a / lib / ffmpeg / libavformat / utils . c <nl> - ppp b / lib / ffmpeg / libavformat / utils . c <nl> + mmm a / libavformat / utils . c <nl> ppp + b / libavformat / utils . c <nl> no_packet : <nl> <nl> if ( end | | av_log2 ( pd - > buf_size ) ! = av_log2 ( pd - > buf_size - pkt - > size ) ) { <nl>
[ ffmpeg ] fix : patch needs to operate from lib / ffmpeg
xbmc/xbmc
451d5ce02be3f4c88ea45a97d95639bee767f7bb
2013-04-19T19:57:05Z
mmm a / unittests / misc_tests . cpp <nl> ppp b / unittests / misc_tests . cpp <nl> BOOST_AUTO_TEST_CASE ( json_from_string_test ) <nl> BOOST_CHECK_EQUAL ( exc_found , true ) ; <nl> } <nl> <nl> + BOOST_AUTO_TEST_CASE ( variant_format_string_limited ) <nl> + { <nl> + const string format = " $ { a } $ { b } $ { c } " ; <nl> + { <nl> + fc : : mutable_variant_object mu ; <nl> + mu ( " a " , string ( 1024 , ' a ' ) ) ; <nl> + mu ( " b " , string ( 1024 , ' b ' ) ) ; <nl> + mu ( " c " , string ( 1024 , ' c ' ) ) ; <nl> + string result = fc : : format_string ( format , mu , true ) ; <nl> + BOOST_CHECK_EQUAL ( result , string ( 256 , ' a ' ) + " . . . " + string ( 256 , ' b ' ) + " . . . " + string ( 256 , ' c ' ) + " . . . " ) ; <nl> + } <nl> + { <nl> + fc : : mutable_variant_object mu ; <nl> + signed_block a ; <nl> + blob b ; <nl> + for ( int i = 0 ; i < 1024 ; + + i ) <nl> + b . data . push_back ( ' b ' ) ; <nl> + variants c ; <nl> + c . push_back ( variant ( a ) ) ; <nl> + mu ( " a " , a ) ; <nl> + mu ( " b " , b ) ; <nl> + mu ( " c " , c ) ; <nl> + string result = fc : : format_string ( format , mu , true ) ; <nl> + BOOST_CHECK_EQUAL ( result , " $ { a } $ { b } $ { c } " ) ; <nl> + } <nl> + } <nl> + <nl> / / Test overflow handling in asset : : from_string <nl> BOOST_AUTO_TEST_CASE ( asset_from_string_overflow ) <nl> { <nl>
Add test for minimized format_string
EOSIO/eos
7d82b8287fdb9f1bde9770aad51eef87dd788b6a
2019-02-21T02:08:50Z
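
The new test expects each oversized substitution to be cut to 256 characters followed by "...". A rough, self-contained C++ sketch of that kind of per-value truncation, with a helper name that is illustrative rather than fc's real API, might be:

    #include <cassert>
    #include <string>

    // Truncate a substituted value to a fixed budget, appending "..." when
    // anything was cut, similar to what the limited format test expects.
    std::string truncate_for_log(const std::string& value, size_t limit = 256) {
      if (value.size() <= limit) return value;
      return value.substr(0, limit) + "...";
    }

    int main() {
      std::string a(1024, 'a');
      assert(truncate_for_log(a) == std::string(256, 'a') + "...");
      assert(truncate_for_log("short") == "short");
    }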
mmm a / runtests . py <nl> ppp b / runtests . py <nl> def getTestCaseNames ( self , testCaseClass ) : <nl> names = super ( Loader , self ) . getTestCaseNames ( testCaseClass ) <nl> return filter ( lambda name : shouldIncludeTestName ( name ) , names ) <nl> <nl> - def loadTestsFromModule ( self , module ) : <nl> + def loadTestsFromModule ( self , module , * args , * * kw ) : <nl> if not shouldIncludeTestFile ( module . __file__ ) : <nl> return unittest . TestSuite ( ) <nl> - return super ( Loader , self ) . loadTestsFromModule ( module ) <nl> + return super ( Loader , self ) . loadTestsFromModule ( module , * args , * * kw ) <nl> <nl> loader = Loader ( ) <nl> suite = unittest . TestSuite ( ) <nl>
runtests . py : support extra args in loadTestsFromModule
facebook/watchman
22a2ea8f869a65b7fc05547ceafdc44d753b80d4
2016-05-23T01:21:38Z
mmm a / test / extensions / clusters / redis / redis_cluster_test . cc <nl> ppp b / test / extensions / clusters / redis / redis_cluster_test . cc <nl> class RedisClusterTest : public testing : : Test , <nl> EXPECT_CALL ( active_dns_query_ , cancel ( ) ) ; <nl> } <nl> <nl> - void testRedisResolve ( ) { <nl> - EXPECT_CALL ( dispatcher_ , createTimer_ ( _ ) ) ; <nl> - RedisCluster : : RedisDiscoverySession discovery_session ( * cluster_ , * this ) ; <nl> - auto dns_response = <nl> - TestUtility : : makeDnsResponse ( std : : list < std : : string > ( { " 127 . 0 . 0 . 1 " , " 127 . 0 . 0 . 2 " } ) ) ; <nl> - discovery_session . registerDiscoveryAddress ( std : : move ( dns_response ) , 22120 ) ; <nl> - expectRedisResolve ( true ) ; <nl> - discovery_session . startResolveRedis ( ) ; <nl> - <nl> - / / 2nd startResolveRedis ( ) call will be a no - opt until the first startResolve is done . <nl> - discovery_session . startResolveRedis ( ) ; <nl> - <nl> - / / Make sure cancel is called . <nl> - EXPECT_CALL ( pool_request_ , cancel ( ) ) ; <nl> - } <nl> - <nl> Stats : : IsolatedStoreImpl stats_store_ ; <nl> Ssl : : MockContextManager ssl_context_manager_ ; <nl> std : : shared_ptr < NiceMock < Network : : MockDnsResolver > > dns_resolver_ { <nl> TEST_F ( RedisClusterTest , DnsDiscoveryResolverBasic ) { <nl> testDnsResolve ( " foo . bar . com " , 22120 ) ; <nl> } <nl> <nl> - TEST_F ( RedisClusterTest , RedisDiscoveryResolverBasic ) { <nl> - setupFromV2Yaml ( BasicConfig ) ; <nl> - testRedisResolve ( ) ; <nl> + TEST_F ( RedisClusterTest , MultipleDnsDiscovery ) { <nl> + const std : : string config = R " EOF ( <nl> + name : name <nl> + connect_timeout : 0 . 25s <nl> + dns_lookup_family : V4_ONLY <nl> + hosts : <nl> + - socket_address : <nl> + address : foo . bar . com <nl> + port_value : 22120 <nl> + - socket_address : <nl> + address : foo1 . bar . com <nl> + port_value : 22120 <nl> + cluster_type : <nl> + name : envoy . clusters . redis <nl> + typed_config : <nl> + " @ type " : type . googleapis . com / google . protobuf . Struct <nl> + value : <nl> + cluster_refresh_rate : 4s <nl> + cluster_refresh_timeout : 0 . 25s <nl> + ) EOF " ; <nl> + <nl> + setupFromV2Yaml ( config ) ; <nl> + <nl> + / / Only single in - flight " cluster slots " call . <nl> + expectRedisResolve ( true ) ; <nl> + <nl> + ReadyWatcher dns_resolve_1 ; <nl> + ReadyWatcher dns_resolve_2 ; <nl> + <nl> + EXPECT_CALL ( * dns_resolver_ , resolve ( " foo . bar . com " , _ , _ ) ) <nl> + . WillOnce ( Invoke ( [ & ] ( const std : : string & , Network : : DnsLookupFamily , <nl> + Network : : DnsResolver : : ResolveCb cb ) - > Network : : ActiveDnsQuery * { <nl> + cb ( TestUtility : : makeDnsResponse ( std : : list < std : : string > ( { " 127 . 0 . 0 . 1 " , " 127 . 0 . 0 . 2 " } ) ) ) ; <nl> + return nullptr ; <nl> + } ) ) ; <nl> + <nl> + EXPECT_CALL ( * dns_resolver_ , resolve ( " foo1 . bar . com " , _ , _ ) ) <nl> + . WillOnce ( Invoke ( [ & ] ( const std : : string & , Network : : DnsLookupFamily , <nl> + Network : : DnsResolver : : ResolveCb cb ) - > Network : : ActiveDnsQuery * { <nl> + cb ( TestUtility : : makeDnsResponse ( std : : list < std : : string > ( { " 127 . 0 . 0 . 3 " , " 127 . 0 . 0 . 4 " } ) ) ) ; <nl> + return nullptr ; <nl> + } ) ) ; <nl> + <nl> + cluster_ - > initialize ( [ & ] ( ) - > void { initialized_ . ready ( ) ; } ) ; <nl> + <nl> + / / Pending RedisResolve will call cancel in the destructor . <nl> + EXPECT_CALL ( pool_request_ , cancel ( ) ) ; <nl> } <nl> <nl> } / / namespace Redis <nl>
Fix redis_cluster_test for multiple DNS resolution scenario . ( )
envoyproxy/envoy
34dbd5c544ba0773d7911fc514ec6d7e22abf660
2019-07-09T03:48:42Z
mmm a / dbtests / namespacetests . cpp <nl> ppp b / dbtests / namespacetests . cpp <nl> namespace NamespaceTests { <nl> } <nl> <nl> / / Too big <nl> - char ch2 [ 800 ] ; <nl> - ASSERT ( theDataFileMgr . insert ( ns ( ) , ch2 , 800 ) . isNull ( ) ) ; <nl> + BSONObjBuilder bob ; <nl> + bob . append ( " a " , string ( 787 , ' a ' ) ) ; <nl> + BSONObj bigger = bob . done ( ) ; <nl> + ASSERT ( theDataFileMgr . insert ( ns ( ) , bigger . objdata ( ) , bigger . objsize ( ) ) . isNull ( ) ) ; <nl> ASSERT_EQUALS ( 0 , nRecords ( ) ) ; <nl> } <nl> private : <nl>
Insert a real object
mongodb/mongo
387103f95562ea7655bf33c7ddfe83ffd08afdba
2009-01-31T03:19:12Z
mmm a / tensorflow / compiler / xla / service / heap_simulator . cc <nl> ppp b / tensorflow / compiler / xla / service / heap_simulator . cc <nl> GlobalDecreasingSizeBestFitHeap : : FindChunkCandidate ( <nl> / / Find the minimum free chunk that can hold this buffer . <nl> ChunkCandidate chunk_candidate { Chunk { - 1 , INT64_MAX } , result_ . heap_size } ; <nl> Chunk & min_fit_chunk = chunk_candidate . chunk ; <nl> + int64 preferred_chunk_end = preferred_offset + buffer_interval . size ; <nl> auto use_free_chunk_if_smaller = [ & ] ( int64 free_offset , int64 free_size ) { <nl> if ( free_size < buffer_interval . size ) { <nl> return ; <nl> GlobalDecreasingSizeBestFitHeap : : FindChunkCandidate ( <nl> <nl> / / If a preferred offset is provided , pick that offset . <nl> if ( free_offset < = preferred_offset & & <nl> - free_offset + free_size > = preferred_offset + buffer_interval . size ) { <nl> + free_offset + free_size > = preferred_chunk_end ) { <nl> min_fit_chunk = { preferred_offset , buffer_interval . size } ; <nl> + } else if ( free_offset + free_size = = result_ . heap_size & & <nl> + free_offset < = preferred_offset ) { <nl> + / / If the free offset is at the very end and if the preferred offset lies <nl> + / / in this , pick the preferred offset and grow the heap . <nl> + min_fit_chunk = { preferred_offset , buffer_interval . size } ; <nl> + chunk_candidate . heap_size = preferred_chunk_end ; <nl> } <nl> <nl> / / Pick the min - fit chunk only if we didn ' t have a preferred offset or a <nl> GlobalDecreasingSizeBestFitHeap : : FindChunkCandidate ( <nl> / / When preferred offset is provided and the preferred offset is larger than <nl> / / the current heap size , simply use the preferred offset provided . <nl> if ( result_ . heap_size < = preferred_offset ) { <nl> - chunk_candidate . heap_size = preferred_offset + buffer_interval . size ; <nl> + chunk_candidate . heap_size = preferred_chunk_end ; <nl> min_fit_chunk = { preferred_offset , buffer_interval . size } ; <nl> } <nl> <nl> mmm a / tensorflow / compiler / xla / service / heap_simulator_test . cc <nl> ppp b / tensorflow / compiler / xla / service / heap_simulator_test . cc <nl> TEST_F ( NoFragmentationStatsHeapTest , Mixed ) { <nl> EXPECT_EQ ( 40 , heap . Finish ( ) . heap_size ) ; <nl> } <nl> <nl> - class GlobalDecreasingSizeBestFitHeapTest : public HeapAlgorithmTestBase { } ; <nl> + class GlobalDecreasingSizeBestFitHeapTest : public HeapAlgorithmTestBase { <nl> + protected : <nl> + class InheritedGlobalDecreasingSizeBestFitHeap <nl> + : public GlobalDecreasingSizeBestFitHeap { <nl> + public : <nl> + InheritedGlobalDecreasingSizeBestFitHeap ( ) <nl> + : GlobalDecreasingSizeBestFitHeap ( / * alignment = * / 1 ) { } <nl> + <nl> + / / Finds a chunk candidate and returns the offset and the new heap size . <nl> + std : : pair < int64 , int64 > FindChunkCandidate ( const HloValue * buffer , <nl> + int64 size , int64 start , <nl> + int64 end , <nl> + int64 preferred_offset = - 1 ) { <nl> + buffer_interval_ . buffer = buffer ; <nl> + buffer_interval_ . size = size ; <nl> + buffer_interval_ . start = start ; <nl> + buffer_interval_ . end = end ; <nl> + chunk_candidate_ = GlobalDecreasingSizeBestFitHeap : : FindChunkCandidate ( <nl> + buffer_interval_ , preferred_offset ) ; <nl> + EXPECT_EQ ( chunk_candidate_ . chunk . size , size ) ; <nl> + return { chunk_candidate_ . chunk . offset , chunk_candidate_ . heap_size } ; <nl> + } <nl> + <nl> + / / Commits the previously found chunk candidate . 
<nl> + void CommitChunk ( ) { <nl> + GlobalDecreasingSizeBestFitHeap : : CommitChunk ( buffer_interval_ , <nl> + chunk_candidate_ ) ; <nl> + } <nl> + <nl> + private : <nl> + BufferInterval buffer_interval_ ; <nl> + ChunkCandidate chunk_candidate_ ; <nl> + } ; <nl> + <nl> + InheritedGlobalDecreasingSizeBestFitHeap heap_ ; <nl> + } ; <nl> <nl> TEST_F ( GlobalDecreasingSizeBestFitHeapTest , Empty ) { <nl> GlobalDecreasingSizeBestFitHeap heap ( / * alignment = * / 1 ) ; <nl> TEST_F ( GlobalDecreasingSizeBestFitHeapTest , ColocatedIII ) { <nl> EXPECT_EQ ( 30 , result . chunk_map . at ( buffer_c_ ) . offset ) ; <nl> } <nl> <nl> + TEST_F ( GlobalDecreasingSizeBestFitHeapTest , ChunkCandidate ) { <nl> + / / space <nl> + / / ^ <nl> + / / 35 | <nl> + / / | + mmmmmmmmm - - + <nl> + / / | | | <nl> + / / 30 | | | <nl> + / / | | po : 15 | <nl> + / / | | | <nl> + / / 25 | + mmm - - gmmm - - + <nl> + / / | + mmm - - + <nl> + / / | | po : 20 | <nl> + / / 20 | + - - f - - + <nl> + / / | + mmm - - + <nl> + / / | | | <nl> + / / 15 | | | <nl> + / / | + mmmmmmmmmmmmmmm - - + | po : 10 | <nl> + / / | | | | | <nl> + / / 10 | + mmmmmm - cmmmmmmmmm + + - - e - - + <nl> + / / | + mmm - - + + mmmmmmmmm - - + <nl> + / / | | | | po : 5 | <nl> + / / 5 | | | + mmm - - ammm - - + <nl> + / / | + mmm - - + | | <nl> + / / | | po : 10 | | | <nl> + / / 0 | + - - d - - + + - - b - - + <nl> + / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - > time <nl> + / / 0 1 2 3 4 5 6 7 8 9 10 11 12 13 <nl> + using pair = std : : pair < int64 , int64 > ; <nl> + EXPECT_EQ ( pair ( 5 , 10 ) , heap_ . FindChunkCandidate ( buffer_a_ , 5 , 6 , 10 , 5 ) ) ; <nl> + heap_ . CommitChunk ( ) ; / / offset : 5 , size : 5 , start : 6 , end : 10 <nl> + / / Preferred offset 5 is returned . <nl> + EXPECT_EQ ( pair ( 0 , 10 ) , heap_ . FindChunkCandidate ( buffer_b_ , 10 , 3 , 5 ) ) ; <nl> + heap_ . CommitChunk ( ) ; / / offset : 0 , size : 10 , start : 3 , end : 5 <nl> + EXPECT_EQ ( pair ( 10 , 15 ) , heap_ . FindChunkCandidate ( buffer_c_ , 5 , 2 , 8 ) ) ; <nl> + heap_ . CommitChunk ( ) ; / / offset : 10 , size : 5 , start : 2 , end : 8 <nl> + EXPECT_EQ ( pair ( 0 , 15 ) , heap_ . FindChunkCandidate ( buffer_d_ , 5 , 0 , 2 , 10 ) ) ; <nl> + heap_ . CommitChunk ( ) ; / / offset : 0 , size : 5 , start : 0 , end : 2 <nl> + / / Preferred offset 10 could not be given because it is occupied . <nl> + EXPECT_EQ ( pair ( 10 , 20 ) , heap_ . FindChunkCandidate ( buffer_e_ , 10 , 11 , 13 , 10 ) ) ; <nl> + heap_ . CommitChunk ( ) ; / / offset : 10 , size : 10 , start : 11 , end : 13 <nl> + / / Preferred offset 10 is returned . <nl> + EXPECT_EQ ( pair ( 20 , 25 ) , heap_ . FindChunkCandidate ( buffer_f_ , 5 , 3 , 5 , 20 ) ) ; <nl> + heap_ . CommitChunk ( ) ; / / offset : 20 , size : 5 , start : 3 , end : 5 <nl> + / / Preferred offset 20 is returned . <nl> + EXPECT_EQ ( pair ( 25 , 35 ) , heap_ . FindChunkCandidate ( buffer_g_ , 10 , 4 , 8 , 15 ) ) ; <nl> + heap_ . CommitChunk ( ) ; / / offset : 25 , size : 10 , start : 4 , end : 8 <nl> + / / Preferred offset 15 could not be given because it is occupied . <nl> + } <nl> + <nl> } / / namespace <nl> } / / namespace xla <nl>
[ XLA ] Fix a minor bug in picking candidate chunk at preferred offset
tensorflow/tensorflow
93ccefd6d3b8d32f7afcc43568fc7e872e744767
2020-01-30T02:00:12Z
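
The fix lets FindChunkCandidate honor a preferred offset when the only suitable free region ends at the current heap end, growing the heap instead of ignoring the preference. A simplified C++ sketch of that placement rule, with toy types rather than XLA's BufferInterval/ChunkCandidate, could look like:

    #include <cassert>
    #include <cstdint>

    struct Chunk { int64_t offset; int64_t size; };

    // Decide where to place a buffer of `size` bytes given one free region
    // [free_offset, free_offset + free_size) and the current heap size.
    // Grows heap_size when the preferred offset runs past the heap end.
    Chunk place(int64_t size, int64_t preferred_offset,
                int64_t free_offset, int64_t free_size, int64_t& heap_size) {
      int64_t preferred_end = preferred_offset + size;
      if (free_offset <= preferred_offset &&
          free_offset + free_size >= preferred_end) {
        // Preferred offset fits entirely inside the free region.
        return {preferred_offset, size};
      }
      if (free_offset + free_size == heap_size &&
          free_offset <= preferred_offset) {
        // Free region ends at the heap end: honor the preferred offset and
        // grow the heap, as in the patched FindChunkCandidate.
        heap_size = preferred_end;
        return {preferred_offset, size};
      }
      // Otherwise fall back to the start of the free region.
      return {free_offset, size};
    }

    int main() {
      int64_t heap_size = 20;
      // Free region [10, 20) at the end of the heap; preferred offset 15, size 10.
      Chunk c = place(10, 15, 10, 10, heap_size);
      assert(c.offset == 15 && heap_size == 25);
    }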
mmm a / src / app / ui / file_selector . cpp <nl> ppp b / src / app / ui / file_selector . cpp <nl> static void on_exit_delete_navigation_history ( ) <nl> class CustomFileNameEntry : public Entry <nl> { <nl> public : <nl> - CustomFileNameEntry ( ) : Entry ( 256 , " " ) , m_fileList ( NULL ) { <nl> + CustomFileNameEntry ( ) : <nl> + Entry ( 256 , " " ) , <nl> + m_fileList ( NULL ) , <nl> + m_timer ( 250 , this ) { <nl> + m_timer . Tick . connect ( & CustomFileNameEntry : : onTick , this ) ; <nl> } <nl> <nl> void setAssociatedFileList ( FileList * fileList ) { <nl> class CustomFileNameEntry : public Entry <nl> <nl> protected : <nl> virtual bool onProcessMessage ( Message * msg ) override { <nl> - if ( msg - > type ( ) = = kKeyUpMessage & & <nl> - static_cast < KeyMessage * > ( msg ) - > unicodeChar ( ) > = 32 ) { <nl> - / / String to be autocompleted <nl> - std : : string left_part = getText ( ) ; <nl> - if ( left_part . empty ( ) ) <nl> - return false ; <nl> + switch ( msg - > type ( ) ) { <nl> <nl> - const FileItemList & children = m_fileList - > getFileList ( ) ; <nl> + case kKeyDownMessage : <nl> + m_timer . stop ( ) ; <nl> + break ; <nl> <nl> - for ( IFileItem * child : children ) { <nl> - std : : string child_name = child - > getDisplayName ( ) ; <nl> - std : : string : : iterator it1 , it2 ; <nl> + case kKeyUpMessage : <nl> + if ( static_cast < KeyMessage * > ( msg ) - > unicodeChar ( ) > = 32 ) <nl> + m_timer . start ( ) ; <nl> + break ; <nl> <nl> - for ( it1 = child_name . begin ( ) , it2 = left_part . begin ( ) ; <nl> - it1 ! = child_name . end ( ) & & it2 ! = left_part . end ( ) ; <nl> - + + it1 , + + it2 ) { <nl> - if ( std : : tolower ( * it1 ) ! = std : : tolower ( * it2 ) ) <nl> - break ; <nl> - } <nl> + } <nl> + return Entry : : onProcessMessage ( msg ) ; <nl> + } <nl> <nl> - / / Is the pattern ( left_part ) in the child_name ' s beginning ? <nl> - if ( it2 = = left_part . end ( ) ) { <nl> - setText ( left_part + child_name . substr ( left_part . size ( ) ) ) ; <nl> - selectText ( child_name . size ( ) , left_part . size ( ) ) ; <nl> - return true ; <nl> - } <nl> + void onTick ( ) { <nl> + m_timer . stop ( ) ; <nl> + <nl> + / / String to be autocompleted <nl> + std : : string left_part = getText ( ) ; <nl> + if ( left_part . empty ( ) ) <nl> + return ; <nl> + <nl> + const FileItemList & children = m_fileList - > getFileList ( ) ; <nl> + <nl> + for ( IFileItem * child : children ) { <nl> + std : : string child_name = child - > getDisplayName ( ) ; <nl> + std : : string : : iterator it1 , it2 ; <nl> + <nl> + for ( it1 = child_name . begin ( ) , it2 = left_part . begin ( ) ; <nl> + it1 ! = child_name . end ( ) & & it2 ! = left_part . end ( ) ; <nl> + + + it1 , + + it2 ) { <nl> + if ( std : : tolower ( * it1 ) ! = std : : tolower ( * it2 ) ) <nl> + break ; <nl> + } <nl> + <nl> + / / Is the pattern ( left_part ) in the child_name ' s beginning ? <nl> + if ( it2 = = left_part . end ( ) ) { <nl> + setText ( left_part + child_name . substr ( left_part . size ( ) ) ) ; <nl> + selectText ( child_name . size ( ) , left_part . size ( ) ) ; <nl> + return ; <nl> } <nl> } <nl> - return Entry : : onProcessMessage ( msg ) ; <nl> } <nl> <nl> private : <nl> FileList * m_fileList ; <nl> + Timer m_timer ; <nl> } ; <nl> <nl> / / Class to create CustomFileNameEntries . <nl>
Add timer to autocomplete in FileSelector
aseprite/aseprite
03ac41e05dc5d93c9993d86ceab3367886109b23
2014-09-10T03:58:25Z
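The change above defers autocomplete until typing pauses: key-down stops a 250 ms timer, key-up restarts it, and the prefix match runs only on the timer tick. The sketch below shows the same debounce idea using a plain std::chrono clock instead of the toolkit timer; Debouncer is a hypothetical helper, not an aseprite class.

#include <chrono>

// Illustrative debounce helper: call Arm() on every keystroke; Due() reports
// true once no keystroke has arrived for `delay` milliseconds.
class Debouncer {
 public:
  explicit Debouncer(std::chrono::milliseconds delay) : delay_(delay) {}

  void Arm() {
    armed_ = true;
    last_ = std::chrono::steady_clock::now();
  }

  bool Due() {
    if (!armed_) return false;
    if (std::chrono::steady_clock::now() - last_ < delay_) return false;
    armed_ = false;  // fire once per burst of keystrokes
    return true;
  }

 private:
  std::chrono::milliseconds delay_;
  std::chrono::steady_clock::time_point last_{};
  bool armed_ = false;
};

A UI loop would call Arm() from its key handler and poll Due() each frame before running the match, which is the same effect the timer-based version achieves with start/stop and a tick callback.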
mmm a / dbms / include / DB / IO / ReadBufferFromHTTP . h <nl> ppp b / dbms / include / DB / IO / ReadBufferFromHTTP . h <nl> namespace DB <nl> class ReadBufferFromHTTP : public ReadBuffer <nl> { <nl> private : <nl> + String method ; <nl> String host ; <nl> int port ; <nl> + String path ; <nl> <nl> Poco : : Net : : HTTPClientSession session ; <nl> std : : istream * istr ; / / / owned by session <nl> class ReadBufferFromHTTP : public ReadBuffer <nl> ReadBufferFromHTTP ( <nl> const String & host_ , <nl> int port_ , <nl> + const String & path_ , <nl> const Params & params , <nl> + const String & method_ = " " , <nl> size_t buffer_size_ = DBMS_DEFAULT_BUFFER_SIZE , <nl> const Poco : : Timespan & connection_timeout = Poco : : Timespan ( DEFAULT_HTTP_READ_BUFFER_CONNECTION_TIMEOUT , 0 ) , <nl> const Poco : : Timespan & send_timeout = Poco : : Timespan ( DEFAULT_HTTP_READ_BUFFER_TIMEOUT , 0 ) , <nl> mmm a / dbms / include / DB / IO / RemoteReadBuffer . h <nl> ppp b / dbms / include / DB / IO / RemoteReadBuffer . h <nl> class RemoteReadBuffer : public ReadBuffer <nl> std : : make_pair ( " path " , path ) , <nl> std : : make_pair ( " compress " , ( compress ? " true " : " false " ) ) } ; <nl> <nl> - impl = std : : make_unique < ReadBufferFromHTTP > ( host , port , params , buffer_size , connection_timeout , send_timeout , receive_timeout ) ; <nl> + impl = std : : make_unique < ReadBufferFromHTTP > ( host , port , " " , params , " " , buffer_size , connection_timeout , send_timeout , receive_timeout ) ; <nl> } <nl> <nl> bool nextImpl ( ) override <nl> class RemoteReadBuffer : public ReadBuffer <nl> std : : make_pair ( " action " , " list " ) , <nl> std : : make_pair ( " path " , path ) } ; <nl> <nl> - ReadBufferFromHTTP in ( host , port , params , timeout ) ; <nl> + ReadBufferFromHTTP in ( host , port , " " , params , " " , timeout ) ; <nl> <nl> std : : vector < std : : string > files ; <nl> while ( ! in . eof ( ) ) <nl> mmm a / dbms / include / DB / Storages / MergeTree / DataPartsExchange . h <nl> ppp b / dbms / include / DB / Storages / MergeTree / DataPartsExchange . h <nl> <nl> <nl> # include < DB / Interpreters / InterserverIOHandler . h > <nl> # include < DB / Storages / MergeTree / MergeTreeData . h > <nl> - # include < DB / IO / ReadBufferFromHTTP . h > <nl> # include < DB / IO / HashingWriteBuffer . h > <nl> # include < DB / IO / copyData . h > <nl> <nl> mmm a / dbms / src / Dictionaries / HTTPDictionarySource . cpp <nl> ppp b / dbms / src / Dictionaries / HTTPDictionarySource . cpp <nl> <nl> # include < DB / Dictionaries / OwningBufferBlockInputStream . h > <nl> # include < DB / IO / ReadBufferFromHTTP . h > <nl> # include < DB / IO / RemoteReadBuffer . h > <nl> + # include < Poco / Net / HTTPRequest . h > <nl> <nl> namespace DB <nl> { <nl> HTTPDictionarySource : : HTTPDictionarySource ( const Poco : : Util : : AbstractConfigurati <nl> host { config . getString ( config_prefix + " . host " ) } , <nl> port { std : : stoi ( config . getString ( config_prefix + " . port " ) ) } , <nl> path { config . getString ( config_prefix + " . path " ) } , <nl> - format { format } , <nl> + format { config . getString ( config_prefix + " . format " ) } , <nl> sample_block { sample_block } , <nl> context ( context ) <nl> { <nl> - / / last_modification = LocalDateTime { std : : time ( nullptr ) } ; <nl> last_modification = std : : time ( nullptr ) ; <nl> - std : : cerr < < __FUNCTION__ < < " : " < < __LINE__ < < " Ok . 
" < < std : : endl ; <nl> <nl> } <nl> <nl> HTTPDictionarySource : : HTTPDictionarySource ( const HTTPDictionarySource & other ) : <nl> { <nl> } <nl> <nl> - <nl> BlockInputStreamPtr HTTPDictionarySource : : loadAll ( ) <nl> { <nl> - <nl> - std : : cerr < < " http go " < < toString ( ) < < std : : endl ; <nl> - <nl> - ReadBufferFromHTTP : : Params params = <nl> - { <nl> - / / { " endpoint " , getEndpointId ( location . name ) } , <nl> - / / { " compress " , " false " } , <nl> - / / { " query " , query } <nl> - } ; <nl> - <nl> - ReadBufferFromHTTP in { host , port , params } ; <nl> - <nl> - <nl> - <nl> - / * <nl> - auto in_ptr = std : : make_unique < RemoteReadBuffer > ( host , port , path ) ; <nl> + auto in_ptr = std : : make_unique < ReadBufferFromHTTP > ( host , port , path , params , Poco : : Net : : HTTPRequest : : HTTP_GET ) ; <nl> auto stream = context . getInputFormat ( format , * in_ptr , sample_block , max_block_size ) ; <nl> return std : : make_shared < OwningBufferBlockInputStream > ( stream , std : : move ( in_ptr ) ) ; <nl> - * / <nl> - throw Exception { " Method unsupported " , ErrorCodes : : NOT_IMPLEMENTED } ; <nl> } <nl> <nl> BlockInputStreamPtr HTTPDictionarySource : : loadIds ( const std : : vector < UInt64 > & ids ) <nl> DictionarySourcePtr HTTPDictionarySource : : clone ( ) const <nl> <nl> std : : string HTTPDictionarySource : : toString ( ) const <nl> { <nl> - std : : cerr < < " TS " < < std : : endl ; <nl> - <nl> - return " HTTP : " + host + " : " + std : : to_string ( port ) + path ; <nl> + return " http : / / " + host + " : " + std : : to_string ( port ) + " / " + path ; <nl> } <nl> <nl> LocalDateTime HTTPDictionarySource : : getLastModification ( ) const <nl> LocalDateTime HTTPDictionarySource : : getLastModification ( ) const <nl> return last_modification ; <nl> } <nl> <nl> - <nl> } <nl> mmm a / dbms / src / IO / ReadBufferFromHTTP . cpp <nl> ppp b / dbms / src / IO / ReadBufferFromHTTP . cpp <nl> static Poco : : Net : : IPAddress resolveHost ( const String & host ) <nl> ReadBufferFromHTTP : : ReadBufferFromHTTP ( <nl> const String & host_ , <nl> int port_ , <nl> + const String & path_ , <nl> const Params & params , <nl> + const String & method_ , <nl> size_t buffer_size_ , <nl> const Poco : : Timespan & connection_timeout , <nl> const Poco : : Timespan & send_timeout , <nl> const Poco : : Timespan & receive_timeout ) <nl> - : ReadBuffer ( nullptr , 0 ) , host ( host_ ) , port ( port_ ) <nl> + : ReadBuffer ( nullptr , 0 ) , host ( host_ ) , port ( port_ ) , path ( path_ ) , method ( method_ ) <nl> { <nl> + if ( method . empty ( ) ) <nl> + method = Poco : : Net : : HTTPRequest : : HTTP_POST ; <nl> + <nl> std : : stringstream uri ; <nl> - uri < < " http : / / " < < host < < " : " < < port < < " / " ; <nl> + uri < < " http : / / " < < host < < " : " < < port < < " / " < < path ; <nl> <nl> bool first = true ; <nl> for ( const auto & it : params ) <nl> ReadBufferFromHTTP : : ReadBufferFromHTTP ( <nl> <nl> session . setTimeout ( connection_timeout , send_timeout , receive_timeout ) ; <nl> <nl> - Poco : : Net : : HTTPRequest request ( Poco : : Net : : HTTPRequest : : HTTP_POST , uri . str ( ) ) ; <nl> + Poco : : Net : : HTTPRequest request ( method , " / " + path ) ; <nl> Poco : : Net : : HTTPResponse response ; <nl> <nl> LOG_TRACE ( ( & Logger : : get ( " ReadBufferFromHTTP " ) ) , " Sending request to " < < uri . str ( ) ) ; <nl> mmm a / dbms / src / Storages / MergeTree / DataPartsExchange . cpp <nl> ppp b / dbms / src / Storages / MergeTree / DataPartsExchange . 
cpp <nl> <nl> # include < DB / Storages / MergeTree / DataPartsExchange . h > <nl> # include < DB / Storages / StorageReplicatedMergeTree . h > <nl> # include < DB / Common / CurrentMetrics . h > <nl> + # include < DB / IO / ReadBufferFromHTTP . h > <nl> <nl> <nl> namespace CurrentMetrics <nl> MergeTreeData : : MutableDataPartPtr Fetcher : : fetchPartImpl ( <nl> { " compress " , " false " } <nl> } ; <nl> <nl> - ReadBufferFromHTTP in ( host , port , params ) ; <nl> + ReadBufferFromHTTP in ( host , port , " " , params ) ; <nl> <nl> String full_part_name = String ( to_detached ? " detached / " : " " ) + " tmp_ " + part_name ; <nl> String part_path = data . getFullPath ( ) + full_part_name + " / " ; <nl> mmm a / dbms / src / Storages / MergeTree / RemoteDiskSpaceMonitor . cpp <nl> ppp b / dbms / src / Storages / MergeTree / RemoteDiskSpaceMonitor . cpp <nl> size_t Client : : getFreeSpace ( const InterserverIOEndpointLocation & location ) cons <nl> { " compress " , " false " } <nl> } ; <nl> <nl> - ReadBufferFromHTTP in { location . host , location . port , params } ; <nl> + ReadBufferFromHTTP in { location . host , location . port , " " , params } ; <nl> <nl> size_t free_disk_space ; <nl> readBinary ( free_disk_space , in ) ; <nl> mmm a / dbms / src / Storages / MergeTree / RemotePartChecker . cpp <nl> ppp b / dbms / src / Storages / MergeTree / RemotePartChecker . cpp <nl> Status Client : : check ( const std : : string & part_name , const std : : string & hash , <nl> { " hash " , hash } <nl> } ; <nl> <nl> - ReadBufferFromHTTP in { to_location . host , to_location . port , params } ; <nl> + ReadBufferFromHTTP in { to_location . host , to_location . port , " " , params } ; <nl> <nl> UInt8 val ; <nl> readBinary ( val , in ) ; <nl> mmm a / dbms / src / Storages / MergeTree / RemoteQueryExecutor . cpp <nl> ppp b / dbms / src / Storages / MergeTree / RemoteQueryExecutor . cpp <nl> bool Client : : executeQuery ( const InterserverIOEndpointLocation & location , const <nl> { " query " , query } <nl> } ; <nl> <nl> - ReadBufferFromHTTP in { location . host , location . port , params } ; <nl> + ReadBufferFromHTTP in { location . host , location . port , " " , params } ; <nl> <nl> bool flag ; <nl> readBinary ( flag , in ) ; <nl> mmm a / dbms / src / Storages / MergeTree / ShardedPartitionUploader . cpp <nl> ppp b / dbms / src / Storages / MergeTree / ShardedPartitionUploader . cpp <nl> <nl> # include < DB / Storages / MergeTree / ShardedPartitionUploader . h > <nl> # include < DB / Storages / MergeTree / ReplicatedMergeTreeAddress . h > <nl> # include < DB / Storages / StorageReplicatedMergeTree . h > <nl> - # include < DB / IO / ReadBufferFromHTTP . h > <nl> # include < DB / IO / ReadHelpers . h > <nl> # include < DB / IO / WriteHelpers . h > <nl> <nl>
simple get works
ClickHouse/ClickHouse
8fec8e2ac4fc74930353a2b7c51d1722b5d944ff
2016-11-15T23:55:45Z
mmm a / imgui . cpp <nl> ppp b / imgui . cpp <nl> <nl> / / Forward Declarations <nl> / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> <nl> - static void LogRenderedText ( const ImVec2 & ref_pos , const char * text , const char * text_end = NULL ) ; <nl> - <nl> static float GetDraggedColumnOffset ( int column_index ) ; <nl> <nl> static bool IsKeyPressedMap ( ImGuiKey key , bool repeat = true ) ; <nl> void ImGui : : LogText ( const char * fmt , . . . ) <nl> <nl> / / Internal version that takes a position to decide on newline placement and pad items according to their depth . <nl> / / We split text into individual lines to add current tree level padding <nl> - static void LogRenderedText ( const ImVec2 & ref_pos , const char * text , const char * text_end ) <nl> + static void LogRenderedText ( const ImVec2 * ref_pos , const char * text , const char * text_end = NULL ) <nl> { <nl> ImGuiContext & g = * GImGui ; <nl> ImGuiWindow * window = ImGui : : GetCurrentWindowRead ( ) ; <nl> static void LogRenderedText ( const ImVec2 & ref_pos , const char * text , const char * <nl> if ( ! text_end ) <nl> text_end = ImGui : : FindRenderedTextEnd ( text , text_end ) ; <nl> <nl> - const bool log_new_line = ref_pos . y > window - > DC . LogLinePosY + 1 ; <nl> - window - > DC . LogLinePosY = ref_pos . y ; <nl> + const bool log_new_line = ref_pos & & ( ref_pos - > y > window - > DC . LogLinePosY + 1 ) ; <nl> + if ( ref_pos ) <nl> + window - > DC . LogLinePosY = ref_pos - > y ; <nl> <nl> const char * text_remaining = text ; <nl> if ( g . LogStartDepth > window - > DC . TreeDepth ) / / Re - adjust padding if we have popped out of our starting depth <nl> void ImGui : : RenderText ( ImVec2 pos , const char * text , const char * text_end , bool <nl> { <nl> window - > DrawList - > AddText ( g . Font , g . FontSize , pos , GetColorU32 ( ImGuiCol_Text ) , text , text_display_end ) ; <nl> if ( g . LogEnabled ) <nl> - LogRenderedText ( pos , text , text_display_end ) ; <nl> + LogRenderedText ( & pos , text , text_display_end ) ; <nl> } <nl> } <nl> <nl> void ImGui : : RenderTextWrapped ( ImVec2 pos , const char * text , const char * text_end <nl> { <nl> window - > DrawList - > AddText ( g . Font , g . FontSize , pos , GetColorU32 ( ImGuiCol_Text ) , text , text_end , wrap_width ) ; <nl> if ( g . LogEnabled ) <nl> - LogRenderedText ( pos , text , text_end ) ; <nl> + LogRenderedText ( & pos , text , text_end ) ; <nl> } <nl> } <nl> <nl> void ImGui : : RenderTextClipped ( const ImVec2 & pos_min , const ImVec2 & pos_max , cons <nl> window - > DrawList - > AddText ( g . Font , g . FontSize , pos , GetColorU32 ( ImGuiCol_Text ) , text , text_display_end , 0 . 0f , NULL ) ; <nl> } <nl> if ( g . LogEnabled ) <nl> - LogRenderedText ( pos , text , text_display_end ) ; <nl> + LogRenderedText ( & pos , text , text_display_end ) ; <nl> } <nl> <nl> / / Render a rectangle shaped with optional rounding and borders <nl> bool ImGui : : Begin ( const char * name , bool * p_open , const ImVec2 & size_on_first_us <nl> if ( window - > Collapsed ) <nl> { <nl> / / Title bar only <nl> - RenderFrame ( title_bar_rect . GetTL ( ) , title_bar_rect . GetBR ( ) , GetColorU32 ( ImGuiCol_TitleBgCollapsed ) , true , window_rounding ) ; <nl> + RenderFrame ( title_bar_rect . Min , title_bar_rect . 
Max , GetColorU32 ( ImGuiCol_TitleBgCollapsed ) , true , window_rounding ) ; <nl> } <nl> else <nl> { <nl> bool ImGui : : TreeNodeBehavior ( ImGuiID id , ImGuiTreeNodeFlags flags , const char * l <nl> / / NB : ' # # ' is normally used to hide text ( as a library - wide feature ) , so we need to specify the text range to make sure the # # aren ' t stripped out here . <nl> const char log_prefix [ ] = " \ n # # " ; <nl> const char log_suffix [ ] = " # # " ; <nl> - LogRenderedText ( text_pos , log_prefix , log_prefix + 3 ) ; <nl> + LogRenderedText ( & text_pos , log_prefix , log_prefix + 3 ) ; <nl> RenderTextClipped ( text_pos , bb . Max , label , label_end , & label_size ) ; <nl> - LogRenderedText ( text_pos , log_suffix + 1 , log_suffix + 3 ) ; <nl> + LogRenderedText ( & text_pos , log_suffix + 1 , log_suffix + 3 ) ; <nl> } <nl> else <nl> { <nl> bool ImGui : : TreeNodeBehavior ( ImGuiID id , ImGuiTreeNodeFlags flags , const char * l <nl> else if ( ! ( flags & ImGuiTreeNodeFlags_Leaf ) ) <nl> RenderCollapseTriangle ( bb . Min + ImVec2 ( padding . x , g . FontSize * 0 . 15f + text_base_offset_y ) , is_open , 0 . 70f ) ; <nl> if ( g . LogEnabled ) <nl> - LogRenderedText ( text_pos , " > " ) ; <nl> + LogRenderedText ( & text_pos , " > " ) ; <nl> RenderText ( text_pos , label , label_end , false ) ; <nl> } <nl> <nl> bool ImGui : : Checkbox ( const char * label , bool * v ) <nl> } <nl> <nl> if ( g . LogEnabled ) <nl> - LogRenderedText ( text_bb . GetTL ( ) , * v ? " [ x ] " : " [ ] " ) ; <nl> + LogRenderedText ( & text_bb . Min , * v ? " [ x ] " : " [ ] " ) ; <nl> if ( label_size . x > 0 . 0f ) <nl> - RenderText ( text_bb . GetTL ( ) , label ) ; <nl> + RenderText ( text_bb . Min , label ) ; <nl> <nl> return pressed ; <nl> } <nl> bool ImGui : : RadioButton ( const char * label , bool active ) <nl> } <nl> <nl> if ( g . LogEnabled ) <nl> - LogRenderedText ( text_bb . GetTL ( ) , active ? " ( x ) " : " ( ) " ) ; <nl> + LogRenderedText ( & text_bb . Min , active ? " ( x ) " : " ( ) " ) ; <nl> if ( label_size . x > 0 . 0f ) <nl> - RenderText ( text_bb . GetTL ( ) , label ) ; <nl> + RenderText ( text_bb . Min , label ) ; <nl> <nl> return pressed ; <nl> } <nl> bool ImGui : : InputTextEx ( const char * label , char * buf , int buf_size , const ImVec2 <nl> <nl> / / Log as text <nl> if ( g . LogEnabled & & ! is_password ) <nl> - LogRenderedText ( render_pos , buf_display , NULL ) ; <nl> + LogRenderedText ( & render_pos , buf_display , NULL ) ; <nl> <nl> if ( label_size . x > 0 ) <nl> RenderText ( ImVec2 ( frame_bb . Max . x + style . ItemInnerSpacing . x , frame_bb . Min . y + style . FramePadding . y ) , label ) ; <nl> void ImGui : : Separator ( ) <nl> window - > DrawList - > AddLine ( bb . Min , ImVec2 ( bb . Max . x , bb . Min . y ) , GetColorU32 ( ImGuiCol_Separator ) ) ; <nl> <nl> if ( g . LogEnabled ) <nl> - LogText ( IM_NEWLINE " mmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - " ) ; <nl> + LogRenderedText ( NULL , IM_NEWLINE " mmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - " ) ; <nl> <nl> if ( window - > DC . ColumnsCount > 1 ) <nl> { <nl>
Separator ( ) : Tweak Logging so that the separator text is aligned according to tree padding .
ocornut/imgui
914200212187d9d2e30b56420af256642ee63e07
2017-09-28T14:40:28Z
mmm a / . bazelrc <nl> ppp b / . bazelrc <nl> build : asan - fuzzer - - config = clang - asan <nl> build : asan - fuzzer - - define = FUZZING_ENGINE = libfuzzer <nl> build : asan - fuzzer - - copt = - fsanitize = fuzzer - no - link <nl> build : asan - fuzzer - - copt = - fno - omit - frame - pointer <nl> + build : asan - fuzzer - - copt = - DFUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION <nl> # Remove UBSAN halt_on_error to avoid crashing on protobuf errors . <nl> build : asan - fuzzer - - test_env = UBSAN_OPTIONS = print_stacktrace = 1 <nl> <nl> mmm a / bazel / repository_locations . bzl <nl> ppp b / bazel / repository_locations . bzl <nl> DEPENDENCY_REPOSITORIES = dict ( <nl> # 3 . Find a commit in BoringSSL ' s " master - with - bazel " branch that merges < boringssl_revision > . <nl> # <nl> # chromium - 81 . 0 . 4044 . 69 <nl> + # 2020 - 01 - 22 <nl> urls = [ " https : / / github . com / google / boringssl / archive / 1c2769383f027befac5b75b6cedd25daf3bf4dcf . tar . gz " ] , <nl> use_category = [ " dataplane " ] , <nl> cpe = " N / A " , <nl> DEPENDENCY_REPOSITORIES = dict ( <nl> use_category = [ " other " ] , <nl> ) , <nl> com_github_fmtlib_fmt = dict ( <nl> - sha256 = " f1907a58d5e86e6c382e51441d92ad9e23aea63827ba47fd647eacc0d3a16c78 " , <nl> - strip_prefix = " fmt - 6 . 0 . 0 " , <nl> - urls = [ " https : / / github . com / fmtlib / fmt / archive / 6 . 0 . 0 . tar . gz " ] , <nl> + sha256 = " 5014aacf55285bf79654539791de0d6925063fddf4dfdd597ef76b53eb994f86 " , <nl> + strip_prefix = " fmt - e2ff910675c7800e5c4e28e1509ca6a50bdceafa " , <nl> + # 2020 - 04 - 29 <nl> + urls = [ " https : / / github . com / fmtlib / fmt / archive / e2ff910675c7800e5c4e28e1509ca6a50bdceafa . tar . gz " ] , <nl> use_category = [ " observability " ] , <nl> cpe = " N / A " , <nl> ) , <nl> DEPENDENCY_REPOSITORIES = dict ( <nl> # This sha on grpc : v1 . 25 . x branch is specifically chosen to fix gRPC STS call credential options . <nl> sha256 = " bbc8f020f4e85ec029b047fab939b8c81f3d67254b5c724e1003a2bc49ddd123 " , <nl> strip_prefix = " grpc - d8f4928fa779f6005a7fe55a176bdb373b0f910f " , <nl> + # 2020 - 02 - 11 <nl> urls = [ " https : / / github . com / grpc / grpc / archive / d8f4928fa779f6005a7fe55a176bdb373b0f910f . tar . gz " ] , <nl> use_category = [ " dataplane " , " controlplane " ] , <nl> cpe = " cpe : 2 . 3 : a : grpc : grpc : * " , <nl> new file mode 100644 <nl> index 00000000000 . . 11fd7836527 <nl> mmm / dev / null <nl> ppp b / test / server / server_corpus / crash - ac725507195d840cdb90bed3079b877e6e9419e3 <nl> <nl> + dynamic_resources { <nl> + } <nl> + cluster_manager { <nl> + local_cluster_name : " \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 \ 000 " <nl> + } <nl> + hidden_envoy_deprecated_runtime { <nl> + } <nl> + admin { <nl> + } <nl> + stats_config { <nl> + use_all_default_tags { <nl> + value : true <nl> + } <nl> + } <nl> + layered_runtime { <nl> + layers { <nl> + disk_layer { <nl> + append_service_cluster : true <nl> + } <nl> + } <nl> + } <nl> + use_tcp_for_dns_lookups : true <nl>
[ fuzz ] fix fuzz crashes in fmt format ( )
envoyproxy/envoy
63e6829659226b5f79ad1b3998ccb1eb9234f8a1
2020-05-15T03:32:02Z
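FUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION is the macro conventionally defined in libFuzzer/OSS-Fuzz style builds (here by the asan-fuzzer bazel config) so libraries can compile out behavior that is only appropriate under fuzzing. A rough sketch of the usual guard pattern, with a made-up parser function standing in for real code:

#include <cstddef>

// Illustrative pattern: keep fuzzing-only shortcuts behind the standard macro
// so production builds retain full behavior.
int ParseUntrustedInput(const char* data, std::size_t size) {
#if defined(FUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION)
  // In fuzzing builds, cap work per input so the fuzzer spends its budget
  // exploring states instead of timing out on pathological sizes.
  if (size > 4096) return 0;
#endif
  // ... real parsing would go here ...
  (void)data;
  return static_cast<int>(size);
}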
mmm a / include / v8 - platform . h <nl> ppp b / include / v8 - platform . h <nl> class Platform { <nl> * Returns an instance of a v8 : : TracingController . This must be non - nullptr . <nl> * / <nl> virtual TracingController * GetTracingController ( ) = 0 ; <nl> - <nl> - / / DEPRECATED methods , use TracingController interface instead . <nl> - <nl> - / * * <nl> - * Called by TRACE_EVENT * macros , don ' t call this directly . <nl> - * The name parameter is a category group for example : <nl> - * TRACE_EVENT0 ( " v8 , parse " , " V8 . Parse " ) <nl> - * The pointer returned points to a value with zero or more of the bits <nl> - * defined in CategoryGroupEnabledFlags . <nl> - * * / <nl> - virtual const uint8_t * GetCategoryGroupEnabled ( const char * name ) { <nl> - static uint8_t no = 0 ; <nl> - return & no ; <nl> - } <nl> - <nl> - / * * <nl> - * Gets the category group name of the given category_enabled_flag pointer . <nl> - * Usually used while serliazing TRACE_EVENTs . <nl> - * * / <nl> - virtual const char * GetCategoryGroupName ( <nl> - const uint8_t * category_enabled_flag ) { <nl> - static const char dummy [ ] = " dummy " ; <nl> - return dummy ; <nl> - } <nl> - <nl> - / * * <nl> - * Adds a trace event to the platform tracing system . This function call is <nl> - * usually the result of a TRACE_ * macro from trace_event_common . h when <nl> - * tracing and the category of the particular trace are enabled . It is not <nl> - * advisable to call this function on its own ; it is really only meant to be <nl> - * used by the trace macros . The returned handle can be used by <nl> - * UpdateTraceEventDuration to update the duration of COMPLETE events . <nl> - * / <nl> - virtual uint64_t AddTraceEvent ( <nl> - char phase , const uint8_t * category_enabled_flag , const char * name , <nl> - const char * scope , uint64_t id , uint64_t bind_id , int32_t num_args , <nl> - const char * * arg_names , const uint8_t * arg_types , <nl> - const uint64_t * arg_values , unsigned int flags ) { <nl> - return 0 ; <nl> - } <nl> - <nl> - / * * <nl> - * Adds a trace event to the platform tracing system . This function call is <nl> - * usually the result of a TRACE_ * macro from trace_event_common . h when <nl> - * tracing and the category of the particular trace are enabled . It is not <nl> - * advisable to call this function on its own ; it is really only meant to be <nl> - * used by the trace macros . The returned handle can be used by <nl> - * UpdateTraceEventDuration to update the duration of COMPLETE events . <nl> - * / <nl> - virtual uint64_t AddTraceEvent ( <nl> - char phase , const uint8_t * category_enabled_flag , const char * name , <nl> - const char * scope , uint64_t id , uint64_t bind_id , int32_t num_args , <nl> - const char * * arg_names , const uint8_t * arg_types , <nl> - const uint64_t * arg_values , <nl> - std : : unique_ptr < ConvertableToTraceFormat > * arg_convertables , <nl> - unsigned int flags ) { <nl> - return AddTraceEvent ( phase , category_enabled_flag , name , scope , id , bind_id , <nl> - num_args , arg_names , arg_types , arg_values , flags ) ; <nl> - } <nl> - <nl> - / * * <nl> - * Sets the duration field of a COMPLETE trace event . It must be called with <nl> - * the handle returned from AddTraceEvent ( ) . 
<nl> - * * / <nl> - virtual void UpdateTraceEventDuration ( const uint8_t * category_enabled_flag , <nl> - const char * name , uint64_t handle ) { } <nl> - <nl> - typedef v8 : : TracingController : : TraceStateObserver TraceStateObserver ; <nl> - <nl> - / * * Adds tracing state change observer . * / <nl> - virtual void AddTraceStateObserver ( TraceStateObserver * ) { } <nl> - <nl> - / * * Removes tracing state change observer . * / <nl> - virtual void RemoveTraceStateObserver ( TraceStateObserver * ) { } <nl> } ; <nl> <nl> } / / namespace v8 <nl>
Remove deprecated tracing methods from v8 : : Platform
v8/v8
03850446dc17f2183387627c2585d395e9ab281a
2017-08-23T13:31:30Z
mmm a / xbmc / cores / AudioEngine / DSPAddons / ActiveAEDSPProcess . cpp <nl> ppp b / xbmc / cores / AudioEngine / DSPAddons / ActiveAEDSPProcess . cpp <nl> bool CActiveAEDSPProcess : : Create ( const AEAudioFormat & inputFormat , const AEAudio <nl> if ( m_inputFormat . m_channelLayout . HasChannel ( AE_CH_TBL ) ) m_addonSettings . lInChannelPresentFlags | = AE_DSP_PRSNT_CH_TBL ; <nl> if ( m_inputFormat . m_channelLayout . HasChannel ( AE_CH_TBR ) ) m_addonSettings . lInChannelPresentFlags | = AE_DSP_PRSNT_CH_TBR ; <nl> if ( m_inputFormat . m_channelLayout . HasChannel ( AE_CH_TBC ) ) m_addonSettings . lInChannelPresentFlags | = AE_DSP_PRSNT_CH_TBC ; <nl> + if ( m_inputFormat . m_channelLayout . HasChannel ( AE_CH_BLOC ) ) m_addonSettings . lInChannelPresentFlags | = AE_DSP_PRSNT_CH_BLOC ; <nl> + if ( m_inputFormat . m_channelLayout . HasChannel ( AE_CH_BROC ) ) m_addonSettings . lInChannelPresentFlags | = AE_DSP_PRSNT_CH_BROC ; <nl> <nl> if ( m_outputFormat . m_channelLayout . HasChannel ( AE_CH_FL ) ) m_addonSettings . lOutChannelPresentFlags | = AE_DSP_PRSNT_CH_FL ; <nl> if ( m_outputFormat . m_channelLayout . HasChannel ( AE_CH_FR ) ) m_addonSettings . lOutChannelPresentFlags | = AE_DSP_PRSNT_CH_FR ; <nl> bool CActiveAEDSPProcess : : Create ( const AEAudioFormat & inputFormat , const AEAudio <nl> if ( m_outputFormat . m_channelLayout . HasChannel ( AE_CH_TBL ) ) m_addonSettings . lOutChannelPresentFlags | = AE_DSP_PRSNT_CH_TBL ; <nl> if ( m_outputFormat . m_channelLayout . HasChannel ( AE_CH_TBR ) ) m_addonSettings . lOutChannelPresentFlags | = AE_DSP_PRSNT_CH_TBR ; <nl> if ( m_outputFormat . m_channelLayout . HasChannel ( AE_CH_TBC ) ) m_addonSettings . lOutChannelPresentFlags | = AE_DSP_PRSNT_CH_TBC ; <nl> + if ( m_outputFormat . m_channelLayout . HasChannel ( AE_CH_BLOC ) ) m_addonSettings . lOutChannelPresentFlags | = AE_DSP_PRSNT_CH_BLOC ; <nl> + if ( m_outputFormat . m_channelLayout . HasChannel ( AE_CH_BROC ) ) m_addonSettings . lOutChannelPresentFlags | = AE_DSP_PRSNT_CH_BROC ; <nl> <nl> / * ! <nl> * Setup off mode , used if dsp master processing is set off , required to have data <nl> bool CActiveAEDSPProcess : : Process ( CSampleBuffer * in , CSampleBuffer * out ) <nl> m_idx_in [ AE_CH_TBL ] = av_get_channel_layout_channel_index ( m_channelLayoutIn , AV_CH_TOP_BACK_LEFT ) ; <nl> m_idx_in [ AE_CH_TBC ] = av_get_channel_layout_channel_index ( m_channelLayoutIn , AV_CH_TOP_BACK_CENTER ) ; <nl> m_idx_in [ AE_CH_TBR ] = av_get_channel_layout_channel_index ( m_channelLayoutIn , AV_CH_TOP_BACK_RIGHT ) ; <nl> + m_idx_in [ AE_CH_BLOC ] = - 1 ; / / manually disable these channels because ffmpeg does not support them <nl> + m_idx_in [ AE_CH_BROC ] = - 1 ; <nl> <nl> needDSPAddonsReinit = true ; <nl> } <nl> bool CActiveAEDSPProcess : : Process ( CSampleBuffer * in , CSampleBuffer * out ) <nl> m_idx_out [ AE_CH_TBL ] = av_get_channel_layout_channel_index ( m_channelLayoutOut , AV_CH_TOP_BACK_LEFT ) ; <nl> m_idx_out [ AE_CH_TBC ] = av_get_channel_layout_channel_index ( m_channelLayoutOut , AV_CH_TOP_BACK_CENTER ) ; <nl> m_idx_out [ AE_CH_TBR ] = av_get_channel_layout_channel_index ( m_channelLayoutOut , AV_CH_TOP_BACK_RIGHT ) ; <nl> + m_idx_out [ AE_CH_BLOC ] = - 1 ; / / manually disable these channels because ffmpeg does not support them <nl> + m_idx_out [ AE_CH_BROC ] = - 1 ; <nl> <nl> needDSPAddonsReinit = true ; <nl> } <nl> bool CActiveAEDSPProcess : : Process ( CSampleBuffer * in , CSampleBuffer * out ) <nl> if ( m_idx_in [ AE_CH_TBL ] > = 0 ) m_addonSettings . 
lInChannelPresentFlags | = AE_DSP_PRSNT_CH_TBL ; <nl> if ( m_idx_in [ AE_CH_TBR ] > = 0 ) m_addonSettings . lInChannelPresentFlags | = AE_DSP_PRSNT_CH_TBR ; <nl> if ( m_idx_in [ AE_CH_TBC ] > = 0 ) m_addonSettings . lInChannelPresentFlags | = AE_DSP_PRSNT_CH_TBC ; <nl> + if ( m_idx_in [ AE_CH_TBR ] > = 0 ) m_addonSettings . lInChannelPresentFlags | = AE_DSP_PRSNT_CH_TBR ; <nl> + if ( m_idx_in [ AE_CH_BLOC ] > = 0 ) m_addonSettings . lInChannelPresentFlags | = AE_DSP_PRSNT_CH_BLOC ; <nl> + if ( m_idx_in [ AE_CH_BROC ] > = 0 ) m_addonSettings . lInChannelPresentFlags | = AE_DSP_PRSNT_CH_BROC ; <nl> <nl> m_addonSettings . lOutChannelPresentFlags = 0 ; <nl> if ( m_idx_out [ AE_CH_FL ] > = 0 ) m_addonSettings . lOutChannelPresentFlags | = AE_DSP_PRSNT_CH_FL ; <nl> bool CActiveAEDSPProcess : : Process ( CSampleBuffer * in , CSampleBuffer * out ) <nl> if ( m_idx_out [ AE_CH_TBL ] > = 0 ) m_addonSettings . lOutChannelPresentFlags | = AE_DSP_PRSNT_CH_TBL ; <nl> if ( m_idx_out [ AE_CH_TBR ] > = 0 ) m_addonSettings . lOutChannelPresentFlags | = AE_DSP_PRSNT_CH_TBR ; <nl> if ( m_idx_out [ AE_CH_TBC ] > = 0 ) m_addonSettings . lOutChannelPresentFlags | = AE_DSP_PRSNT_CH_TBC ; <nl> + if ( m_idx_out [ AE_CH_BLOC ] > = 0 ) m_addonSettings . lOutChannelPresentFlags | = AE_DSP_PRSNT_CH_BLOC ; <nl> + if ( m_idx_out [ AE_CH_BROC ] > = 0 ) m_addonSettings . lOutChannelPresentFlags | = AE_DSP_PRSNT_CH_BROC ; <nl> <nl> m_addonSettings . iStreamID = m_streamId ; <nl> m_addonSettings . iInChannels = in - > pkt - > config . channels ; <nl> void CActiveAEDSPProcess : : SetFFMpegDSPProcessorArray ( float * array_ffmpeg [ 2 ] [ AE_D <nl> / / ! Initialize input channel alignmment for ffmpeg process array <nl> if ( array_in ) <nl> { <nl> - if ( m_addonSettings . lInChannelPresentFlags & AE_DSP_PRSNT_CH_FL ) <nl> - { <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_FL ] ] = array_in [ AE_DSP_CH_FL ] ; <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_FR ] ] = array_in [ AE_DSP_CH_FR ] ; <nl> - } <nl> - if ( m_addonSettings . lInChannelPresentFlags & AE_DSP_PRSNT_CH_FC ) <nl> - { <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_FC ] ] = array_in [ AE_DSP_CH_FC ] ; <nl> - } <nl> - if ( m_addonSettings . lInChannelPresentFlags & AE_DSP_PRSNT_CH_LFE ) <nl> - { <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_LFE ] ] = array_in [ AE_DSP_CH_LFE ] ; <nl> - } <nl> - if ( m_addonSettings . lInChannelPresentFlags & AE_DSP_PRSNT_CH_BL ) <nl> - { <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_BL ] ] = array_in [ AE_DSP_CH_BL ] ; <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_BR ] ] = array_in [ AE_DSP_CH_BR ] ; <nl> - } <nl> - if ( m_addonSettings . lInChannelPresentFlags & AE_DSP_PRSNT_CH_FLOC ) <nl> - { <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_FLOC ] ] = array_in [ AE_DSP_CH_FLOC ] ; <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_FROC ] ] = array_in [ AE_DSP_CH_FROC ] ; <nl> - } <nl> - if ( m_addonSettings . lInChannelPresentFlags & AE_DSP_PRSNT_CH_BC ) <nl> - { <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_BC ] ] = array_in [ AE_DSP_CH_BC ] ; <nl> - } <nl> - if ( m_addonSettings . lInChannelPresentFlags & AE_DSP_PRSNT_CH_SL ) <nl> - { <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_SL ] ] = array_in [ AE_DSP_CH_SL ] ; <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_SR ] ] = array_in [ AE_DSP_CH_SR ] ; <nl> - } <nl> - if ( m_addonSettings . 
lInChannelPresentFlags & AE_DSP_PRSNT_CH_TFL ) <nl> - { <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_TFL ] ] = array_in [ AE_DSP_CH_TFL ] ; <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_TFR ] ] = array_in [ AE_DSP_CH_TFR ] ; <nl> - } <nl> - if ( m_addonSettings . lInChannelPresentFlags & AE_DSP_PRSNT_CH_TFC ) <nl> - { <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_TFC ] ] = array_in [ AE_DSP_CH_TFC ] ; <nl> - } <nl> - if ( m_addonSettings . lInChannelPresentFlags & AE_DSP_PRSNT_CH_TC ) <nl> - { <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_TC ] ] = array_in [ AE_DSP_CH_TC ] ; <nl> - } <nl> - if ( m_addonSettings . lInChannelPresentFlags & AE_DSP_PRSNT_CH_TBL ) <nl> - { <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_TBL ] ] = array_in [ AE_DSP_CH_TBL ] ; <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_TBR ] ] = array_in [ AE_DSP_CH_TBR ] ; <nl> - } <nl> - if ( m_addonSettings . lInChannelPresentFlags & AE_DSP_PRSNT_CH_TBC ) <nl> - { <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_TBC ] ] = array_in [ AE_DSP_CH_TBC ] ; <nl> - } <nl> + if ( m_addonSettings . lInChannelPresentFlags & AE_DSP_PRSNT_CH_FL ) array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_FL ] ] = array_in [ AE_DSP_CH_FL ] ; <nl> + if ( m_addonSettings . lInChannelPresentFlags & AE_DSP_PRSNT_CH_FR ) array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_FR ] ] = array_in [ AE_DSP_CH_FR ] ; <nl> + if ( m_addonSettings . lInChannelPresentFlags & AE_DSP_PRSNT_CH_FC ) array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_FC ] ] = array_in [ AE_DSP_CH_FC ] ; <nl> + if ( m_addonSettings . lInChannelPresentFlags & AE_DSP_PRSNT_CH_LFE ) array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_LFE ] ] = array_in [ AE_DSP_CH_LFE ] ; <nl> + if ( m_addonSettings . lInChannelPresentFlags & AE_DSP_PRSNT_CH_BL ) array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_BL ] ] = array_in [ AE_DSP_CH_BL ] ; <nl> + if ( m_addonSettings . lInChannelPresentFlags & AE_DSP_PRSNT_CH_BR ) array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_BR ] ] = array_in [ AE_DSP_CH_BR ] ; <nl> + if ( m_addonSettings . lInChannelPresentFlags & AE_DSP_PRSNT_CH_FLOC ) array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_FLOC ] ] = array_in [ AE_DSP_CH_FLOC ] ; <nl> + if ( m_addonSettings . lInChannelPresentFlags & AE_DSP_PRSNT_CH_FROC ) array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_FROC ] ] = array_in [ AE_DSP_CH_FROC ] ; <nl> + if ( m_addonSettings . lInChannelPresentFlags & AE_DSP_PRSNT_CH_BC ) array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_BC ] ] = array_in [ AE_DSP_CH_BC ] ; <nl> + if ( m_addonSettings . lInChannelPresentFlags & AE_DSP_PRSNT_CH_SL ) array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_SL ] ] = array_in [ AE_DSP_CH_SL ] ; <nl> + if ( m_addonSettings . lInChannelPresentFlags & AE_DSP_PRSNT_CH_SR ) array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_SR ] ] = array_in [ AE_DSP_CH_SR ] ; <nl> + if ( m_addonSettings . lInChannelPresentFlags & AE_DSP_PRSNT_CH_TFL ) array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_TFL ] ] = array_in [ AE_DSP_CH_TFL ] ; <nl> + if ( m_addonSettings . lInChannelPresentFlags & AE_DSP_PRSNT_CH_TFR ) array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_TFR ] ] = array_in [ AE_DSP_CH_TFR ] ; <nl> + if ( m_addonSettings . 
lInChannelPresentFlags & AE_DSP_PRSNT_CH_TFC ) array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_TFC ] ] = array_in [ AE_DSP_CH_TFC ] ; <nl> + if ( m_addonSettings . lInChannelPresentFlags & AE_DSP_PRSNT_CH_TC ) array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_TC ] ] = array_in [ AE_DSP_CH_TC ] ; <nl> + if ( m_addonSettings . lInChannelPresentFlags & AE_DSP_PRSNT_CH_TBL ) array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_TBL ] ] = array_in [ AE_DSP_CH_TBL ] ; <nl> + if ( m_addonSettings . lInChannelPresentFlags & AE_DSP_PRSNT_CH_TBR ) array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_TBR ] ] = array_in [ AE_DSP_CH_TBR ] ; <nl> + if ( m_addonSettings . lInChannelPresentFlags & AE_DSP_PRSNT_CH_TBC ) array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_TBC ] ] = array_in [ AE_DSP_CH_TBC ] ; <nl> + if ( m_addonSettings . lInChannelPresentFlags & AE_DSP_PRSNT_CH_BLOC ) array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_BLOC ] ] = array_in [ AE_DSP_CH_BLOC ] ; <nl> + if ( m_addonSettings . lInChannelPresentFlags & AE_DSP_PRSNT_CH_BROC ) array_ffmpeg [ FFMPEG_PROC_ARRAY_IN ] [ m_idx_in [ AE_CH_BROC ] ] = array_in [ AE_DSP_CH_BROC ] ; <nl> } <nl> <nl> + / / ! Initialize output channel alignmment for ffmpeg process array <nl> if ( array_out ) <nl> { <nl> - / / ! Initialize output channel alignmment for ffmpeg process array <nl> - if ( m_addonSettings . lOutChannelPresentFlags & AE_DSP_PRSNT_CH_FL ) <nl> - { <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_FL ] ] = array_out [ AE_DSP_CH_FL ] ; <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_FR ] ] = array_out [ AE_DSP_CH_FR ] ; <nl> - } <nl> - if ( m_addonSettings . lOutChannelPresentFlags & AE_DSP_PRSNT_CH_FC ) <nl> - { <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_FC ] ] = array_out [ AE_DSP_CH_FC ] ; <nl> - } <nl> - if ( m_addonSettings . lOutChannelPresentFlags & AE_DSP_PRSNT_CH_LFE ) <nl> - { <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_LFE ] ] = array_out [ AE_DSP_CH_LFE ] ; <nl> - } <nl> - if ( m_addonSettings . lOutChannelPresentFlags & AE_DSP_PRSNT_CH_BL ) <nl> - { <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_BL ] ] = array_out [ AE_DSP_CH_BL ] ; <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_BR ] ] = array_out [ AE_DSP_CH_BR ] ; <nl> - } <nl> - if ( m_addonSettings . lOutChannelPresentFlags & AE_DSP_PRSNT_CH_FLOC ) <nl> - { <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_FLOC ] ] = array_out [ AE_DSP_CH_FLOC ] ; <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_FROC ] ] = array_out [ AE_DSP_CH_FROC ] ; <nl> - } <nl> - if ( m_addonSettings . lOutChannelPresentFlags & AE_DSP_PRSNT_CH_BC ) <nl> - { <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_BC ] ] = array_out [ AE_DSP_CH_BC ] ; <nl> - } <nl> - if ( m_addonSettings . lOutChannelPresentFlags & AE_DSP_PRSNT_CH_SL ) <nl> - { <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_SL ] ] = array_out [ AE_DSP_CH_SL ] ; <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_SR ] ] = array_out [ AE_DSP_CH_SR ] ; <nl> - } <nl> - if ( m_addonSettings . lOutChannelPresentFlags & AE_DSP_PRSNT_CH_TFL ) <nl> - { <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_TFL ] ] = array_out [ AE_DSP_CH_TFL ] ; <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_TFR ] ] = array_out [ AE_DSP_CH_TFR ] ; <nl> - } <nl> - if ( m_addonSettings . 
lOutChannelPresentFlags & AE_DSP_PRSNT_CH_TFC ) <nl> - { <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_TFC ] ] = array_out [ AE_DSP_CH_TFC ] ; <nl> - } <nl> - if ( m_addonSettings . lOutChannelPresentFlags & AE_DSP_PRSNT_CH_TC ) <nl> - { <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_TC ] ] = array_out [ AE_DSP_CH_TC ] ; <nl> - } <nl> - if ( m_addonSettings . lOutChannelPresentFlags & AE_DSP_PRSNT_CH_TBL ) <nl> - { <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_TBL ] ] = array_out [ AE_DSP_CH_TBL ] ; <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_TBR ] ] = array_out [ AE_DSP_CH_TBR ] ; <nl> - } <nl> - if ( m_addonSettings . lOutChannelPresentFlags & AE_DSP_PRSNT_CH_TBC ) <nl> - { <nl> - array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_TBC ] ] = array_out [ AE_DSP_CH_TBC ] ; <nl> - } <nl> + if ( m_addonSettings . lOutChannelPresentFlags & AE_DSP_PRSNT_CH_FL ) array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_FL ] ] = array_out [ AE_DSP_CH_FL ] ; <nl> + if ( m_addonSettings . lOutChannelPresentFlags & AE_DSP_PRSNT_CH_FR ) array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_FR ] ] = array_out [ AE_DSP_CH_FR ] ; <nl> + if ( m_addonSettings . lOutChannelPresentFlags & AE_DSP_PRSNT_CH_FC ) array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_FC ] ] = array_out [ AE_DSP_CH_FC ] ; <nl> + if ( m_addonSettings . lOutChannelPresentFlags & AE_DSP_PRSNT_CH_LFE ) array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_LFE ] ] = array_out [ AE_DSP_CH_LFE ] ; <nl> + if ( m_addonSettings . lOutChannelPresentFlags & AE_DSP_PRSNT_CH_BL ) array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_BL ] ] = array_out [ AE_DSP_CH_BL ] ; <nl> + if ( m_addonSettings . lOutChannelPresentFlags & AE_DSP_PRSNT_CH_BR ) array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_BR ] ] = array_out [ AE_DSP_CH_BR ] ; <nl> + if ( m_addonSettings . lOutChannelPresentFlags & AE_DSP_PRSNT_CH_FLOC ) array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_FLOC ] ] = array_out [ AE_DSP_CH_FLOC ] ; <nl> + if ( m_addonSettings . lOutChannelPresentFlags & AE_DSP_PRSNT_CH_FROC ) array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_FROC ] ] = array_out [ AE_DSP_CH_FROC ] ; <nl> + if ( m_addonSettings . lOutChannelPresentFlags & AE_DSP_PRSNT_CH_BC ) array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_BC ] ] = array_out [ AE_DSP_CH_BC ] ; <nl> + if ( m_addonSettings . lOutChannelPresentFlags & AE_DSP_PRSNT_CH_SL ) array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_SL ] ] = array_out [ AE_DSP_CH_SL ] ; <nl> + if ( m_addonSettings . lOutChannelPresentFlags & AE_DSP_PRSNT_CH_SR ) array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_SR ] ] = array_out [ AE_DSP_CH_SR ] ; <nl> + if ( m_addonSettings . lOutChannelPresentFlags & AE_DSP_PRSNT_CH_TFL ) array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_TFL ] ] = array_out [ AE_DSP_CH_TFL ] ; <nl> + if ( m_addonSettings . lOutChannelPresentFlags & AE_DSP_PRSNT_CH_TFR ) array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_TFR ] ] = array_out [ AE_DSP_CH_TFR ] ; <nl> + if ( m_addonSettings . lOutChannelPresentFlags & AE_DSP_PRSNT_CH_TFC ) array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_TFC ] ] = array_out [ AE_DSP_CH_TFC ] ; <nl> + if ( m_addonSettings . lOutChannelPresentFlags & AE_DSP_PRSNT_CH_TC ) array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_TC ] ] = array_out [ AE_DSP_CH_TC ] ; <nl> + if ( m_addonSettings . 
lOutChannelPresentFlags & AE_DSP_PRSNT_CH_TBL ) array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_TBL ] ] = array_out [ AE_DSP_CH_TBL ] ; <nl> + if ( m_addonSettings . lOutChannelPresentFlags & AE_DSP_PRSNT_CH_TBR ) array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_TBR ] ] = array_out [ AE_DSP_CH_TBR ] ; <nl> + if ( m_addonSettings . lOutChannelPresentFlags & AE_DSP_PRSNT_CH_TBC ) array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_TBC ] ] = array_out [ AE_DSP_CH_TBC ] ; <nl> + if ( m_addonSettings . lOutChannelPresentFlags & AE_DSP_PRSNT_CH_BLOC ) array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_BLOC ] ] = array_out [ AE_DSP_CH_BLOC ] ; <nl> + if ( m_addonSettings . lOutChannelPresentFlags & AE_DSP_PRSNT_CH_BROC ) array_ffmpeg [ FFMPEG_PROC_ARRAY_OUT ] [ m_idx_out [ AE_CH_BROC ] ] = array_out [ AE_DSP_CH_BROC ] ; <nl> } <nl> } <nl> <nl> mmm a / xbmc / cores / AudioEngine / Engines / ActiveAE / ActiveAE . cpp <nl> ppp b / xbmc / cores / AudioEngine / Engines / ActiveAE / ActiveAE . cpp <nl> bool CActiveAE : : ResampleSound ( CActiveAESound * sound ) <nl> <nl> IAEStream * CActiveAE : : MakeStream ( AEAudioFormat & audioFormat , unsigned int options , IAEClockCallback * clock ) <nl> { <nl> + if ( audioFormat . m_dataFormat < = AE_FMT_INVALID | | audioFormat . m_dataFormat > = AE_FMT_MAX ) <nl> + { <nl> + return nullptr ; <nl> + } <nl> + <nl> if ( IsSuspended ( ) ) <nl> return NULL ; <nl> <nl> mmm a / xbmc / cores / AudioEngine / Engines / ActiveAE / ActiveAEBuffer . cpp <nl> ppp b / xbmc / cores / AudioEngine / Engines / ActiveAE / ActiveAEBuffer . cpp <nl> bool CActiveAEBufferPoolResample : : ResampleBuffers ( int64_t timestamp ) <nl> <nl> if ( m_dspSample & & m_processor - > Process ( in , m_dspSample ) ) <nl> { <nl> + m_dspSample - > timestamp = in - > timestamp ; <nl> in - > Return ( ) ; <nl> in = m_dspSample ; <nl> m_dspSample = NULL ; <nl>
Merge pull request from AchimTuran / AudioDSP_VS2015_fixes
xbmc/xbmc
ce686f5dc6cb0e80fd046da756f337956ac625c1
2016-05-25T16:18:11Z
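The commit above has to extend long chains of single-purpose if statements each time a channel (here BLOC/BROC) is added. One common alternative is a table that pairs each channel with its present flag so every such block iterates the same list; the enum values and flag constants below are stand-ins, not the actual AudioEngine/DSP identifiers.

#include <cstdint>

// Placeholder types standing in for the engine's channel enum and flag bits.
enum Channel { CH_FL, CH_FR, CH_FC, CH_LFE, CH_BLOC, CH_BROC };
using FlagBits = std::uint64_t;

constexpr FlagBits kPresentFL   = 1u << 0;
constexpr FlagBits kPresentFR   = 1u << 1;
constexpr FlagBits kPresentFC   = 1u << 2;
constexpr FlagBits kPresentLFE  = 1u << 3;
constexpr FlagBits kPresentBLOC = 1u << 4;
constexpr FlagBits kPresentBROC = 1u << 5;

// One table drives the mapping, so adding a channel touches a single line.
struct ChannelFlag { Channel ch; FlagBits bit; };
constexpr ChannelFlag kChannelTable[] = {
    {CH_FL, kPresentFL},     {CH_FR, kPresentFR},     {CH_FC, kPresentFC},
    {CH_LFE, kPresentLFE},   {CH_BLOC, kPresentBLOC}, {CH_BROC, kPresentBROC},
};

// `has_channel` is any callable answering "is this channel present?".
template <typename HasChannelFn>
FlagBits BuildPresentFlags(HasChannelFn has_channel) {
  FlagBits flags = 0;
  for (const ChannelFlag& entry : kChannelTable)
    if (has_channel(entry.ch)) flags |= entry.bit;
  return flags;
}

With that shape, the separate input and output present-flag blocks in the diff could collapse to two calls to BuildPresentFlags with different layout callbacks, though the sketch deliberately ignores the ffmpeg index bookkeeping the real code also performs.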
mmm a / src / arborescence . h <nl> ppp b / src / arborescence . h <nl> <nl> # include < QDir > <nl> # include " misc . h " <nl> <nl> - class file { <nl> + class torrent_file { <nl> private : <nl> - file * parent ; <nl> + torrent_file * parent ; <nl> bool is_dir ; <nl> QString rel_path ; <nl> - QList < file * > children ; <nl> + QList < torrent_file * > children ; <nl> size_type size ; <nl> float progress ; <nl> int priority ; <nl> int index ; / / Index in torrent_info <nl> <nl> public : <nl> - file ( file * parent , QString path , bool dir , size_type size = 0 , int index = - 1 , float progress = 0 . , int priority = 1 ) : parent ( parent ) , is_dir ( dir ) , size ( size ) , progress ( progress ) , priority ( priority ) , index ( index ) { <nl> + torrent_file ( torrent_file * parent , QString path , bool dir , size_type size = 0 , int index = - 1 , float progress = 0 . , int priority = 1 ) : parent ( parent ) , is_dir ( dir ) , size ( size ) , progress ( progress ) , priority ( priority ) , index ( index ) { <nl> qDebug ( " created a file with index % d " , index ) ; <nl> rel_path = QDir : : cleanPath ( path ) ; <nl> if ( parent ) { <nl> class file { <nl> } <nl> } <nl> <nl> - ~ file ( ) { <nl> + ~ torrent_file ( ) { <nl> qDeleteAll ( children ) ; <nl> } <nl> <nl> class file { <nl> } <nl> float wanted = 0 . ; <nl> float done = 0 . ; <nl> - file * child ; <nl> + torrent_file * child ; <nl> foreach ( child , children ) { <nl> wanted + = child - > getSize ( ) ; <nl> done + = child - > getSize ( ) * child - > getProgress ( ) ; <nl> class file { <nl> <nl> void updatePriority ( int prio ) { <nl> Q_ASSERT ( is_dir ) ; <nl> - file * child ; <nl> + torrent_file * child ; <nl> foreach ( child , children ) { <nl> if ( child - > getPriority ( ) ! = prio ) return ; <nl> } <nl> class file { <nl> return ( ! children . isEmpty ( ) ) ; <nl> } <nl> <nl> - QList < file * > getChildren ( ) const { <nl> + QList < torrent_file * > getChildren ( ) const { <nl> return children ; <nl> } <nl> <nl> - file * getChild ( QString fileName ) const { <nl> + torrent_file * getChild ( QString fileName ) const { <nl> Q_ASSERT ( is_dir ) ; <nl> - file * f ; <nl> + torrent_file * f ; <nl> foreach ( f , children ) { <nl> if ( f - > name ( ) = = fileName ) return f ; <nl> } <nl> class file { <nl> parent - > addBytes ( b ) ; <nl> } <nl> <nl> - file * addChild ( QString fileName , bool dir , size_type size = 0 , int index = - 1 , float progress = 0 . , int priority = 1 ) { <nl> + torrent_file * addChild ( QString fileName , bool dir , size_type size = 0 , int index = - 1 , float progress = 0 . , int priority = 1 ) { <nl> Q_ASSERT ( is_dir ) ; <nl> qDebug ( " Adding a new child of size : % ld " , ( long ) size ) ; <nl> - file * f = new file ( this , QDir : : cleanPath ( rel_path + QDir : : separator ( ) + fileName ) , dir , size , index , progress , priority ) ; <nl> + torrent_file * f = new torrent_file ( this , QDir : : cleanPath ( rel_path + QDir : : separator ( ) + fileName ) , dir , size , index , progress , priority ) ; <nl> children < < f ; <nl> if ( size ) { <nl> addBytes ( size ) ; <nl> class file { <nl> return true ; <nl> } <nl> bool success = true ; <nl> - file * f ; <nl> + torrent_file * f ; <nl> qDebug ( " We have % d children " , children . 
size ( ) ) ; <nl> foreach ( f , children ) { <nl> bool s = f - > removeFromFS ( saveDir ) ; <nl> class file { <nl> <nl> class arborescence { <nl> private : <nl> - file * root ; <nl> + torrent_file * root ; <nl> <nl> public : <nl> arborescence ( torrent_info t ) { <nl> torrent_info : : file_iterator fi = t . begin_files ( ) ; <nl> if ( t . num_files ( ) > 1 ) { <nl> - root = new file ( 0 , misc : : toQString ( t . name ( ) ) , true ) ; <nl> + root = new torrent_file ( 0 , misc : : toQString ( t . name ( ) ) , true ) ; <nl> } else { <nl> / / XXX : Will crash if there is no file in torrent <nl> - root = new file ( 0 , misc : : toQString ( t . name ( ) ) , false , fi - > size , 0 ) ; <nl> + root = new torrent_file ( 0 , misc : : toQString ( t . name ( ) ) , false , fi - > size , 0 ) ; <nl> return ; <nl> } <nl> int i = 0 ; <nl> class arborescence { <nl> torrent_info : : file_iterator fi = t . begin_files ( ) ; <nl> if ( t . num_files ( ) > 1 ) { <nl> qDebug ( " More than one file in the torrent , setting a folder as root " ) ; <nl> - root = new file ( 0 , misc : : toQString ( t . name ( ) ) , true ) ; <nl> + root = new torrent_file ( 0 , misc : : toQString ( t . name ( ) ) , true ) ; <nl> } else { <nl> / / XXX : Will crash if there is no file in torrent <nl> qDebug ( " one file in the torrent , setting it as root with index 0 " ) ; <nl> - root = new file ( 0 , misc : : toQString ( t . name ( ) ) , false , fi - > size , 0 , fp [ 0 ] , prioritiesTab [ 0 ] ) ; <nl> + root = new torrent_file ( 0 , misc : : toQString ( t . name ( ) ) , false , fi - > size , 0 , fp [ 0 ] , prioritiesTab [ 0 ] ) ; <nl> return ; <nl> } <nl> int i = 0 ; <nl> class arborescence { <nl> delete root ; <nl> } <nl> <nl> - file * getRoot ( ) const { <nl> + torrent_file * getRoot ( ) const { <nl> return root ; <nl> } <nl> <nl> class arborescence { <nl> relative_path . remove ( 0 , 1 ) ; <nl> QStringList fileNames = relative_path . split ( QDir : : separator ( ) ) ; <nl> QString fileName ; <nl> - file * dad = root ; <nl> + torrent_file * dad = root ; <nl> unsigned int nb_i = 0 ; <nl> unsigned int size = fileNames . size ( ) ; <nl> foreach ( fileName , fileNames ) { <nl> + + nb_i ; <nl> if ( fileName = = " . " ) continue ; <nl> - file * child = dad - > getChild ( fileName ) ; <nl> + torrent_file * child = dad - > getChild ( fileName ) ; <nl> if ( ! child ) { <nl> if ( nb_i ! = size ) { <nl> / / Folder <nl> mmm a / src / properties_imp . cpp <nl> ppp b / src / properties_imp . cpp <nl> properties : : ~ properties ( ) { <nl> delete progressBar ; <nl> } <nl> <nl> - void properties : : addFilesToTree ( file * root , QStandardItem * parent ) { <nl> + void properties : : addFilesToTree ( torrent_file * root , QStandardItem * parent ) { <nl> QList < QStandardItem * > child ; <nl> / / Name <nl> QStandardItem * first ; <nl> void properties : : addFilesToTree ( file * root , QStandardItem * parent ) { <nl> / / Add the child to the tree <nl> parent - > appendRow ( child ) ; <nl> / / Add childs <nl> - file * childFile ; <nl> + torrent_file * childFile ; <nl> foreach ( childFile , root - > getChildren ( ) ) { <nl> addFilesToTree ( childFile , first ) ; <nl> } <nl> mmm a / src / properties_imp . h <nl> ppp b / src / properties_imp . 
h <nl> class PropListDelegate ; <nl> class QTimer ; <nl> class bittorrent ; <nl> class QStandardItemModel ; <nl> - class file ; <nl> + class torrent_file ; <nl> class QStandardItem ; <nl> class RealProgressBar ; <nl> class RealProgressBarThread ; <nl> class properties : public QDialog , private Ui : : properties { <nl> void loadWebSeedsFromFile ( ) ; <nl> void deleteSelectedUrlSeeds ( ) ; <nl> void loadTrackersErrors ( ) ; <nl> - void addFilesToTree ( file * root , QStandardItem * parent ) ; <nl> + void addFilesToTree ( torrent_file * root , QStandardItem * parent ) ; <nl> void updateChildrenPriority ( QStandardItem * item , int priority ) ; <nl> void updateParentsPriority ( QStandardItem * item , int priority ) ; <nl> void updatePriorities ( QStandardItem * item ) ; <nl> mmm a / src / torrentAddition . h <nl> ppp b / src / torrentAddition . h <nl> class torrentAdditionDialog : public QDialog , private Ui_addTorrentDialog { <nl> show ( ) ; <nl> } <nl> <nl> - void addFilesToTree ( file * root , QStandardItem * parent ) { <nl> + void addFilesToTree ( torrent_file * root , QStandardItem * parent ) { <nl> QList < QStandardItem * > child ; <nl> / / Name <nl> QStandardItem * first ; <nl> class torrentAdditionDialog : public QDialog , private Ui_addTorrentDialog { <nl> / / Add the child to the tree <nl> parent - > appendRow ( child ) ; <nl> / / Add childs <nl> - file * childFile ; <nl> + torrent_file * childFile ; <nl> foreach ( childFile , root - > getChildren ( ) ) { <nl> addFilesToTree ( childFile , first ) ; <nl> } <nl>
- Attempt to fix compilation on vc + +
qbittorrent/qBittorrent
8b576fcc50f6e9604fff10895f9651ab5b159b3f
2008-02-03T10:43:12Z
mmm a / tensorflow / contrib / data / python / kernel_tests / bucketing_test . py <nl> ppp b / tensorflow / contrib / data / python / kernel_tests / bucketing_test . py <nl> <nl> from tensorflow . python . framework import dtypes <nl> from tensorflow . python . framework import errors <nl> from tensorflow . python . framework import ops <nl> + from tensorflow . python . framework import sparse_tensor <nl> from tensorflow . python . framework import tensor_shape <nl> from tensorflow . python . ops import array_ops <nl> from tensorflow . python . ops import math_ops <nl> <nl> from tensorflow . python . platform import test <nl> <nl> <nl> + class GroupByReducerTest ( test . TestCase ) : <nl> + <nl> + def checkResults ( self , dataset , shapes , values ) : <nl> + self . assertEqual ( shapes , dataset . output_shapes ) <nl> + get_next = dataset . make_one_shot_iterator ( ) . get_next ( ) <nl> + with self . test_session ( ) as sess : <nl> + for expected in values : <nl> + got = sess . run ( get_next ) <nl> + self . assertEqual ( got , expected ) <nl> + with self . assertRaises ( errors . OutOfRangeError ) : <nl> + sess . run ( get_next ) <nl> + <nl> + def testSum ( self ) : <nl> + reducer = grouping . Reducer ( <nl> + init_func = lambda _ : np . int64 ( 0 ) , <nl> + reduce_func = lambda x , y : x + y , <nl> + finalize_func = lambda x : x ) <nl> + for i in range ( 1 , 11 ) : <nl> + dataset = dataset_ops . Dataset . range ( 2 * i ) . apply ( <nl> + grouping . group_by_reducer ( lambda x : x % 2 , reducer ) ) <nl> + self . checkResults ( <nl> + dataset , shapes = tensor_shape . scalar ( ) , values = [ ( i - 1 ) * i , i * i ] ) <nl> + <nl> + def testAverage ( self ) : <nl> + <nl> + def reduce_fn ( x , y ) : <nl> + return ( x [ 0 ] * x [ 1 ] + math_ops . cast ( y , dtypes . float32 ) ) / ( <nl> + x [ 1 ] + 1 ) , x [ 1 ] + 1 <nl> + <nl> + reducer = grouping . Reducer ( <nl> + init_func = lambda _ : ( 0 . 0 , 0 . 0 ) , <nl> + reduce_func = reduce_fn , <nl> + finalize_func = lambda x : x [ 0 ] ) <nl> + for i in range ( 1 , 11 ) : <nl> + dataset = dataset_ops . Dataset . range ( 2 * i ) . apply ( <nl> + grouping . group_by_reducer ( <nl> + lambda x : math_ops . cast ( x , dtypes . int64 ) % 2 , reducer ) ) <nl> + self . checkResults ( <nl> + dataset , shapes = tensor_shape . scalar ( ) , values = [ i - 1 , i ] ) <nl> + <nl> + def testConcat ( self ) : <nl> + components = np . array ( list ( " abcdefghijklmnopqrst " ) ) . view ( np . chararray ) <nl> + reducer = grouping . Reducer ( <nl> + init_func = lambda x : " " , <nl> + reduce_func = lambda x , y : x + y [ 0 ] , <nl> + finalize_func = lambda x : x ) <nl> + for i in range ( 1 , 11 ) : <nl> + dataset = dataset_ops . Dataset . zip ( <nl> + ( dataset_ops . Dataset . from_tensor_slices ( components ) , <nl> + dataset_ops . Dataset . range ( 2 * i ) ) ) . apply ( <nl> + grouping . group_by_reducer ( lambda x , y : y % 2 , reducer ) ) <nl> + self . checkResults ( <nl> + dataset , <nl> + shapes = tensor_shape . scalar ( ) , <nl> + values = [ b " acegikmoqs " [ : i ] , b " bdfhjlnprt " [ : i ] ] ) <nl> + <nl> + def testSparseSum ( self ) : <nl> + def _sparse ( i ) : <nl> + return sparse_tensor . SparseTensorValue ( <nl> + indices = np . array ( [ [ 0 , 0 ] ] ) , <nl> + values = ( i * np . array ( [ 1 ] , dtype = np . int64 ) ) , <nl> + dense_shape = np . array ( [ 1 , 1 ] ) ) <nl> + <nl> + reducer = grouping . Reducer ( <nl> + init_func = lambda _ : _sparse ( np . int64 ( 0 ) ) , <nl> + reduce_func = lambda x , y : _sparse ( x . values [ 0 ] + y . 
values [ 0 ] ) , <nl> + finalize_func = lambda x : x . values [ 0 ] ) <nl> + for i in range ( 1 , 11 ) : <nl> + dataset = dataset_ops . Dataset . range ( 2 * i ) . map ( _sparse ) . apply ( <nl> + grouping . group_by_reducer ( lambda x : x . values [ 0 ] % 2 , reducer ) ) <nl> + self . checkResults ( <nl> + dataset , shapes = tensor_shape . scalar ( ) , values = [ ( i - 1 ) * i , i * i ] ) <nl> + <nl> + def testChangingStateShape ( self ) : <nl> + <nl> + def reduce_fn ( x , _ ) : <nl> + # Statically known rank , but dynamic length . <nl> + larger_dim = array_ops . concat ( [ x [ 0 ] , x [ 0 ] ] , 0 ) <nl> + # Statically unknown rank . <nl> + larger_rank = array_ops . expand_dims ( x [ 1 ] , 0 ) <nl> + return larger_dim , larger_rank <nl> + <nl> + reducer = grouping . Reducer ( <nl> + init_func = lambda x : ( [ 0 ] , 1 ) , <nl> + reduce_func = reduce_fn , <nl> + finalize_func = lambda x : x ) <nl> + <nl> + for i in range ( 1 , 11 ) : <nl> + dataset = dataset_ops . Dataset . from_tensors ( np . int64 ( 0 ) ) . repeat ( i ) . apply ( <nl> + grouping . group_by_reducer ( lambda x : x , reducer ) ) <nl> + self . assertEqual ( [ None ] , dataset . output_shapes [ 0 ] . as_list ( ) ) <nl> + self . assertIs ( None , dataset . output_shapes [ 1 ] . ndims ) <nl> + iterator = dataset . make_one_shot_iterator ( ) <nl> + get_next = iterator . get_next ( ) <nl> + with self . test_session ( ) as sess : <nl> + x , y = sess . run ( get_next ) <nl> + self . assertAllEqual ( [ 0 ] * ( 2 * * i ) , x ) <nl> + self . assertAllEqual ( np . array ( 1 , ndmin = i ) , y ) <nl> + with self . assertRaises ( errors . OutOfRangeError ) : <nl> + sess . run ( get_next ) <nl> + <nl> + def testTypeMismatch ( self ) : <nl> + reducer = grouping . Reducer ( <nl> + init_func = lambda x : constant_op . constant ( 1 , dtype = dtypes . int32 ) , <nl> + reduce_func = lambda x , y : constant_op . constant ( 1 , dtype = dtypes . int64 ) , <nl> + finalize_func = lambda x : x ) <nl> + <nl> + dataset = dataset_ops . Dataset . range ( 10 ) <nl> + with self . assertRaisesRegexp ( <nl> + TypeError , <nl> + " The element types for the new state must match the initial state . " ) : <nl> + dataset . apply ( <nl> + grouping . group_by_reducer ( lambda _ : np . int64 ( 0 ) , reducer ) ) <nl> + <nl> + # TODO ( b / 78665031 ) : Remove once non - scalar keys are supported . <nl> + def testInvalidKeyShape ( self ) : <nl> + reducer = grouping . Reducer ( <nl> + init_func = lambda x : np . int64 ( 0 ) , <nl> + reduce_func = lambda x , y : x + y , <nl> + finalize_func = lambda x : x ) <nl> + <nl> + dataset = dataset_ops . Dataset . range ( 10 ) <nl> + with self . assertRaisesRegexp ( <nl> + ValueError , " ` key_func ` must return a single tf . int64 tensor . " ) : <nl> + dataset . apply ( <nl> + grouping . group_by_reducer ( lambda _ : np . int64 ( ( 0 , 0 ) ) , reducer ) ) <nl> + <nl> + # TODO ( b / 78665031 ) : Remove once non - int64 keys are supported . <nl> + def testInvalidKeyType ( self ) : <nl> + reducer = grouping . Reducer ( <nl> + init_func = lambda x : np . int64 ( 0 ) , <nl> + reduce_func = lambda x , y : x + y , <nl> + finalize_func = lambda x : x ) <nl> + <nl> + dataset = dataset_ops . Dataset . range ( 10 ) <nl> + with self . assertRaisesRegexp ( <nl> + ValueError , " ` key_func ` must return a single tf . int64 tensor . " ) : <nl> + dataset . apply ( <nl> + grouping . group_by_reducer ( lambda _ : " wrong " , reducer ) ) <nl> + <nl> + <nl> + class GroupByReducerSerializationTest ( <nl> + dataset_serialization_test_base . 
DatasetSerializationTestBase ) : <nl> + <nl> + def _build_dataset ( self , components ) : <nl> + reducer = grouping . Reducer ( <nl> + init_func = lambda _ : np . int64 ( 0 ) , <nl> + reduce_func = lambda x , y : x + y , <nl> + finalize_func = lambda x : x ) <nl> + <nl> + return dataset_ops . Dataset . from_tensor_slices ( components ) . apply ( <nl> + grouping . group_by_reducer ( lambda x : x % 5 , reducer ) ) <nl> + <nl> + def testCoreGroupByReducer ( self ) : <nl> + components = np . array ( [ 0 , 1 , 2 , 3 , 4 , 5 , 6 , 7 , 8 , 9 ] , dtype = np . int64 ) <nl> + self . verify_unused_iterator ( <nl> + lambda : self . _build_dataset ( components ) , 5 , verify_exhausted = True ) <nl> + self . verify_init_before_restore ( <nl> + lambda : self . _build_dataset ( components ) , 5 , verify_exhausted = True ) <nl> + self . verify_multiple_breaks ( <nl> + lambda : self . _build_dataset ( components ) , 5 , verify_exhausted = True ) <nl> + self . verify_reset_restored_iterator ( <nl> + lambda : self . _build_dataset ( components ) , 5 , verify_exhausted = True ) <nl> + self . verify_restore_in_empty_graph ( <nl> + lambda : self . _build_dataset ( components ) , 5 , verify_exhausted = True ) <nl> + diff_components = np . array ( [ 5 , 4 , 3 , 2 , 1 , 0 ] , dtype = np . int64 ) <nl> + self . verify_restore_in_modified_graph ( <nl> + lambda : self . _build_dataset ( components ) , <nl> + lambda : self . _build_dataset ( diff_components ) , <nl> + 5 , <nl> + verify_exhausted = True ) <nl> + <nl> + <nl> class GroupByWindowTest ( test . TestCase ) : <nl> <nl> def testSimple ( self ) : <nl> mmm a / tensorflow / contrib / data / python / kernel_tests / scan_dataset_op_test . py <nl> ppp b / tensorflow / contrib / data / python / kernel_tests / scan_dataset_op_test . py <nl> def _scan_fn ( unused_state , unused_input_value ) : <nl> scan_ops . scan ( constant_op . constant ( 1 , dtype = dtypes . int32 ) , _scan_fn ) ) <nl> <nl> <nl> - class ScanDatasetSerialzationTest ( <nl> + class ScanDatasetSerializationTest ( <nl> dataset_serialization_test_base . DatasetSerializationTestBase ) : <nl> <nl> def _build_dataset ( self , num_elements ) : <nl> mmm a / tensorflow / contrib / data / python / ops / grouping . py <nl> ppp b / tensorflow / contrib / data / python / ops / grouping . py <nl> <nl> from tensorflow . python . framework import dtypes <nl> from tensorflow . python . framework import function <nl> from tensorflow . python . framework import ops <nl> + from tensorflow . python . framework import sparse_tensor <nl> from tensorflow . python . framework import tensor_shape <nl> from tensorflow . python . ops import array_ops <nl> from tensorflow . python . ops import check_ops <nl> <nl> from tensorflow . python . ops import math_ops <nl> <nl> <nl> + def group_by_reducer ( key_func , reducer ) : <nl> + " " " A transformation that groups elements and performs a reduction . <nl> + <nl> + This transformation maps element of a dataset to a key using ` key_func ` and <nl> + groups the elements by key . The ` reducer ` is used to process each group ; its <nl> + ` init_func ` is used to initialize state for each group when it is created , the <nl> + ` reduce_func ` is used to update the state every time an element is mapped to <nl> + the matching group , and the ` finalize_func ` is used to map the final state to <nl> + an output value . <nl> + <nl> + Args : <nl> + key_func : A function mapping a nested structure of tensors <nl> + ( having shapes and types defined by ` self . output_shapes ` and <nl> + ` self . 
output_types ` ) to a scalar ` tf . int64 ` tensor . <nl> + reducer : An instance of ` Reducer ` , which captures the reduction logic using <nl> + the ` init_func ` , ` reduce_func ` , and ` finalize_func ` functions . <nl> + <nl> + Returns : <nl> + A ` Dataset ` transformation function , which can be passed to <nl> + @ { tf . data . Dataset . apply } . <nl> + " " " <nl> + <nl> + def _apply_fn ( dataset ) : <nl> + " " " Function from ` Dataset ` to ` Dataset ` that applies the transformation . " " " <nl> + return GroupByReducerDataset ( dataset , key_func , reducer ) <nl> + <nl> + return _apply_fn <nl> + <nl> + <nl> def group_by_window ( key_func , <nl> reduce_func , <nl> window_size = None , <nl> def output_types ( self ) : <nl> return self . _output_types <nl> <nl> <nl> + class GroupByReducerDataset ( dataset_ops . Dataset ) : <nl> + " " " A ` Dataset ` that groups its input and performs a reduction . " " " <nl> + <nl> + def __init__ ( self , input_dataset , key_func , reducer ) : <nl> + " " " See ` group_by_reducer ( ) ` for details . " " " <nl> + super ( GroupByReducerDataset , self ) . __init__ ( ) <nl> + <nl> + self . _input_dataset = input_dataset <nl> + <nl> + self . _make_key_func ( key_func , input_dataset ) <nl> + self . _make_init_func ( reducer . init_func ) <nl> + self . _make_reduce_func ( reducer . reduce_func , input_dataset ) <nl> + self . _make_finalize_func ( reducer . finalize_func ) <nl> + <nl> + def _make_key_func ( self , key_func , input_dataset ) : <nl> + " " " Make wrapping Defun for key_func . " " " <nl> + <nl> + @ function . Defun ( * nest . flatten ( <nl> + sparse . as_dense_types ( input_dataset . output_types , <nl> + input_dataset . output_classes ) ) ) <nl> + def tf_key_func ( * args ) : <nl> + " " " A wrapper for Defun that facilitates shape inference . " " " <nl> + # Pass in shape information from the input_dataset . <nl> + dense_shapes = sparse . as_dense_shapes ( input_dataset . output_shapes , <nl> + input_dataset . output_classes ) <nl> + for arg , shape in zip ( args , nest . flatten ( dense_shapes ) ) : <nl> + arg . set_shape ( shape ) <nl> + <nl> + nested_args = nest . pack_sequence_as ( input_dataset . output_types , args ) <nl> + nested_args = sparse . deserialize_sparse_tensors ( <nl> + nested_args , input_dataset . output_types , input_dataset . output_shapes , <nl> + input_dataset . output_classes ) <nl> + # pylint : disable = protected - access <nl> + if dataset_ops . _should_unpack_args ( nested_args ) : <nl> + ret = key_func ( * nested_args ) <nl> + # pylint : enable = protected - access <nl> + else : <nl> + ret = key_func ( nested_args ) <nl> + ret = ops . convert_to_tensor ( ret ) <nl> + if ret . dtype ! = dtypes . int64 or ret . get_shape ( ) ! = tensor_shape . scalar ( ) : <nl> + raise ValueError ( <nl> + " ` key_func ` must return a single tf . int64 tensor . " <nl> + " Got type = % s and shape = % s " % ( ret . dtype , ret . get_shape ( ) ) ) <nl> + return ret <nl> + <nl> + self . _key_func = tf_key_func <nl> + self . _key_func . add_to_graph ( ops . get_default_graph ( ) ) <nl> + <nl> + def _make_init_func ( self , init_func ) : <nl> + " " " Make wrapping Defun for init_func . " " " <nl> + <nl> + @ function . Defun ( dtypes . int64 ) <nl> + def tf_init_func ( key ) : <nl> + " " " A wrapper for Defun that facilitates shape inference . " " " <nl> + key . set_shape ( [ ] ) <nl> + ret = init_func ( key ) <nl> + # Convert any ` SparseTensorValue ` s to ` SparseTensor ` s and all other <nl> + # values to tensors . <nl> + ret = nest . 
pack_sequence_as ( ret , [ <nl> + sparse_tensor . SparseTensor . from_value ( t ) <nl> + if sparse_tensor . is_sparse ( t ) else ops . convert_to_tensor ( t ) <nl> + for t in nest . flatten ( ret ) <nl> + ] ) <nl> + <nl> + self . _state_classes = sparse . get_classes ( ret ) <nl> + self . _state_shapes = nest . pack_sequence_as ( <nl> + ret , [ t . get_shape ( ) for t in nest . flatten ( ret ) ] ) <nl> + self . _state_types = nest . pack_sequence_as ( <nl> + ret , [ t . dtype for t in nest . flatten ( ret ) ] ) <nl> + <nl> + # Serialize any sparse tensors . <nl> + ret = nest . pack_sequence_as ( <nl> + ret , [ t for t in nest . flatten ( sparse . serialize_sparse_tensors ( ret ) ) ] ) <nl> + return nest . flatten ( ret ) <nl> + <nl> + self . _init_func = tf_init_func <nl> + self . _init_func . add_to_graph ( ops . get_default_graph ( ) ) <nl> + <nl> + def _make_reduce_func ( self , reduce_func , input_dataset ) : <nl> + " " " Make wrapping Defun for reduce_func . " " " <nl> + <nl> + # Iteratively rerun the reduce function until reaching a fixed point on <nl> + # ` self . _state_shapes ` . <nl> + need_to_rerun = True <nl> + while need_to_rerun : <nl> + <nl> + # Create a list in which ` tf_reduce_func ` will store the new shapes . <nl> + flat_new_state_shapes = [ ] <nl> + <nl> + @ function . Defun ( * ( nest . flatten ( <nl> + sparse . as_dense_types ( <nl> + self . _state_types , self . _state_classes ) ) + nest . flatten ( <nl> + sparse . as_dense_types ( input_dataset . output_types , <nl> + input_dataset . output_classes ) ) ) ) <nl> + def tf_reduce_func ( * args ) : <nl> + " " " A wrapper for Defun that facilitates shape inference . " " " <nl> + for arg , shape in zip ( <nl> + args , <nl> + nest . flatten ( <nl> + sparse . as_dense_shapes ( self . _state_shapes , self . _state_classes ) ) <nl> + + nest . flatten ( <nl> + sparse . as_dense_shapes ( input_dataset . output_shapes , <nl> + input_dataset . output_classes ) ) ) : <nl> + arg . set_shape ( shape ) <nl> + <nl> + pivot = len ( nest . flatten ( self . _state_shapes ) ) <nl> + nested_state_args = nest . pack_sequence_as ( self . _state_types , <nl> + args [ : pivot ] ) <nl> + nested_state_args = sparse . deserialize_sparse_tensors ( <nl> + nested_state_args , self . _state_types , self . _state_shapes , <nl> + self . _state_classes ) <nl> + nested_input_args = nest . pack_sequence_as ( input_dataset . output_types , <nl> + args [ pivot : ] ) <nl> + nested_input_args = sparse . deserialize_sparse_tensors ( <nl> + nested_input_args , input_dataset . output_types , <nl> + input_dataset . output_shapes , input_dataset . output_classes ) <nl> + <nl> + ret = reduce_func ( nested_state_args , nested_input_args ) <nl> + <nl> + # Convert any ` SparseTensorValue ` s to ` SparseTensor ` s and all other <nl> + # values to tensors . <nl> + ret = nest . pack_sequence_as ( ret , [ <nl> + sparse_tensor . SparseTensor . from_value ( t ) <nl> + if sparse_tensor . is_sparse ( t ) else ops . convert_to_tensor ( t ) <nl> + for t in nest . flatten ( ret ) <nl> + ] ) <nl> + <nl> + # Extract shape information from the returned values . <nl> + flat_new_state = nest . flatten ( ret ) <nl> + flat_new_state_shapes . extend ( [ t . get_shape ( ) for t in flat_new_state ] ) <nl> + <nl> + # Extract and validate type information from the returned values . <nl> + for t , dtype in zip ( flat_new_state , nest . flatten ( self . _state_types ) ) : <nl> + if t . dtype ! 
= dtype : <nl> + raise TypeError ( <nl> + " The element types for the new state must match the initial " <nl> + " state . Expected % s ; got % s . " % <nl> + ( self . _state_types , <nl> + nest . pack_sequence_as ( self . _state_types , <nl> + [ t . dtype for t in flat_new_state ] ) ) ) <nl> + <nl> + # Serialize any sparse tensors . <nl> + ret = nest . pack_sequence_as ( <nl> + ret , <nl> + [ t for t in nest . flatten ( sparse . serialize_sparse_tensors ( ret ) ) ] ) <nl> + return nest . flatten ( ret ) <nl> + <nl> + # Use the private method that will execute ` tf_reduce_func ` but delay <nl> + # adding it to the graph in case we need to rerun the function . <nl> + tf_reduce_func . _create_definition_if_needed ( ) # pylint : disable = protected - access <nl> + <nl> + flat_state_shapes = nest . flatten ( self . _state_shapes ) <nl> + weakened_state_shapes = [ <nl> + old . most_specific_compatible_shape ( new ) <nl> + for old , new in zip ( flat_state_shapes , flat_new_state_shapes ) <nl> + ] <nl> + <nl> + need_to_rerun = False <nl> + for old_shape , weakened_shape in zip ( flat_state_shapes , <nl> + weakened_state_shapes ) : <nl> + if old_shape . ndims is not None and ( <nl> + weakened_shape . ndims is None or <nl> + old_shape . as_list ( ) ! = weakened_shape . as_list ( ) ) : <nl> + need_to_rerun = True <nl> + break <nl> + <nl> + if need_to_rerun : <nl> + self . _state_shapes = nest . pack_sequence_as ( self . _state_shapes , <nl> + weakened_state_shapes ) <nl> + <nl> + self . _reduce_func = tf_reduce_func <nl> + self . _reduce_func . add_to_graph ( ops . get_default_graph ( ) ) <nl> + <nl> + def _make_finalize_func ( self , finalize_func ) : <nl> + " " " Make wrapping Defun for finalize_func . " " " <nl> + <nl> + @ function . Defun ( * ( nest . flatten ( <nl> + sparse . as_dense_types ( self . _state_types , self . _state_classes ) ) ) ) <nl> + def tf_finalize_func ( * args ) : <nl> + " " " A wrapper for Defun that facilitates shape inference . " " " <nl> + for arg , shape in zip ( <nl> + args , <nl> + nest . flatten ( <nl> + sparse . as_dense_shapes ( self . _state_shapes , self . _state_classes ) ) ) : <nl> + arg . set_shape ( shape ) <nl> + <nl> + nested_args = nest . pack_sequence_as ( self . _state_types , args ) <nl> + nested_args = sparse . deserialize_sparse_tensors ( <nl> + nested_args , self . _state_types , self . _state_shapes , <nl> + self . _state_classes ) <nl> + <nl> + ret = finalize_func ( nested_args ) <nl> + <nl> + # Convert any ` SparseTensorValue ` s to ` SparseTensor ` s and all other <nl> + # values to tensors . <nl> + ret = nest . pack_sequence_as ( ret , [ <nl> + sparse_tensor . SparseTensor . from_value ( t ) <nl> + if sparse_tensor . is_sparse ( t ) else ops . convert_to_tensor ( t ) <nl> + for t in nest . flatten ( ret ) <nl> + ] ) <nl> + <nl> + self . _output_classes = sparse . get_classes ( ret ) <nl> + self . _output_shapes = nest . pack_sequence_as ( <nl> + ret , [ t . get_shape ( ) for t in nest . flatten ( ret ) ] ) <nl> + self . _output_types = nest . pack_sequence_as ( <nl> + ret , [ t . dtype for t in nest . flatten ( ret ) ] ) <nl> + <nl> + # Serialize any sparse tensors . <nl> + ret = nest . pack_sequence_as ( <nl> + ret , [ t for t in nest . flatten ( sparse . serialize_sparse_tensors ( ret ) ) ] ) <nl> + return nest . flatten ( ret ) <nl> + <nl> + self . _finalize_func = tf_finalize_func <nl> + self . _finalize_func . add_to_graph ( ops . get_default_graph ( ) ) <nl> + <nl> + @ property <nl> + def output_classes ( self ) : <nl> + return self . 
_output_classes <nl> + <nl> + @ property <nl> + def output_shapes ( self ) : <nl> + return self . _output_shapes <nl> + <nl> + @ property <nl> + def output_types ( self ) : <nl> + return self . _output_types <nl> + <nl> + def _as_variant_tensor ( self ) : <nl> + return gen_dataset_ops . group_by_reducer_dataset ( <nl> + self . _input_dataset . _as_variant_tensor ( ) , # pylint : disable = protected - access <nl> + self . _key_func . captured_inputs , <nl> + self . _init_func . captured_inputs , <nl> + self . _reduce_func . captured_inputs , <nl> + self . _finalize_func . captured_inputs , <nl> + key_func = self . _key_func , <nl> + init_func = self . _init_func , <nl> + reduce_func = self . _reduce_func , <nl> + finalize_func = self . _finalize_func , <nl> + output_types = nest . flatten ( <nl> + sparse . as_dense_types ( self . output_types , self . output_classes ) ) , <nl> + output_shapes = nest . flatten ( <nl> + sparse . as_dense_shapes ( self . output_shapes , self . output_classes ) ) ) <nl> + <nl> + <nl> class GroupByWindowDataset ( dataset_ops . Dataset ) : <nl> " " " A ` Dataset ` that groups its input and performs a windowed reduction . " " " <nl> <nl> def _as_variant_tensor ( self ) : <nl> sparse . as_dense_types ( self . output_types , self . output_classes ) ) , <nl> output_shapes = nest . flatten ( <nl> sparse . as_dense_shapes ( self . output_shapes , self . output_classes ) ) ) <nl> + <nl> + <nl> + class Reducer ( object ) : <nl> + " " " A reducer is used for reducing a set of elements . <nl> + <nl> + A reducer is represented as a tuple of the three functions : <nl> + 1 ) initialization function : key = > initial state <nl> + 2 ) reduce function : ( old state , input ) = > new state <nl> + 3 ) finalization function : state = > result <nl> + " " " <nl> + <nl> + def __init__ ( self , init_func , reduce_func , finalize_func ) : <nl> + self . _init_func = init_func <nl> + self . _reduce_func = reduce_func <nl> + self . _finalize_func = finalize_func <nl> + <nl> + @ property <nl> + def init_func ( self ) : <nl> + return self . _init_func <nl> + <nl> + @ property <nl> + def reduce_func ( self ) : <nl> + return self . _reduce_func <nl> + <nl> + @ property <nl> + def finalize_func ( self ) : <nl> + return self . _finalize_func <nl> new file mode 100644 <nl> index 0000000000000 . . 067ad4018b09d <nl> mmm / dev / null <nl> ppp b / tensorflow / core / api_def / base_api / api_def_GroupByReducerDataset . pbtxt <nl> <nl> + op { <nl> + graph_op_name : " GroupByReducerDataset " <nl> + visibility : HIDDEN <nl> + in_arg { <nl> + name : " input_dataset " <nl> + description : < < END <nl> + A variant tensor representing the input dataset . <nl> + END <nl> + } <nl> + in_arg { <nl> + name : " key_func_other_arguments " <nl> + description : < < END <nl> + A list of tensors , typically values that were captured when <nl> + building a closure for ` key_func ` . <nl> + END <nl> + } <nl> + attr { <nl> + name : " key_func " <nl> + description : < < END <nl> + A function mapping an element of ` input_dataset ` , concatenated <nl> + with ` key_func_other_arguments ` to a scalar value of type DT_INT64 . <nl> + END <nl> + } <nl> + in_arg { <nl> + name : " init_func_other_arguments " <nl> + description : < < END <nl> + A list of tensors , typically values that were captured when <nl> + building a closure for ` init_func ` . 
<nl> + END <nl> + } <nl> + attr { <nl> + name : " init_func " <nl> + description : < < END <nl> + A function mapping a key of type DT_INT64 , concatenated with <nl> + ` init_func_other_arguments ` to the initial reducer state . <nl> + END <nl> + } <nl> + in_arg { <nl> + name : " reduce_func_other_arguments " <nl> + description : < < END <nl> + A list of tensors , typically values that were captured when <nl> + building a closure for ` reduce_func ` . <nl> + END <nl> + } <nl> + attr { <nl> + name : " reduce_func " <nl> + description : < < END <nl> + A function mapping the current reducer state and an element of ` input_dataset ` , <nl> + concatenated with ` reduce_func_other_arguments ` to a new reducer state . <nl> + END <nl> + } <nl> + in_arg { <nl> + name : " finalize_func_other_arguments " <nl> + description : < < END <nl> + A list of tensors , typically values that were captured when <nl> + building a closure for ` finalize_func ` . <nl> + END <nl> + } <nl> + attr { <nl> + name : " finalize_func " <nl> + description : < < END <nl> + A function mapping the final reducer state to an output element . <nl> + END <nl> + } <nl> + summary : " Creates a dataset that computes a group - by on ` input_dataset ` . " <nl> + description : < < END <nl> + Creates a dataset that computes a group - by on ` input_dataset ` . <nl> + END <nl> + } <nl> mmm a / tensorflow / core / kernels / data / BUILD <nl> ppp b / tensorflow / core / kernels / data / BUILD <nl> tf_kernel_library ( <nl> ] , <nl> ) <nl> <nl> + tf_kernel_library ( <nl> + name = " group_by_reducer_dataset_op " , <nl> + srcs = [ " group_by_reducer_dataset_op . cc " ] , <nl> + deps = [ <nl> + " : captured_function " , <nl> + " : dataset " , <nl> + " / / tensorflow / core : core_cpu_internal " , <nl> + " / / tensorflow / core : dataset_ops_op_lib " , <nl> + " / / tensorflow / core : framework " , <nl> + " / / tensorflow / core : lib " , <nl> + " / / tensorflow / core : lib_internal " , <nl> + ] , <nl> + ) <nl> + <nl> tf_kernel_library ( <nl> name = " group_by_window_dataset_op " , <nl> srcs = [ " group_by_window_dataset_op . cc " ] , <nl> tf_kernel_library ( <nl> " : filter_dataset_op " , <nl> " : flat_map_dataset_op " , <nl> " : generator_dataset_op " , <nl> + " : group_by_reducer_dataset_op " , <nl> " : group_by_window_dataset_op " , <nl> " : interleave_dataset_op " , <nl> " : iterator_ops " , <nl> mmm a / tensorflow / core / kernels / data / captured_function . cc <nl> ppp b / tensorflow / core / kernels / data / captured_function . cc <nl> Status CapturedFunction : : Create ( <nl> return Status : : OK ( ) ; <nl> } <nl> <nl> + / * static * / <nl> + Status CapturedFunction : : Create ( <nl> + const NameAttrList & func , OpKernelContext * ctx , const string & argument , <nl> + std : : unique_ptr < CapturedFunction > * out_function ) { <nl> + OpInputList argument_inputs ; <nl> + TF_RETURN_IF_ERROR ( ctx - > input_list ( argument , & argument_inputs ) ) ; <nl> + std : : vector < Tensor > arguments_t ; <nl> + arguments_t . reserve ( argument_inputs . size ( ) ) ; <nl> + for ( const Tensor & t : argument_inputs ) { <nl> + arguments_t . push_back ( t ) ; <nl> + } <nl> + return CapturedFunction : : Create ( func , std : : move ( arguments_t ) , out_function ) ; <nl> + } <nl> + <nl> CapturedFunction : : ~ CapturedFunction ( ) { <nl> if ( lib_ ! = nullptr & & f_handle_ ! = kInvalidHandle ) { <nl> lib_ - > ReleaseHandle ( f_handle_ ) . IgnoreError ( ) ; <nl> mmm a / tensorflow / core / kernels / data / captured_function . 
h <nl> ppp b / tensorflow / core / kernels / data / captured_function . h <nl> class ResourceMgr ; <nl> / / context . <nl> class CapturedFunction { <nl> public : <nl> + / / Creates a new instance from a list of named attributes and captured inputs . <nl> + / / <nl> / / NOTE ( mrry ) : The ` captured_inputs ` are passed by value . For <nl> / / efficiency , you are recommended to move this argument into the call . <nl> static Status Create ( const NameAttrList & func , <nl> std : : vector < Tensor > captured_inputs , <nl> std : : unique_ptr < CapturedFunction > * out_function ) ; <nl> <nl> + / / Creates a new instance using a list of named attributes , fetching captured <nl> + / / inputs from a context argument . <nl> + static Status Create ( const NameAttrList & func , OpKernelContext * ctx , <nl> + const string & argument , <nl> + std : : unique_ptr < CapturedFunction > * out_function ) ; <nl> + <nl> ~ CapturedFunction ( ) ; <nl> <nl> / / Runs the " Captured function " using the given FLR and caches the lib and <nl> class CapturedFunction { <nl> std : : vector < Tensor > * rets , <nl> FunctionLibraryRuntime : : DoneCallback done ) ; <nl> <nl> + / / Returns the named list of function arguments . <nl> + const NameAttrList & func ( ) { return func_ ; } <nl> + <nl> / / Returns that additional captured inputs that will be passed to the function <nl> / / when ` Run * ( ) ` is called . <nl> const std : : vector < Tensor > & captured_inputs ( ) { return captured_inputs_ ; } <nl> new file mode 100644 <nl> index 0000000000000 . . c8aeaab9cba5e <nl> mmm / dev / null <nl> ppp b / tensorflow / core / kernels / data / group_by_reducer_dataset_op . cc <nl> <nl> + / * Copyright 2018 The TensorFlow Authors . All Rights Reserved . <nl> + <nl> + Licensed under the Apache License , Version 2 . 0 ( the " License " ) ; <nl> + you may not use this file except in compliance with the License . <nl> + You may obtain a copy of the License at <nl> + <nl> + http : / / www . apache . org / licenses / LICENSE - 2 . 0 <nl> + <nl> + Unless required by applicable law or agreed to in writing , software <nl> + distributed under the License is distributed on an " AS IS " BASIS , <nl> + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . <nl> + See the License for the specific language governing permissions and <nl> + limitations under the License . <nl> + = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = * / <nl> + # include < map > <nl> + <nl> + # include " tensorflow / core / common_runtime / function . h " <nl> + # include " tensorflow / core / framework / partial_tensor_shape . h " <nl> + # include " tensorflow / core / framework / tensor . h " <nl> + # include " tensorflow / core / kernels / data / captured_function . h " <nl> + # include " tensorflow / core / kernels / data / dataset . h " <nl> + # include " tensorflow / core / lib / random / random . h " <nl> + <nl> + namespace tensorflow { <nl> + namespace { <nl> + <nl> + / / See documentation in . . / ops / dataset_ops . cc for a high - level <nl> + / / description of the following op . 
<nl> + class GroupByReducerDatasetOp : public UnaryDatasetOpKernel { <nl> + public : <nl> + explicit GroupByReducerDatasetOp ( OpKernelConstruction * ctx ) <nl> + : UnaryDatasetOpKernel ( ctx ) , <nl> + graph_def_version_ ( ctx - > graph_def_version ( ) ) { <nl> + OP_REQUIRES_OK ( ctx , ctx - > GetAttr ( " key_func " , & key_func_ ) ) ; <nl> + OP_REQUIRES_OK ( ctx , ctx - > GetAttr ( " init_func " , & init_func_ ) ) ; <nl> + OP_REQUIRES_OK ( ctx , ctx - > GetAttr ( " reduce_func " , & reduce_func_ ) ) ; <nl> + OP_REQUIRES_OK ( ctx , ctx - > GetAttr ( " finalize_func " , & finalize_func_ ) ) ; <nl> + OP_REQUIRES_OK ( ctx , ctx - > GetAttr ( " output_types " , & output_types_ ) ) ; <nl> + OP_REQUIRES_OK ( ctx , ctx - > GetAttr ( " output_shapes " , & output_shapes_ ) ) ; <nl> + } <nl> + <nl> + void MakeDataset ( OpKernelContext * ctx , DatasetBase * input , <nl> + DatasetBase * * output ) override { <nl> + std : : unique_ptr < CapturedFunction > captured_key_func ; <nl> + OP_REQUIRES_OK ( ctx , CapturedFunction : : Create ( key_func_ , ctx , <nl> + " key_func_other_arguments " , <nl> + & captured_key_func ) ) ; <nl> + std : : unique_ptr < CapturedFunction > captured_init_func ; <nl> + OP_REQUIRES_OK ( ctx , CapturedFunction : : Create ( init_func_ , ctx , <nl> + " init_func_other_arguments " , <nl> + & captured_init_func ) ) ; <nl> + std : : unique_ptr < CapturedFunction > captured_reduce_func ; <nl> + OP_REQUIRES_OK ( ctx , CapturedFunction : : Create ( reduce_func_ , ctx , <nl> + " reduce_func_other_arguments " , <nl> + & captured_reduce_func ) ) ; <nl> + std : : unique_ptr < CapturedFunction > captured_finalize_func ; <nl> + OP_REQUIRES_OK ( ctx , <nl> + CapturedFunction : : Create ( finalize_func_ , ctx , <nl> + " finalize_func_other_arguments " , <nl> + & captured_finalize_func ) ) ; <nl> + <nl> + * output = new Dataset ( <nl> + ctx , input , std : : move ( captured_key_func ) , std : : move ( captured_init_func ) , <nl> + std : : move ( captured_reduce_func ) , std : : move ( captured_finalize_func ) , <nl> + output_types_ , output_shapes_ ) ; <nl> + } <nl> + <nl> + private : <nl> + class Dataset : public GraphDatasetBase { <nl> + public : <nl> + Dataset ( OpKernelContext * ctx , const DatasetBase * input , <nl> + std : : unique_ptr < CapturedFunction > captured_key_func , <nl> + std : : unique_ptr < CapturedFunction > captured_init_func , <nl> + std : : unique_ptr < CapturedFunction > captured_reduce_func , <nl> + std : : unique_ptr < CapturedFunction > captured_finalize_func , <nl> + const DataTypeVector & output_types , <nl> + const std : : vector < PartialTensorShape > & output_shapes ) <nl> + : GraphDatasetBase ( ctx ) , <nl> + input_ ( input ) , <nl> + captured_key_func_ ( std : : move ( captured_key_func ) ) , <nl> + captured_init_func_ ( std : : move ( captured_init_func ) ) , <nl> + captured_reduce_func_ ( std : : move ( captured_reduce_func ) ) , <nl> + captured_finalize_func_ ( std : : move ( captured_finalize_func ) ) , <nl> + output_types_ ( output_types ) , <nl> + output_shapes_ ( output_shapes ) { <nl> + input_ - > Ref ( ) ; <nl> + } <nl> + <nl> + ~ Dataset ( ) override { input_ - > Unref ( ) ; } <nl> + <nl> + std : : unique_ptr < IteratorBase > MakeIterator ( <nl> + const string & prefix ) const override { <nl> + return std : : unique_ptr < IteratorBase > ( <nl> + new Iterator ( { this , strings : : StrCat ( prefix , " : : GroupByReducer " ) } ) ) ; <nl> + } <nl> + <nl> + const DataTypeVector & output_dtypes ( ) const override { <nl> + return output_types_ ; <nl> + } 
<nl> + const std : : vector < PartialTensorShape > & output_shapes ( ) const override { <nl> + return output_shapes_ ; <nl> + } <nl> + <nl> + string DebugString ( ) override { return " GroupByReducerDatasetOp : : Dataset " ; } <nl> + <nl> + protected : <nl> + Status AsGraphDefInternal ( OpKernelContext * ctx , DatasetGraphDefBuilder * b , <nl> + Node * * output ) const override { <nl> + TF_RETURN_IF_ERROR ( b - > AddFunction ( ctx , key_func ( ) . name ( ) ) ) ; <nl> + TF_RETURN_IF_ERROR ( b - > AddFunction ( ctx , init_func ( ) . name ( ) ) ) ; <nl> + TF_RETURN_IF_ERROR ( b - > AddFunction ( ctx , reduce_func ( ) . name ( ) ) ) ; <nl> + TF_RETURN_IF_ERROR ( b - > AddFunction ( ctx , finalize_func ( ) . name ( ) ) ) ; <nl> + Node * input_graph_node = nullptr ; <nl> + TF_RETURN_IF_ERROR ( b - > AddParentDataset ( ctx , input_ , & input_graph_node ) ) ; <nl> + <nl> + std : : vector < Node * > key_func_other_arguments_node ; <nl> + DataTypeVector key_func_other_arguments_types ; <nl> + TF_RETURN_IF_ERROR ( OtherArgumentsNodeAndType ( <nl> + b , captured_key_func_ , & key_func_other_arguments_node , <nl> + & key_func_other_arguments_types ) ) ; <nl> + <nl> + std : : vector < Node * > init_func_other_arguments_node ; <nl> + DataTypeVector init_func_other_arguments_types ; <nl> + TF_RETURN_IF_ERROR ( OtherArgumentsNodeAndType ( <nl> + b , captured_init_func_ , & init_func_other_arguments_node , <nl> + & init_func_other_arguments_types ) ) ; <nl> + <nl> + std : : vector < Node * > reduce_func_other_arguments_node ; <nl> + DataTypeVector reduce_func_other_arguments_types ; <nl> + TF_RETURN_IF_ERROR ( OtherArgumentsNodeAndType ( <nl> + b , captured_reduce_func_ , & reduce_func_other_arguments_node , <nl> + & reduce_func_other_arguments_types ) ) ; <nl> + <nl> + std : : vector < Node * > finalize_func_other_arguments_node ; <nl> + DataTypeVector finalize_func_other_arguments_types ; <nl> + TF_RETURN_IF_ERROR ( OtherArgumentsNodeAndType ( <nl> + b , captured_finalize_func_ , & finalize_func_other_arguments_node , <nl> + & finalize_func_other_arguments_types ) ) ; <nl> + <nl> + AttrValue key_func ; <nl> + b - > BuildAttrValue ( this - > key_func ( ) , & key_func ) ; <nl> + AttrValue init_func ; <nl> + b - > BuildAttrValue ( this - > init_func ( ) , & init_func ) ; <nl> + AttrValue reduce_func ; <nl> + b - > BuildAttrValue ( this - > reduce_func ( ) , & reduce_func ) ; <nl> + AttrValue finalize_func ; <nl> + b - > BuildAttrValue ( this - > finalize_func ( ) , & finalize_func ) ; <nl> + <nl> + AttrValue key_func_other_arguments_types_attr ; <nl> + b - > BuildAttrValue ( key_func_other_arguments_types , <nl> + & key_func_other_arguments_types_attr ) ; <nl> + AttrValue init_func_other_arguments_types_attr ; <nl> + b - > BuildAttrValue ( init_func_other_arguments_types , <nl> + & init_func_other_arguments_types_attr ) ; <nl> + AttrValue reduce_func_other_arguments_types_attr ; <nl> + b - > BuildAttrValue ( reduce_func_other_arguments_types , <nl> + & reduce_func_other_arguments_types_attr ) ; <nl> + AttrValue finalize_func_other_arguments_types_attr ; <nl> + b - > BuildAttrValue ( finalize_func_other_arguments_types , <nl> + & finalize_func_other_arguments_types_attr ) ; <nl> + <nl> + TF_RETURN_IF_ERROR ( b - > AddDataset ( <nl> + this , { { 0 , input_graph_node } } , <nl> + { { 1 , key_func_other_arguments_node } , <nl> + { 2 , init_func_other_arguments_node } , <nl> + { 3 , reduce_func_other_arguments_node } , <nl> + { 4 , finalize_func_other_arguments_node } } , <nl> + { { " key_func " , key_func } , 
<nl> + { " init_func " , init_func } , <nl> + { " reduce_func " , reduce_func } , <nl> + { " finalize_func " , finalize_func } , <nl> + { " Tkey_func_other_arguments " , key_func_other_arguments_types_attr } , <nl> + { " Tinit_func_other_arguments " , init_func_other_arguments_types_attr } , <nl> + { " Treduce_func_other_arguments " , <nl> + reduce_func_other_arguments_types_attr } , <nl> + { " Tfinalize_func_other_arguments " , <nl> + finalize_func_other_arguments_types_attr } } , <nl> + output ) ) ; <nl> + return Status : : OK ( ) ; <nl> + } <nl> + <nl> + private : <nl> + class Iterator : public DatasetIterator < Dataset > { <nl> + public : <nl> + explicit Iterator ( const Params & params ) <nl> + : DatasetIterator < Dataset > ( params ) , <nl> + input_impl_ ( params . dataset - > input_ - > MakeIterator ( params . prefix ) ) { } <nl> + <nl> + Status GetNextInternal ( IteratorContext * ctx , <nl> + std : : vector < Tensor > * out_tensors , <nl> + bool * end_of_sequence ) override { <nl> + mutex_lock l ( mu_ ) ; <nl> + <nl> + / / Iterate through the input dataset , keying input elements to reducers . <nl> + while ( ! end_of_input_ ) { <nl> + std : : vector < Tensor > next_input_element ; <nl> + TF_RETURN_IF_ERROR ( <nl> + input_impl_ - > GetNext ( ctx , & next_input_element , & end_of_input_ ) ) ; <nl> + <nl> + if ( ! end_of_input_ ) { <nl> + / / Run the key function on the input element . <nl> + std : : vector < Tensor > key_func_output ; <nl> + TF_RETURN_IF_ERROR ( <nl> + dataset ( ) - > captured_key_func_ - > RunWithBorrowedArgs ( <nl> + ctx , next_input_element , & key_func_output ) ) ; <nl> + <nl> + if ( key_func_output . size ( ) ! = 1 | | <nl> + key_func_output [ 0 ] . dtype ( ) ! = DT_INT64 | | <nl> + key_func_output [ 0 ] . NumElements ( ) ! = 1 ) { <nl> + / / TODO ( b / 78665031 ) : Support non - int64 keys . <nl> + return errors : : InvalidArgument ( <nl> + " ` key_func ` must return a scalar int64 . " ) ; <nl> + } <nl> + const int64 key = key_func_output [ 0 ] . scalar < int64 > ( ) ( ) ; <nl> + <nl> + if ( states_ . find ( key ) = = states_ . end ( ) ) { <nl> + / / Run the init function to create the initial state . <nl> + std : : vector < Tensor > init_func_output ; <nl> + TF_RETURN_IF_ERROR ( dataset ( ) - > captured_init_func_ - > Run ( <nl> + ctx , std : : move ( key_func_output ) , & init_func_output ) ) ; <nl> + states_ [ key ] = init_func_output ; <nl> + } <nl> + <nl> + / / Run the reduce function to update the current state . <nl> + std : : vector < Tensor > args ; <nl> + args . reserve ( states_ [ key ] . size ( ) + next_input_element . size ( ) ) ; <nl> + std : : copy ( states_ [ key ] . begin ( ) , states_ [ key ] . end ( ) , <nl> + std : : back_inserter ( args ) ) ; <nl> + std : : copy ( next_input_element . begin ( ) , next_input_element . end ( ) , <nl> + std : : back_inserter ( args ) ) ; <nl> + <nl> + std : : vector < Tensor > reduce_func_output ; <nl> + TF_RETURN_IF_ERROR ( dataset ( ) - > captured_reduce_func_ - > Run ( <nl> + ctx , std : : move ( args ) , & reduce_func_output ) ) ; <nl> + states_ [ key ] = reduce_func_output ; <nl> + } else { <nl> + keys_ . resize ( states_ . size ( ) ) ; <nl> + int idx = 0 ; <nl> + for ( auto it = states_ . begin ( ) ; it ! = states_ . end ( ) ; + + idx , + + it ) { <nl> + keys_ [ idx ] = it - > first ; <nl> + } <nl> + } <nl> + } <nl> + <nl> + if ( keys_index_ = = keys_ . 
size ( ) ) { <nl> + * end_of_sequence = true ; <nl> + return Status : : OK ( ) ; <nl> + } <nl> + TF_RETURN_IF_ERROR ( <nl> + dataset ( ) - > captured_finalize_func_ - > RunWithBorrowedArgs ( <nl> + ctx , states_ [ keys_ [ keys_index_ + + ] ] , out_tensors ) ) ; <nl> + return Status : : OK ( ) ; <nl> + } <nl> + <nl> + protected : <nl> + Status SaveInternal ( IteratorStateWriter * writer ) override { <nl> + mutex_lock l ( mu_ ) ; <nl> + TF_RETURN_IF_ERROR ( SaveParent ( writer , input_impl_ ) ) ; <nl> + <nl> + if ( end_of_input_ ) { <nl> + TF_RETURN_IF_ERROR ( <nl> + writer - > WriteScalar ( full_name ( " end_of_input " ) , " " ) ) ; <nl> + } <nl> + <nl> + / / Saving states_ . <nl> + if ( ! states_ . empty ( ) ) { <nl> + TF_RETURN_IF_ERROR ( <nl> + writer - > WriteScalar ( full_name ( " states_size " ) , states_ . size ( ) ) ) ; <nl> + int idx = 0 ; <nl> + for ( auto it = states_ . begin ( ) ; it ! = states_ . end ( ) ; + + idx , + + it ) { <nl> + int64 key = it - > first ; <nl> + TF_RETURN_IF_ERROR ( writer - > WriteScalar ( <nl> + full_name ( strings : : StrCat ( " states [ " , idx , " ] - > key " ) ) , key ) ) ; <nl> + if ( ! it - > second . empty ( ) ) { <nl> + TF_RETURN_IF_ERROR ( writer - > WriteScalar ( <nl> + full_name ( strings : : StrCat ( " states [ " , idx , " ] - > state_size " ) ) , <nl> + it - > second . size ( ) ) ) ; <nl> + for ( int j = 0 ; j < it - > second . size ( ) ; + + j ) { <nl> + TF_RETURN_IF_ERROR ( writer - > WriteTensor ( <nl> + full_name ( <nl> + strings : : StrCat ( " states [ " , idx , " ] - > state [ " , j , " ] " ) ) , <nl> + it - > second [ j ] ) ) ; <nl> + } <nl> + } <nl> + } <nl> + } <nl> + <nl> + / / Saving keys_index_ and keys_ . <nl> + if ( end_of_input_ ) { <nl> + TF_RETURN_IF_ERROR ( <nl> + writer - > WriteScalar ( full_name ( " keys_index " ) , keys_index_ ) ) ; <nl> + if ( ! keys_ . empty ( ) ) { <nl> + TF_RETURN_IF_ERROR ( <nl> + writer - > WriteScalar ( full_name ( " keys_size " ) , keys_ . size ( ) ) ) ; <nl> + for ( int idx = 0 ; idx < keys_ . size ( ) ; + + idx ) { <nl> + TF_RETURN_IF_ERROR ( writer - > WriteScalar ( <nl> + full_name ( strings : : StrCat ( " keys [ " , idx , " ] " ) ) , keys_ [ idx ] ) ) ; <nl> + } <nl> + } <nl> + } <nl> + <nl> + return Status : : OK ( ) ; <nl> + } <nl> + <nl> + Status RestoreInternal ( IteratorContext * ctx , <nl> + IteratorStateReader * reader ) override { <nl> + mutex_lock l ( mu_ ) ; <nl> + TF_RETURN_IF_ERROR ( RestoreParent ( ctx , reader , input_impl_ ) ) ; <nl> + <nl> + if ( reader - > Contains ( full_name ( " end_of_input " ) ) ) end_of_input_ = true ; <nl> + <nl> + / / Restoring states_ . <nl> + if ( reader - > Contains ( full_name ( " states_size " ) ) ) { <nl> + int64 size ; <nl> + TF_RETURN_IF_ERROR ( <nl> + reader - > ReadScalar ( full_name ( " states_size " ) , & size ) ) ; <nl> + for ( int idx = 0 ; idx < size ; + + idx ) { <nl> + int64 key ; <nl> + TF_RETURN_IF_ERROR ( reader - > ReadScalar ( <nl> + full_name ( strings : : StrCat ( " states [ " , idx , " ] - > key " ) ) , & key ) ) ; <nl> + std : : vector < Tensor > state ; <nl> + if ( reader - > Contains ( full_name ( <nl> + strings : : StrCat ( " states [ " , idx , " ] - > state_size " ) ) ) ) { <nl> + int64 state_size ; <nl> + TF_RETURN_IF_ERROR ( reader - > ReadScalar ( <nl> + full_name ( strings : : StrCat ( " states [ " , idx , " ] - > state_size " ) ) , <nl> + & state_size ) ) ; <nl> + state . 
resize ( state_size ) ; <nl> + for ( int j = 0 ; j < state_size ; + + j ) { <nl> + TF_RETURN_IF_ERROR ( reader - > ReadTensor ( <nl> + full_name ( <nl> + strings : : StrCat ( " states [ " , idx , " ] - > state [ " , j , " ] " ) ) , <nl> + & state [ j ] ) ) ; <nl> + } <nl> + } <nl> + states_ [ key ] = state ; <nl> + } <nl> + } <nl> + <nl> + / / Restoring keys_index_ and keys_ . <nl> + if ( end_of_input_ ) { <nl> + TF_RETURN_IF_ERROR ( <nl> + reader - > ReadScalar ( full_name ( " keys_index " ) , & keys_index_ ) ) ; <nl> + if ( reader - > Contains ( full_name ( " keys_size " ) ) ) { <nl> + int64 size ; <nl> + TF_RETURN_IF_ERROR ( <nl> + reader - > ReadScalar ( full_name ( " keys_size " ) , & size ) ) ; <nl> + keys_ . resize ( size ) ; <nl> + for ( int idx = 0 ; idx < size ; + + idx ) { <nl> + int64 key ; <nl> + TF_RETURN_IF_ERROR ( reader - > ReadScalar ( <nl> + full_name ( strings : : StrCat ( " keys [ " , idx , " ] " ) ) , & key ) ) ; <nl> + keys_ [ idx ] = key ; <nl> + } <nl> + } <nl> + } <nl> + <nl> + return Status : : OK ( ) ; <nl> + } <nl> + <nl> + private : <nl> + mutex mu_ ; <nl> + std : : unique_ptr < IteratorBase > input_impl_ GUARDED_BY ( mu_ ) ; <nl> + bool end_of_input_ GUARDED_BY ( mu_ ) = false ; <nl> + std : : map < int64 , std : : vector < Tensor > > states_ GUARDED_BY ( mu_ ) ; <nl> + std : : vector < int64 > keys_ GUARDED_BY ( mu_ ) ; <nl> + int64 keys_index_ GUARDED_BY ( mu_ ) = 0 ; <nl> + } ; <nl> + <nl> + const NameAttrList & key_func ( ) const { return captured_key_func_ - > func ( ) ; } <nl> + <nl> + const NameAttrList & init_func ( ) const { <nl> + return captured_init_func_ - > func ( ) ; <nl> + } <nl> + <nl> + const NameAttrList & reduce_func ( ) const { <nl> + return captured_reduce_func_ - > func ( ) ; <nl> + } <nl> + <nl> + const NameAttrList & finalize_func ( ) const { <nl> + return captured_finalize_func_ - > func ( ) ; <nl> + } <nl> + <nl> + Status OtherArgumentsNodeAndType ( <nl> + DatasetGraphDefBuilder * b , <nl> + const std : : unique_ptr < CapturedFunction > & captured_func , <nl> + std : : vector < Node * > * other_arguments_node , <nl> + DataTypeVector * other_arguments_types ) const { <nl> + other_arguments_node - > reserve ( captured_func - > captured_inputs ( ) . size ( ) ) ; <nl> + other_arguments_types - > reserve ( captured_func - > captured_inputs ( ) . size ( ) ) ; <nl> + for ( const Tensor & t : captured_func - > captured_inputs ( ) ) { <nl> + Node * node ; <nl> + TF_RETURN_IF_ERROR ( b - > AddTensor ( t , & node ) ) ; <nl> + other_arguments_node - > emplace_back ( node ) ; <nl> + other_arguments_types - > emplace_back ( t . dtype ( ) ) ; <nl> + } <nl> + return Status : : OK ( ) ; <nl> + } <nl> + <nl> + const DatasetBase * const input_ ; <nl> + const std : : unique_ptr < CapturedFunction > captured_key_func_ ; <nl> + const std : : unique_ptr < CapturedFunction > captured_init_func_ ; <nl> + const std : : unique_ptr < CapturedFunction > captured_reduce_func_ ; <nl> + const std : : unique_ptr < CapturedFunction > captured_finalize_func_ ; <nl> + const DataTypeVector output_types_ ; <nl> + const std : : vector < PartialTensorShape > output_shapes_ ; <nl> + } ; <nl> + <nl> + const int graph_def_version_ ; <nl> + DataTypeVector output_types_ ; <nl> + std : : vector < PartialTensorShape > output_shapes_ ; <nl> + NameAttrList key_func_ ; <nl> + NameAttrList init_func_ ; <nl> + NameAttrList reduce_func_ ; <nl> + NameAttrList finalize_func_ ; <nl> + } ; <nl> + <nl> + REGISTER_KERNEL_BUILDER ( Name ( " GroupByReducerDataset " ) . 
Device ( DEVICE_CPU ) , <nl> + GroupByReducerDatasetOp ) ; <nl> + <nl> + } / / namespace <nl> + } / / namespace tensorflow <nl> mmm a / tensorflow / core / kernels / data / group_by_window_dataset_op . cc <nl> ppp b / tensorflow / core / kernels / data / group_by_window_dataset_op . cc <nl> class GroupByWindowDatasetOp : public UnaryDatasetOpKernel { <nl> if ( key_func_output . size ( ) ! = 1 | | <nl> key_func_output [ 0 ] . dtype ( ) ! = DT_INT64 | | <nl> key_func_output [ 0 ] . NumElements ( ) ! = 1 ) { <nl> - / / TODO ( mrry ) : Support non - int64 keys . <nl> + / / TODO ( b / 78665031 ) : Support non - int64 keys . <nl> return errors : : InvalidArgument ( <nl> " ` key_func ` must return a scalar int64 . " ) ; <nl> } <nl> mmm a / tensorflow / core / ops / dataset_ops . cc <nl> ppp b / tensorflow / core / ops / dataset_ops . cc <nl> REGISTER_OP ( " ParallelInterleaveDataset " ) <nl> . Attr ( " output_shapes : list ( shape ) > = 1 " ) <nl> . SetShapeFn ( shape_inference : : ScalarShape ) ; <nl> <nl> + REGISTER_OP ( " GroupByReducerDataset " ) <nl> + . Input ( " input_dataset : variant " ) <nl> + . Input ( " key_func_other_arguments : Tkey_func_other_arguments " ) <nl> + . Input ( " init_func_other_arguments : Tinit_func_other_arguments " ) <nl> + . Input ( " reduce_func_other_arguments : Treduce_func_other_arguments " ) <nl> + . Input ( " finalize_func_other_arguments : Tfinalize_func_other_arguments " ) <nl> + . Output ( " handle : variant " ) <nl> + . Attr ( " key_func : func " ) <nl> + . Attr ( " init_func : func " ) <nl> + . Attr ( " reduce_func : func " ) <nl> + . Attr ( " finalize_func : func " ) <nl> + . Attr ( " Tkey_func_other_arguments : list ( type ) > = 0 " ) <nl> + . Attr ( " Tinit_func_other_arguments : list ( type ) > = 0 " ) <nl> + . Attr ( " Treduce_func_other_arguments : list ( type ) > = 0 " ) <nl> + . Attr ( " Tfinalize_func_other_arguments : list ( type ) > = 0 " ) <nl> + . Attr ( " output_types : list ( type ) > = 1 " ) <nl> + . Attr ( " output_shapes : list ( shape ) > = 1 " ) <nl> + . SetIsStateful ( ) <nl> + . SetShapeFn ( shape_inference : : ScalarShape ) ; <nl> + <nl> REGISTER_OP ( " GroupByWindowDataset " ) <nl> . Input ( " input_dataset : variant " ) <nl> . Input ( " key_func_other_arguments : Tkey_func_other_arguments " ) <nl>
[ tf . data ] Adding an experimental ` group_by_reducer ` transformation which groups elements of an input pipeline by a key , applies a reduce function to elements of each group " on - the - fly " , and outputs the results once all input elements have been processed .
tensorflow/tensorflow
c89a1d9605427d74079774af7da37933f9ca153c
2018-05-01T00:40:46Z
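The commit message above describes the semantics of `group_by_reducer`; a minimal usage sketch, adapted from the testSum case in this commit's own tests (TF 1.x-era contrib API; the import path mirrors the module added here and may differ from the public export location):

import numpy as np
import tensorflow as tf
from tensorflow.contrib.data.python.ops import grouping

# Sum the elements of each group, keyed by parity.
reducer = grouping.Reducer(
    init_func=lambda _: np.int64(0),          # state created when a key is first seen
    reduce_func=lambda state, x: state + x,   # fold each element into its group's state
    finalize_func=lambda state: state)        # map the final state to the output element

dataset = tf.data.Dataset.range(10).apply(
    grouping.group_by_reducer(lambda x: x % 2, reducer))

get_next = dataset.make_one_shot_iterator().get_next()
with tf.Session() as sess:
    print(sess.run(get_next))  # 20 (0 + 2 + 4 + 6 + 8)
    print(sess.run(get_next))  # 25 (1 + 3 + 5 + 7 + 9)

The reducer state is kept per key, so the pipeline emits one element per distinct key once the input is exhausted.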
mmm a / src / settings . js <nl> ppp b / src / settings . js <nl> var DOUBLE_MODE = 1 ; / / How to load and store 64 - bit doubles . Without typed arra <nl> / / 0 we will simply store and load doubles as 32 - bit floats , so when they are stored / loaded <nl> / / they will truncate from 64 to 32 bits , and lose precision . This is faster , and might <nl> / / work for some code ( but probably that code should just use floats and not doubles anyhow ) . <nl> + / / Note that a downside of DOUBLE_MODE 1 is that we currently store the double in parts , <nl> + / / then load it aligned , and that load - store will make JS engines alter it if it is being <nl> + / / stored to a typed array for security reasons . That will ' fix ' the number from being a <nl> + / / NaN or an infinite number . <nl> var EMULATE_UNALIGNED_ACCESSES = 1 ; / / If set , the compiler will ' emulate ' loads and stores that are not known to <nl> / / be sufficiently aligned , by working on individual bytes . This can be <nl> / / important in USE_TYPED_ARRAYS = = 2 , where unaligned accesses do not work , <nl>
comment
emscripten-core/emscripten
02b8a149f5e501c54a90b525099de19a45b3be5b
2011-12-07T22:27:39Z
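The comment added above notes that DOUBLE_MODE 0 stores and loads doubles as 32-bit floats and therefore loses precision. A tiny NumPy illustration of that truncation (Python is used only for intuition; the setting itself concerns Emscripten's generated JS, and the NaN/Infinity canonicalization mentioned for DOUBLE_MODE 1 is a JS-engine behavior not reproduced here):

import numpy as np

x = np.float64(0.1)
roundtripped = np.float64(np.float32(x))  # store through a 32-bit slot, load back as a double
print(x == roundtripped)                  # False: the low-order bits are gone
print(abs(x - roundtripped))              # ~1.5e-09 truncation error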
mmm a / src / frontend / mosh - server . cc <nl> ppp b / src / frontend / mosh - server . cc <nl> <nl> # include < sys / socket . h > <nl> # include < netinet / in . h > <nl> # include < arpa / inet . h > <nl> + # include < getopt . h > <nl> <nl> extern " C " { <nl> # include " selfpipe . h " <nl> int run_server ( const char * desired_ip , const char * desired_port , <nl> <nl> using namespace std ; <nl> <nl> + void print_usage ( const char * argv0 ) <nl> + { <nl> + fprintf ( stderr , " Usage : % s new [ - s ] [ - i LOCALADDR ] [ - p PORT ] [ - - COMMAND . . . ] \ n " , argv0 ) ; <nl> + } <nl> + <nl> + string get_SSH_IP ( void ) <nl> + { <nl> + const char * SSH_CONNECTION = getenv ( " SSH_CONNECTION " ) ; <nl> + fatal_assert ( SSH_CONNECTION ) ; <nl> + char * SSH_writable = strdup ( SSH_CONNECTION ) ; <nl> + fatal_assert ( SSH_writable ) ; <nl> + strtok ( SSH_writable , " " ) ; <nl> + const char * local_interface_IP = strtok ( NULL , " " ) ; <nl> + fatal_assert ( local_interface_IP ) ; <nl> + return string ( local_interface_IP ) ; <nl> + } <nl> + <nl> int main ( int argc , char * argv [ ] ) <nl> { <nl> char * desired_ip = NULL ; <nl> char * desired_port = NULL ; <nl> char * * command = NULL ; <nl> <nl> - / * Look for command * / <nl> + / * strip off command * / <nl> for ( int i = 0 ; i < argc ; i + + ) { <nl> if ( 0 = = strcmp ( argv [ i ] , " - - " ) ) { / * start of command * / <nl> if ( i ! = argc - 1 ) { <nl> int main ( int argc , char * argv [ ] ) <nl> } <nl> } <nl> <nl> - if ( argc = = 1 ) { <nl> - desired_ip = NULL ; <nl> + / * Parse new command - line syntax * / <nl> + if ( ( argc > = 2 ) <nl> + & & ( strcmp ( argv [ 1 ] , " new " ) = = 0 ) ) { <nl> + / * new option syntax * / <nl> + int opt ; <nl> + while ( ( opt = getopt ( argc , argv , " i : p : s " ) ) ! = - 1 ) { <nl> + switch ( opt ) { <nl> + case ' i ' : <nl> + desired_ip = optarg ; <nl> + break ; <nl> + case ' p ' : <nl> + desired_port = optarg ; <nl> + break ; <nl> + case ' s ' : <nl> + desired_ip = strdup ( get_SSH_IP ( ) . c_str ( ) ) ; <nl> + fatal_assert ( desired_ip ) ; <nl> + break ; <nl> + default : <nl> + print_usage ( argv [ 0 ] ) ; <nl> + exit ( 1 ) ; <nl> + } <nl> + } <nl> + } else if ( argc = = 1 ) { <nl> + / * do nothing * / <nl> } else if ( argc = = 2 ) { <nl> desired_ip = argv [ 1 ] ; <nl> } else if ( argc = = 3 ) { <nl> desired_ip = argv [ 1 ] ; <nl> desired_port = argv [ 2 ] ; <nl> } else { <nl> - fprintf ( stderr , " Usage : % s [ LOCALADDR ] [ PORT ] [ - - COMMAND1 COMMAND2 . . . ] \ n " , argv [ 0 ] ) ; <nl> + print_usage ( argv [ 0 ] ) ; <nl> + exit ( 1 ) ; <nl> + } <nl> + <nl> + / * Sanity - check arguments * / <nl> + if ( desired_ip <nl> + & & ( strspn ( desired_ip , " 0123456789 . " ) ! = strlen ( desired_ip ) ) ) { <nl> + print_usage ( argv [ 0 ] ) ; <nl> + exit ( 1 ) ; <nl> + } <nl> + <nl> + if ( desired_port <nl> + & & ( strspn ( desired_ip , " 0123456789 " ) ! = strlen ( desired_ip ) ) ) { <nl> + print_usage ( argv [ 0 ] ) ; <nl> exit ( 1 ) ; <nl> } <nl> <nl>
New option - parsing for mosh - server
mobile-shell/mosh
0e5fd920b133dd715c30cbefd57afe8ff572fd27
2012-03-18T07:13:50Z
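The new server syntax is `mosh-server new [-s] [-i LOCALADDR] [-p PORT] [-- COMMAND...]`, parsed with getopt after the trailing command is split off. A rough Python sketch of the same parsing shape (for illustration only; mosh itself is C++, and the option meanings follow the usage string in this commit):

import argparse

def parse(argv):
    # Strip off "-- COMMAND ..." first, as the C++ code does, then parse the flags.
    command = None
    if "--" in argv:
        split = argv.index("--")
        argv, command = argv[:split], argv[split + 1:]
    parser = argparse.ArgumentParser(prog="mosh-server new")
    parser.add_argument("-i", dest="ip", help="local address to bind")
    parser.add_argument("-p", dest="port", help="UDP port to listen on")
    parser.add_argument("-s", dest="use_ssh_ip", action="store_true",
                        help="derive the bind address from $SSH_CONNECTION")
    return parser.parse_args(argv), command

args, command = parse(["-s", "-p", "60001", "--", "bash", "-l"])
print(args.use_ssh_ip, args.port, command)  # True 60001 ['bash', '-l']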
mmm a / modules / features2d / src / blobdetector . cpp <nl> ppp b / modules / features2d / src / blobdetector . cpp <nl> void SimpleBlobDetector : : detectImpl ( const cv : : Mat & image , std : : vector < cv : : KeyPoi <nl> KeyPoint kpt ( sumPoint , ( float ) params . defaultKeypointSize ) ; <nl> keypoints . push_back ( kpt ) ; <nl> } <nl> + <nl> + # ifdef DEBUG_BLOB_DETECTOR <nl> + namedWindow ( " keypoints " , CV_WINDOW_NORMAL ) ; <nl> + Mat outImg = image . clone ( ) ; <nl> + for ( size_t i = 0 ; i < keypoints . size ( ) ; i + + ) <nl> + { <nl> + circle ( outImg , keypoints [ i ] . pt , 2 , Scalar ( 255 , 0 , 255 ) , - 1 ) ; <nl> + } <nl> + / / drawKeypoints ( image , keypoints , outImg ) ; <nl> + imshow ( " keypoints " , outImg ) ; <nl> + waitKey ( ) ; <nl> + # endif <nl> } <nl>
Added drawing of a new image for debugging of the SimpleBlobDetector class .
opencv/opencv
5a3e7d041f99384b5d029feceb256530ac37f6cf
2011-05-30T06:54:59Z
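The change above adds a compile-time debug view (DEBUG_BLOB_DETECTOR) that draws detected keypoints onto a copy of the input image. Roughly the same picture can be produced from user code via the Python bindings; a sketch, using the modern cv2 API names rather than the 2011-era ones, with "blobs.png" as a placeholder input file:

import cv2

image = cv2.imread("blobs.png", cv2.IMREAD_GRAYSCALE)
detector = cv2.SimpleBlobDetector_create()        # default parameters
keypoints = detector.detect(image)

out = cv2.cvtColor(image, cv2.COLOR_GRAY2BGR)
for kp in keypoints:
    center = (int(kp.pt[0]), int(kp.pt[1]))
    cv2.circle(out, center, 2, (255, 0, 255), -1)  # same magenta dots as the debug code

cv2.imshow("keypoints", out)
cv2.waitKey()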
mmm a / tensorflow / core / util / mkl_util . h <nl> ppp b / tensorflow / core / util / mkl_util . h <nl> inline void SetDummyMklDnnShapeOutput ( OpKernelContext * context , <nl> } <nl> <nl> / / If the input tensor has ref count as 1 , it is forwarded to the desired <nl> - / / output port and the function reutrns true . In that case , it also allocates <nl> + / / output port and the function returns true . In that case , it also allocates <nl> / / the serialized MklDnnShape object . Otherwise , the function returns false . <nl> inline bool ForwardMklTensorInToOutWithMklShape ( OpKernelContext * context , <nl> int idx_in , int idx_out , <nl> inline bool ForwardMklTensorInToOutWithMklShape ( OpKernelContext * context , <nl> if ( is_forwarded | | always_forward ) { <nl> AllocateOutputSetMklShape ( context , idx_out , mkl_shape ) ; <nl> return true ; <nl> - } else { <nl> - return false ; <nl> } <nl> + return false ; <nl> } <nl> <nl> / / Forward the MKL shape ONLY ( used in elementwise and other ops where <nl> mmm a / tensorflow / python / kernel_tests / conv_ops_test . py <nl> ppp b / tensorflow / python / kernel_tests / conv_ops_test . py <nl> def Test ( self ) : <nl> class FusedConv2DTest ( test . TestCase ) : <nl> <nl> def _CreateNumpyTensor ( self , shape ) : <nl> - total_size = 1 <nl> - for s in shape : <nl> - total_size * = s <nl> + total_size = np . prod ( shape ) <nl> return np . arange ( 1 , total_size + 1 , dtype = np . float32 ) . reshape ( shape ) <nl> <nl> def _CreateConv2D ( self , input_values , filters , <nl> def _CreateConv2D ( self , input_values , filters , <nl> strides = strides , <nl> padding = padding ) <nl> <nl> - @ test_util . deprecated_graph_mode_only <nl> + # Tests tensor forwarding of a fused Conv2D + BiasAdd + Add op when the input to <nl> + # Add has refcount 1 . <nl> + @ test_util . run_in_graph_and_eager_modes ( use_gpu = False ) <nl> def testAddWithRefCountOne ( self ) : <nl> expected_output = [ <nl> 113377 , 125570 , 77305 , 86738 , 19433 , 22226 , 60681 , <nl> def testAddWithRefCountOne ( self ) : <nl> filter_in = self . _CreateNumpyTensor ( filter_in_sizes ) <nl> bias_in = self . _CreateNumpyTensor ( bias_in_sizes ) <nl> # To get different weights for filter <nl> - ofs = 1 <nl> + offset = 1 <nl> <nl> conv1 = self . _CreateConv2D ( x , filter_in ) <nl> - conv2 = self . _CreateConv2D ( conv1 , filter_in + ofs ) <nl> + conv2 = self . _CreateConv2D ( conv1 , filter_in + offset ) <nl> <nl> - conv = self . _CreateConv2D ( conv1 , filter_in - ofs ) <nl> + conv = self . _CreateConv2D ( conv1 , filter_in - offset ) <nl> bias_add = nn_ops . bias_add ( conv , bias_in ) <nl> add = math_ops . add_n ( [ bias_add , conv2 ] ) <nl> <nl> def testAddWithRefCountOne ( self ) : <nl> np . rint ( expected_output ) , <nl> self . evaluate ( add ) . reshape ( - 1 ) ) <nl> <nl> - @ test_util . deprecated_graph_mode_only <nl> + # Tests tensor forwarding of a fused Conv2D + BiasAdd + Add op when the input to <nl> + # Add has a total refcount of 2 , and Add is its last consumer . <nl> + @ test_util . run_in_graph_and_eager_modes ( use_gpu = False ) <nl> def testAddWithRefCountTwoAndRunAddLast ( self ) : <nl> expected_output = [ <nl> 1 . 907175e + 06 , 2 . 253505e + 06 , 7 . 809210e + 05 , 9 . 537180e + 05 , <nl> def testAddWithRefCountTwoAndRunAddLast ( self ) : <nl> filter_in = self . _CreateNumpyTensor ( filter_in_sizes ) <nl> bias_in = self . _CreateNumpyTensor ( bias_in_sizes ) <nl> # To get different weights for filter <nl> - ofs = 1 <nl> + offset = 1 <nl> <nl> conv1 = self . 
_CreateConv2D ( x , filter_in ) <nl> - conv2 = self . _CreateConv2D ( conv1 , filter_in + ofs ) <nl> + conv2 = self . _CreateConv2D ( conv1 , filter_in + offset ) <nl> <nl> - conv = self . _CreateConv2D ( conv2 , filter_in - ofs ) <nl> + conv = self . _CreateConv2D ( conv2 , filter_in - offset ) <nl> bias_add = nn_ops . bias_add ( conv , bias_in ) <nl> add = math_ops . add_n ( [ bias_add , conv1 ] ) <nl> <nl> def testAddWithRefCountTwoAndRunAddLast ( self ) : <nl> np . rint ( expected_output ) , <nl> self . evaluate ( add ) . reshape ( - 1 ) ) <nl> <nl> - @ test_util . deprecated_graph_mode_only <nl> + # Tests tensor forwarding of a fused Conv2D + BiasAdd + Add op when the input to <nl> + # Add has refcount 2 and Add ( in the fused Conv2D op ) is its first consumer . <nl> + @ test_util . run_in_graph_and_eager_modes ( use_gpu = False ) <nl> def testAddWithRefCountTwoAndRunAddFirst ( self ) : <nl> expected_output = [ <nl> 176161 , 194450 , 120673 , 134822 , 30545 , 34734 , 96041 , <nl> def testAddWithRefCountTwoAndRunAddFirst ( self ) : <nl> filter_in = self . _CreateNumpyTensor ( filter_in_sizes ) <nl> bias_in = self . _CreateNumpyTensor ( bias_in_sizes ) <nl> # To get different weights for filter <nl> - ofs = 1 <nl> + offset = 1 <nl> <nl> conv1 = self . _CreateConv2D ( x , filter_in ) <nl> - conv2 = self . _CreateConv2D ( conv1 , filter_in + ofs ) <nl> + conv2 = self . _CreateConv2D ( conv1 , filter_in + offset ) <nl> <nl> - conv = self . _CreateConv2D ( conv1 , filter_in - ofs ) <nl> + conv = self . _CreateConv2D ( conv1 , filter_in - offset ) <nl> bias_add = nn_ops . bias_add ( conv , bias_in ) <nl> add = math_ops . add_n ( [ bias_add , conv2 ] ) <nl> <nl> def testAddWithRefCountTwoAndRunAddFirst ( self ) : <nl> np . rint ( expected_output ) , <nl> self . evaluate ( output ) . reshape ( - 1 ) ) <nl> <nl> - @ test_util . deprecated_graph_mode_only <nl> + # Tests tensor forwarding of a fused Conv2D + BiasAdd + Add op when the input to <nl> + # Add has refcount 2 , and there is no dependency between its two consumers . <nl> + @ test_util . run_in_graph_and_eager_modes ( use_gpu = False ) <nl> def testAddWithRefCountTwoAndNoDependence ( self ) : <nl> expected_output = [ <nl> 176161 , 194450 , 120673 , 134822 , 30545 , 34734 , 96041 , <nl> def testAddWithRefCountTwoAndNoDependence ( self ) : <nl> filter_in = self . _CreateNumpyTensor ( filter_in_sizes ) <nl> bias_in = self . _CreateNumpyTensor ( bias_in_sizes ) <nl> # To get different weights for filter <nl> - ofs = 1 <nl> + offset = 1 <nl> <nl> conv1 = self . _CreateConv2D ( x , filter_in ) <nl> - conv2 = self . _CreateConv2D ( conv1 , filter_in + ofs ) <nl> + conv2 = self . _CreateConv2D ( conv1 , filter_in + offset ) <nl> <nl> - conv = self . _CreateConv2D ( conv1 , filter_in - ofs ) <nl> + conv = self . _CreateConv2D ( conv1 , filter_in - offset ) <nl> bias_add = nn_ops . bias_add ( conv , bias_in ) <nl> add = math_ops . add_n ( [ bias_add , conv2 ] ) <nl> <nl> def testAddWithRefCountTwoAndNoDependence ( self ) : <nl> np . rint ( expected_output ) , <nl> self . evaluate ( output ) . reshape ( - 1 ) ) <nl> <nl> - <nl> - @ test_util . deprecated_graph_mode_only <nl> + # Tests tensor forwarding of a fused Conv2D + BiasAdd + Add op when the input to <nl> + # Add is the same as the input to the fused Conv2D op and needs a tensor <nl> + # buffer . <nl> + @ test_util . 
run_in_graph_and_eager_modes ( use_gpu = False ) <nl> def testAddWithSameSrcAndAddTensorBuffer ( self ) : <nl> expected_output = [ <nl> 57157 , 63298 , 39249 , 44026 , 9971 , 11402 , 31193 , 36306 , <nl>
Addressed comments .
tensorflow/tensorflow
c53f7a7b1b386b4031d27dcd198c203921e9eda8
2020-10-09T00:15:18Z
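The reworked tests above all build the same small graph: two convolutions share conv1 as input, one branch gets a bias add, and add_n supplies the extra add that a fused Conv2D+BiasAdd+Add kernel can absorb. A condensed sketch of that pattern, with illustrative shapes rather than the ones used in the tests:

import numpy as np
import tensorflow as tf

def create_numpy_tensor(shape):
    total_size = np.prod(shape)  # the simplification made in this commit
    return np.arange(1, total_size + 1, dtype=np.float32).reshape(shape)

x = create_numpy_tensor([1, 3, 3, 1])
f = create_numpy_tensor([1, 1, 1, 1])
bias = create_numpy_tensor([1])
offset = 1  # gives the convolutions different weights

conv1 = tf.nn.conv2d(x, f, strides=[1, 1, 1, 1], padding="VALID")
conv2 = tf.nn.conv2d(conv1, f + offset, strides=[1, 1, 1, 1], padding="VALID")
conv = tf.nn.conv2d(conv1, f - offset, strides=[1, 1, 1, 1], padding="VALID")
add = tf.add_n([tf.nn.bias_add(conv, bias), conv2])  # candidate for Conv2D+BiasAdd+Add fusion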
mmm a / Changelog <nl> ppp b / Changelog <nl> <nl> * Unknown - Christophe Dumez < chris @ qbittorrent . org > - v1 . 4 . 0 <nl> - FEATURE : Allow to define temporary download folder <nl> - FEATURE : Display total amount of uploaded data in finished list <nl> + - FEATURE : Resizing a column in a search results tab affects all tabs <nl> + - FEATURE : Search results tab columns are now remembered upon startup <nl> - COSMETIC : Redesigned program preferences <nl> - COSMETIC : Updated icons set <nl> <nl> mmm a / src / FinishedTorrents . cpp <nl> ppp b / src / FinishedTorrents . cpp <nl> void FinishedTorrents : : saveColWidthFinishedList ( ) const { <nl> width_list = line . split ( ' ' ) ; <nl> } <nl> for ( short i = 0 ; i < nbColumns ; + + i ) { <nl> - if ( finishedList - > columnWidth ( i ) < 1 & & width_list . size ( ) = = finishedListModel - > columnCount ( ) - 1 & & width_list . at ( i ) . toInt ( ) > = 1 ) { <nl> + if ( finishedList - > columnWidth ( i ) < 1 & & width_list . size ( ) = = nbColumns & & width_list . at ( i ) . toInt ( ) > = 1 ) { <nl> / / load the former width <nl> new_width_list < < width_list . at ( i ) ; <nl> } else if ( finishedList - > columnWidth ( i ) > = 1 ) { <nl> mmm a / src / SearchTab . cpp <nl> ppp b / src / SearchTab . cpp <nl> SearchTab : : SearchTab ( SearchEngine * parent ) : QWidget ( ) <nl> connect ( resultsBrowser - > header ( ) , SIGNAL ( sectionPressed ( int ) ) , this , SLOT ( sortSearchList ( int ) ) ) ; <nl> <nl> / / Load last columns width for search results list <nl> - if ( ! loadColWidthSearchList ( ) ) { <nl> + if ( ! loadColWidthResultsList ( ) ) { <nl> resultsBrowser - > header ( ) - > resizeSection ( 0 , 275 ) ; <nl> } <nl> } <nl> <nl> - SearchTab : : ~ SearchTab ( ) <nl> - { <nl> - saveColWidthSearchList ( ) ; <nl> + SearchTab : : ~ SearchTab ( ) { <nl> delete resultsBrowser ; <nl> delete SearchListModel ; <nl> delete SearchDelegate ; <nl> } <nl> <nl> + QHeaderView * SearchTab : : header ( ) const { <nl> + return resultsBrowser - > header ( ) ; <nl> + } <nl> + <nl> + bool SearchTab : : loadColWidthResultsList ( ) { <nl> + QSettings settings ( " qBittorrent " , " qBittorrent " ) ; <nl> + QString line = settings . value ( " SearchResultsColsWidth " , QString ( ) ) . toString ( ) ; <nl> + if ( line . isEmpty ( ) ) <nl> + return false ; <nl> + QStringList width_list = line . split ( ' ' ) ; <nl> + if ( width_list . size ( ) < SearchListModel - > columnCount ( ) ) <nl> + return false ; <nl> + unsigned int listSize = width_list . size ( ) ; <nl> + for ( unsigned int i = 0 ; i < listSize ; + + i ) { <nl> + resultsBrowser - > header ( ) - > resizeSection ( i , width_list . at ( i ) . toInt ( ) ) ; <nl> + } <nl> + return true ; <nl> + } <nl> + <nl> QLabel * SearchTab : : getCurrentLabel ( ) <nl> { <nl> return results_lbl ; <nl> void SearchTab : : sortSearchListString ( int index , Qt : : SortOrder sortOrder ) { <nl> SearchListModel - > removeRows ( 0 , nbRows_old ) ; <nl> } <nl> <nl> - / / Save columns width in a file to remember them <nl> - / / ( download list ) <nl> - void SearchTab : : saveColWidthSearchList ( ) const { <nl> - qDebug ( " Saving columns width in search list " ) ; <nl> - QSettings settings ( " qBittorrent " , " qBittorrent " ) ; <nl> - QStringList width_list ; <nl> - for ( int i = 0 ; i < SearchListModel - > columnCount ( ) ; + + i ) { <nl> - width_list < < misc : : toQString ( resultsBrowser - > columnWidth ( i ) ) ; <nl> - } <nl> - settings . setValue ( " SearchListColsWidth " , width_list . 
join ( " " ) ) ; <nl> - qDebug ( " Search list columns width saved " ) ; <nl> - } <nl> - <nl> - / / Load columns width in a file that were saved previously <nl> - / / ( search list ) <nl> - bool SearchTab : : loadColWidthSearchList ( ) { <nl> - qDebug ( " Loading columns width for search list " ) ; <nl> - QSettings settings ( " qBittorrent " , " qBittorrent " ) ; <nl> - QString line = settings . value ( " SearchListColsWidth " , QString ( ) ) . toString ( ) ; <nl> - if ( line . isEmpty ( ) ) <nl> - return false ; <nl> - QStringList width_list = line . split ( ' ' ) ; <nl> - if ( width_list . size ( ) ! = SearchListModel - > columnCount ( ) ) <nl> - return false ; <nl> - for ( int i = 0 ; i < width_list . size ( ) ; + + i ) { <nl> - resultsBrowser - > header ( ) - > resizeSection ( i , width_list . at ( i ) . toInt ( ) ) ; <nl> - } <nl> - qDebug ( " Search list columns width loaded " ) ; <nl> - return true ; <nl> - } <nl> mmm a / src / SearchTab . h <nl> ppp b / src / SearchTab . h <nl> class SearchTab : public QWidget , public Ui : : search_engine <nl> public : <nl> SearchTab ( SearchEngine * parent ) ; <nl> ~ SearchTab ( ) ; <nl> - bool loadColWidthSearchList ( ) ; <nl> + bool loadColWidthResultsList ( ) ; <nl> QLabel * getCurrentLabel ( ) ; <nl> QStandardItemModel * getCurrentSearchListModel ( ) ; <nl> QTreeView * getCurrentTreeView ( ) ; <nl> void setRowColor ( int row , QString color ) ; <nl> + QHeaderView * header ( ) const ; <nl> + <nl> protected slots : <nl> void sortSearchList ( int index ) ; <nl> void sortSearchListInt ( int index , Qt : : SortOrder sortOrder ) ; <nl> void sortSearchListString ( int index , Qt : : SortOrder sortOrder ) ; <nl> - void saveColWidthSearchList ( ) const ; <nl> <nl> } ; <nl> <nl> mmm a / src / searchEngine . cpp <nl> ppp b / src / searchEngine . cpp <nl> void SearchEngine : : on_search_button_clicked ( ) { <nl> } <nl> / / Tab Addition <nl> currentSearchTab = new SearchTab ( this ) ; <nl> + connect ( currentSearchTab - > header ( ) , SIGNAL ( sectionResized ( int , int , int ) ) , this , SLOT ( propagateSectionResized ( int , int , int ) ) ) ; <nl> all_tab . append ( currentSearchTab ) ; <nl> tabWidget - > addTab ( currentSearchTab , pattern ) ; <nl> tabWidget - > setCurrentWidget ( currentSearchTab ) ; <nl> void SearchEngine : : on_search_button_clicked ( ) { <nl> searchTimeout - > start ( 180000 ) ; / / 3min <nl> } <nl> <nl> + void SearchEngine : : propagateSectionResized ( int index , int , int newsize ) { <nl> + foreach ( SearchTab * tab , all_tab ) { <nl> + tab - > getCurrentTreeView ( ) - > setColumnWidth ( index , newsize ) ; <nl> + } <nl> + saveResultsColumnsWidth ( ) ; <nl> + } <nl> + <nl> + void SearchEngine : : saveResultsColumnsWidth ( ) { <nl> + if ( all_tab . size ( ) > 0 ) { <nl> + QTreeView * treeview = all_tab . first ( ) - > getCurrentTreeView ( ) ; <nl> + QSettings settings ( " qBittorrent " , " qBittorrent " ) ; <nl> + QStringList width_list ; <nl> + QStringList new_width_list ; <nl> + short nbColumns = all_tab . first ( ) - > getCurrentSearchListModel ( ) - > columnCount ( ) ; <nl> + <nl> + QString line = settings . value ( " SearchResultsColsWidth " , QString ( ) ) . toString ( ) ; <nl> + if ( ! line . isEmpty ( ) ) { <nl> + width_list = line . split ( ' ' ) ; <nl> + } <nl> + for ( short i = 0 ; i < nbColumns ; + + i ) { <nl> + if ( treeview - > columnWidth ( i ) < 1 & & width_list . size ( ) = = nbColumns & & width_list . at ( i ) . toInt ( ) > = 1 ) { <nl> + / / load the former width <nl> + new_width_list < < width_list . 
at ( i ) ; <nl> + } else if ( treeview - > columnWidth ( i ) > = 1 ) { <nl> + / / usual case , save the current width <nl> + new_width_list < < QString : : fromUtf8 ( misc : : toString ( treeview - > columnWidth ( i ) ) . c_str ( ) ) ; <nl> + } else { <nl> + / / default width <nl> + treeview - > resizeColumnToContents ( i ) ; <nl> + new_width_list < < QString : : fromUtf8 ( misc : : toString ( treeview - > columnWidth ( i ) ) . c_str ( ) ) ; <nl> + } <nl> + } <nl> + settings . setValue ( " SearchResultsColsWidth " , new_width_list . join ( " " ) ) ; <nl> + } <nl> + } <nl> + <nl> void SearchEngine : : searchStarted ( ) { <nl> / / Update SearchEngine widgets <nl> search_status - > setText ( tr ( " Searching . . . " ) ) ; <nl> mmm a / src / searchEngine . h <nl> ppp b / src / searchEngine . h <nl> class SearchEngine : public QWidget , public Ui : : search_engine { <nl> void saveSearchHistory ( ) ; <nl> void on_enginesButton_clicked ( ) ; <nl> void on_clearPatternButton_clicked ( ) ; <nl> + void propagateSectionResized ( int index , int oldsize , int newsize ) ; <nl> + void saveResultsColumnsWidth ( ) ; <nl> } ; <nl> <nl> # endif <nl>
- FEATURE : Resizing a column in a search results tab affects all tabs
qbittorrent/qBittorrent
a2bcfa9192949315b0d9e72d3f2ccef74d6ec0d7
2009-03-09T21:24:40Z
mmm a / utils / build - script - impl <nl> ppp b / utils / build - script - impl <nl> else <nl> ) <nl> fi <nl> <nl> + <nl> + CMAKE_JOBS = " $ { BUILD_JOBS } " <nl> if [ [ " $ { DISTCC } " ] ] ; then <nl> - BUILD_ARGS = " $ { BUILD_ARGS } - j $ ( distcc - j ) " <nl> + CMAKE_JOBS = " $ ( distcc - j ) " <nl> fi <nl> <nl> case " $ { CMAKE_GENERATOR } " in <nl> Ninja ) <nl> - BUILD_ARGS = " $ { BUILD_ARGS } - j $ { BUILD_JOBS } " <nl> + BUILD_ARGS = " $ { BUILD_ARGS } - j $ { CMAKE_JOBS } " <nl> if [ [ " $ { VERBOSE_BUILD } " ] ] ; then <nl> BUILD_ARGS = " $ { BUILD_ARGS } - v " <nl> fi <nl> ; ; <nl> ' Unix Makefiles ' ) <nl> - BUILD_ARGS = " $ { BUILD_ARGS } - j $ { BUILD_JOBS } " <nl> + BUILD_ARGS = " $ { BUILD_ARGS } - j $ { CMAKE_JOBS } " <nl> if [ [ " $ { VERBOSE_BUILD } " ] ] ; then <nl> BUILD_ARGS = " $ { BUILD_ARGS } VERBOSE = 1 " <nl> fi <nl> case " $ { CMAKE_GENERATOR } " in <nl> # but since we ' re not using proper Xcode 4 schemes , this is the <nl> # only way to get target - level parallelism . <nl> BUILD_ARGS = " $ { BUILD_ARGS } - parallelizeTargets " <nl> - BUILD_ARGS = " $ { BUILD_ARGS } - jobs $ { BUILD_JOBS } " <nl> + BUILD_ARGS = " $ { BUILD_ARGS } - jobs $ { CMAKE_JOBS } " <nl> BUILD_TARGET_FLAG = " - target " <nl> COMMON_CMAKE_OPTIONS = ( <nl> " $ { COMMON_CMAKE_OPTIONS [ @ ] } " <nl>
build - script : let distcc dictate parallelism
apple/swift
887c3bbe82eda25f7e86d6211d669151dd3635b1
2016-02-23T02:57:41Z
mmm a / src / csharp / Grpc . Core . Tests / Internal / FakeNativeCall . cs <nl> ppp b / src / csharp / Grpc . Core . Tests / Internal / FakeNativeCall . cs <nl> public void StartSendCloseFromClient ( SendCompletionHandler callback ) <nl> SendCompletionHandler = callback ; <nl> } <nl> <nl> - public void StartSendStatusFromServer ( SendCompletionHandler callback , Status status , MetadataArraySafeHandle metadataArray , bool sendEmptyInitialMetadata ) <nl> + public void StartSendStatusFromServer ( SendCompletionHandler callback , Status status , MetadataArraySafeHandle metadataArray , bool sendEmptyInitialMetadata , <nl> + byte [ ] optionalPayload , WriteFlags writeFlags ) <nl> { <nl> SendStatusFromServerHandler = callback ; <nl> } <nl> mmm a / src / csharp / Grpc . Core / Internal / AsyncCallServer . cs <nl> ppp b / src / csharp / Grpc . Core / Internal / AsyncCallServer . cs <nl> public Task SendStatusFromServerAsync ( Status status , Metadata trailers ) <nl> <nl> using ( var metadataArray = MetadataArraySafeHandle . Create ( trailers ) ) <nl> { <nl> - call . StartSendStatusFromServer ( HandleSendStatusFromServerFinished , status , metadataArray , ! initialMetadataSent ) ; <nl> + call . StartSendStatusFromServer ( HandleSendStatusFromServerFinished , status , metadataArray , ! initialMetadataSent , <nl> + null , new WriteFlags ( ) ) ; <nl> } <nl> halfcloseRequested = true ; <nl> initialMetadataSent = true ; <nl> mmm a / src / csharp / Grpc . Core / Internal / CallSafeHandle . cs <nl> ppp b / src / csharp / Grpc . Core / Internal / CallSafeHandle . cs <nl> public void StartSendCloseFromClient ( SendCompletionHandler callback ) <nl> } <nl> } <nl> <nl> - public void StartSendStatusFromServer ( SendCompletionHandler callback , Status status , MetadataArraySafeHandle metadataArray , bool sendEmptyInitialMetadata ) <nl> + public void StartSendStatusFromServer ( SendCompletionHandler callback , Status status , MetadataArraySafeHandle metadataArray , bool sendEmptyInitialMetadata , <nl> + byte [ ] optionalPayload , WriteFlags writeFlags ) <nl> { <nl> using ( completionQueue . NewScope ( ) ) <nl> { <nl> var ctx = BatchContextSafeHandle . Create ( ) ; <nl> + var optionalPayloadLength = optionalPayload ! = null ? new UIntPtr ( ( ulong ) optionalPayload . Length ) : UIntPtr . Zero ; <nl> completionRegistry . RegisterBatchCompletion ( ctx , ( success , context ) = > callback ( success ) ) ; <nl> - Native . grpcsharp_call_send_status_from_server ( this , ctx , status . StatusCode , status . Detail , metadataArray , sendEmptyInitialMetadata ) . CheckOk ( ) ; <nl> + Native . grpcsharp_call_send_status_from_server ( this , ctx , status . StatusCode , status . Detail , metadataArray , sendEmptyInitialMetadata , <nl> + optionalPayload , optionalPayloadLength , writeFlags ) . CheckOk ( ) ; <nl> } <nl> } <nl> <nl> mmm a / src / csharp / Grpc . Core / Internal / INativeCall . cs <nl> ppp b / src / csharp / Grpc . Core / Internal / INativeCall . cs <nl> internal interface INativeCall : IDisposable <nl> <nl> void StartSendCloseFromClient ( SendCompletionHandler callback ) ; <nl> <nl> - void StartSendStatusFromServer ( SendCompletionHandler callback , Grpc . Core . Status status , MetadataArraySafeHandle metadataArray , bool sendEmptyInitialMetadata ) ; <nl> + void StartSendStatusFromServer ( SendCompletionHandler callback , Grpc . Core . Status status , MetadataArraySafeHandle metadataArray , bool sendEmptyInitialMetadata , byte [ ] optionalPayload , Grpc . Core . 
WriteFlags writeFlags ) ; <nl> <nl> void StartServerSide ( ReceivedCloseOnServerHandler callback ) ; <nl> } <nl> mmm a / src / csharp / Grpc . Core / Internal / NativeMethods . cs <nl> ppp b / src / csharp / Grpc . Core / Internal / NativeMethods . cs <nl> public class Delegates <nl> public delegate GRPCCallError grpcsharp_call_cancel_delegate ( CallSafeHandle call ) ; <nl> public delegate GRPCCallError grpcsharp_call_cancel_with_status_delegate ( CallSafeHandle call , StatusCode status , string description ) ; <nl> public delegate GRPCCallError grpcsharp_call_start_unary_delegate ( CallSafeHandle call , <nl> - BatchContextSafeHandle ctx , byte [ ] send_buffer , UIntPtr send_buffer_len , MetadataArraySafeHandle metadataArray , WriteFlags writeFlags ) ; <nl> + BatchContextSafeHandle ctx , byte [ ] sendBuffer , UIntPtr sendBufferLen , MetadataArraySafeHandle metadataArray , WriteFlags writeFlags ) ; <nl> public delegate GRPCCallError grpcsharp_call_start_client_streaming_delegate ( CallSafeHandle call , <nl> BatchContextSafeHandle ctx , MetadataArraySafeHandle metadataArray ) ; <nl> public delegate GRPCCallError grpcsharp_call_start_server_streaming_delegate ( CallSafeHandle call , <nl> - BatchContextSafeHandle ctx , byte [ ] send_buffer , UIntPtr send_buffer_len , <nl> + BatchContextSafeHandle ctx , byte [ ] sendBuffer , UIntPtr sendBufferLen , <nl> MetadataArraySafeHandle metadataArray , WriteFlags writeFlags ) ; <nl> public delegate GRPCCallError grpcsharp_call_start_duplex_streaming_delegate ( CallSafeHandle call , <nl> BatchContextSafeHandle ctx , MetadataArraySafeHandle metadataArray ) ; <nl> public delegate GRPCCallError grpcsharp_call_send_message_delegate ( CallSafeHandle call , <nl> - BatchContextSafeHandle ctx , byte [ ] send_buffer , UIntPtr send_buffer_len , WriteFlags writeFlags , bool sendEmptyInitialMetadata ) ; <nl> + BatchContextSafeHandle ctx , byte [ ] sendBuffer , UIntPtr sendBufferLen , WriteFlags writeFlags , bool sendEmptyInitialMetadata ) ; <nl> public delegate GRPCCallError grpcsharp_call_send_close_from_client_delegate ( CallSafeHandle call , <nl> BatchContextSafeHandle ctx ) ; <nl> public delegate GRPCCallError grpcsharp_call_send_status_from_server_delegate ( CallSafeHandle call , <nl> - BatchContextSafeHandle ctx , StatusCode statusCode , string statusMessage , MetadataArraySafeHandle metadataArray , bool sendEmptyInitialMetadata ) ; <nl> + BatchContextSafeHandle ctx , StatusCode statusCode , string statusMessage , MetadataArraySafeHandle metadataArray , bool sendEmptyInitialMetadata , <nl> + byte [ ] optionalSendBuffer , UIntPtr optionalSendBufferLen , WriteFlags writeFlags ) ; <nl> public delegate GRPCCallError grpcsharp_call_recv_message_delegate ( CallSafeHandle call , <nl> BatchContextSafeHandle ctx ) ; <nl> public delegate GRPCCallError grpcsharp_call_recv_initial_metadata_delegate ( CallSafeHandle call , <nl> private class PInvokeMethods <nl> <nl> [ DllImport ( " grpc_csharp_ext . dll " ) ] <nl> public static extern GRPCCallError grpcsharp_call_start_unary ( CallSafeHandle call , <nl> - BatchContextSafeHandle ctx , byte [ ] send_buffer , UIntPtr send_buffer_len , MetadataArraySafeHandle metadataArray , WriteFlags writeFlags ) ; <nl> + BatchContextSafeHandle ctx , byte [ ] sendBuffer , UIntPtr sendBufferLen , MetadataArraySafeHandle metadataArray , WriteFlags writeFlags ) ; <nl> <nl> [ DllImport ( " grpc_csharp_ext . 
dll " ) ] <nl> public static extern GRPCCallError grpcsharp_call_start_client_streaming ( CallSafeHandle call , <nl> private class PInvokeMethods <nl> <nl> [ DllImport ( " grpc_csharp_ext . dll " ) ] <nl> public static extern GRPCCallError grpcsharp_call_start_server_streaming ( CallSafeHandle call , <nl> - BatchContextSafeHandle ctx , byte [ ] send_buffer , UIntPtr send_buffer_len , <nl> + BatchContextSafeHandle ctx , byte [ ] sendBuffer , UIntPtr sendBufferLen , <nl> MetadataArraySafeHandle metadataArray , WriteFlags writeFlags ) ; <nl> <nl> [ DllImport ( " grpc_csharp_ext . dll " ) ] <nl> private class PInvokeMethods <nl> <nl> [ DllImport ( " grpc_csharp_ext . dll " ) ] <nl> public static extern GRPCCallError grpcsharp_call_send_message ( CallSafeHandle call , <nl> - BatchContextSafeHandle ctx , byte [ ] send_buffer , UIntPtr send_buffer_len , WriteFlags writeFlags , bool sendEmptyInitialMetadata ) ; <nl> + BatchContextSafeHandle ctx , byte [ ] sendBuffer , UIntPtr sendBufferLen , WriteFlags writeFlags , bool sendEmptyInitialMetadata ) ; <nl> <nl> [ DllImport ( " grpc_csharp_ext . dll " ) ] <nl> public static extern GRPCCallError grpcsharp_call_send_close_from_client ( CallSafeHandle call , <nl> private class PInvokeMethods <nl> <nl> [ DllImport ( " grpc_csharp_ext . dll " ) ] <nl> public static extern GRPCCallError grpcsharp_call_send_status_from_server ( CallSafeHandle call , <nl> - BatchContextSafeHandle ctx , StatusCode statusCode , string statusMessage , MetadataArraySafeHandle metadataArray , bool sendEmptyInitialMetadata ) ; <nl> + BatchContextSafeHandle ctx , StatusCode statusCode , string statusMessage , MetadataArraySafeHandle metadataArray , bool sendEmptyInitialMetadata , <nl> + byte [ ] optionalSendBuffer , UIntPtr optionalSendBufferLen , WriteFlags writeFlags ) ; <nl> <nl> [ DllImport ( " grpc_csharp_ext . dll " ) ] <nl> public static extern GRPCCallError grpcsharp_call_recv_message ( CallSafeHandle call , <nl> mmm a / src / csharp / ext / grpc_csharp_ext . c <nl> ppp b / src / csharp / ext / grpc_csharp_ext . c <nl> grpcsharp_call_send_close_from_client ( grpc_call * call , <nl> GPR_EXPORT grpc_call_error GPR_CALLTYPE grpcsharp_call_send_status_from_server ( <nl> grpc_call * call , grpcsharp_batch_context * ctx , grpc_status_code status_code , <nl> const char * status_details , grpc_metadata_array * trailing_metadata , <nl> - int32_t send_empty_initial_metadata ) { <nl> + int32_t send_empty_initial_metadata , const char * optional_send_buffer , <nl> + size_t optional_send_buffer_len , uint32_t write_flags ) { <nl> / * TODO : don ' t use magic number * / <nl> - grpc_op ops [ 2 ] ; <nl> - size_t nops = send_empty_initial_metadata ? 2 : 1 ; <nl> + grpc_op ops [ 3 ] ; <nl> + size_t nops = 1 ; <nl> ops [ 0 ] . op = GRPC_OP_SEND_STATUS_FROM_SERVER ; <nl> ops [ 0 ] . data . send_status_from_server . status = status_code ; <nl> ops [ 0 ] . data . send_status_from_server . status_details = <nl> GPR_EXPORT grpc_call_error GPR_CALLTYPE grpcsharp_call_send_status_from_server ( <nl> ctx - > send_status_from_server . trailing_metadata . metadata ; <nl> ops [ 0 ] . flags = 0 ; <nl> ops [ 0 ] . reserved = NULL ; <nl> - ops [ 1 ] . op = GRPC_OP_SEND_INITIAL_METADATA ; <nl> - ops [ 1 ] . data . send_initial_metadata . count = 0 ; <nl> - ops [ 1 ] . data . send_initial_metadata . metadata = NULL ; <nl> - ops [ 1 ] . flags = 0 ; <nl> - ops [ 1 ] . reserved = NULL ; <nl> - <nl> + if ( optional_send_buffer ) { <nl> + ops [ nops ] . 
op = GRPC_OP_SEND_MESSAGE ; <nl> + ctx - > send_message = string_to_byte_buffer ( optional_send_buffer , <nl> + optional_send_buffer_len ) ; <nl> + ops [ nops ] . data . send_message = ctx - > send_message ; <nl> + ops [ nops ] . flags = write_flags ; <nl> + ops [ nops ] . reserved = NULL ; <nl> + nops + + ; <nl> + } <nl> + if ( send_empty_initial_metadata ) { <nl> + ops [ nops ] . op = GRPC_OP_SEND_INITIAL_METADATA ; <nl> + ops [ nops ] . data . send_initial_metadata . count = 0 ; <nl> + ops [ nops ] . data . send_initial_metadata . metadata = NULL ; <nl> + ops [ nops ] . flags = 0 ; <nl> + ops [ nops ] . reserved = NULL ; <nl> + nops + + ; <nl> + } <nl> return grpc_call_start_batch ( call , ops , nops , ctx , NULL ) ; <nl> } <nl> <nl>
make SendStatusFromServer optionally send a message as well
grpc/grpc
305ffd4847617c7206b0b4ccec0fcd8977b2e095
2016-05-04T21:37:18Z
mmm a / tensorflow / contrib / framework / python / framework / experimental_test . py <nl> ppp b / tensorflow / contrib / framework / python / framework / experimental_test . py <nl> def _fn ( arg0 , arg1 ) : <nl> <nl> # Assert function docs are properly updated . <nl> self . assertEqual ( " _fn " , _fn . __name__ ) <nl> - self . assertEqual ( " fn doc . ( experimental ) " <nl> - " \ n " <nl> - " \ nTHIS FUNCTION IS EXPERIMENTAL . It may change or " <nl> - " be removed at any time , and without warning . " <nl> - " \ n " <nl> - " \ nArgs : " <nl> - " \ n arg0 : Arg 0 . " <nl> - " \ n arg1 : Arg 1 . " <nl> - " \ n " <nl> - " \ nReturns : " <nl> - " \ n Sum of args . " , _fn . __doc__ ) <nl> + self . assertEqual ( <nl> + " fn doc . ( experimental ) " <nl> + " \ n " <nl> + " \ nWarning : THIS FUNCTION IS EXPERIMENTAL . It may change " <nl> + " or be removed at any time , and without warning . " <nl> + " \ n " <nl> + " \ nArgs : " <nl> + " \ n arg0 : Arg 0 . " <nl> + " \ n arg1 : Arg 1 . " <nl> + " \ n " <nl> + " \ nReturns : " <nl> + " \ n Sum of args . " , _fn . __doc__ ) <nl> <nl> # Assert calling new fn issues log warning . <nl> self . assertEqual ( 3 , _fn ( 1 , 2 ) ) <nl> mmm a / tensorflow / python / util / decorator_utils . py <nl> ppp b / tensorflow / python / util / decorator_utils . py <nl> def _normalize_docstring ( docstring ) : <nl> <nl> def add_notice_to_docstring ( <nl> doc , instructions , no_doc_str , suffix_str , notice ) : <nl> - " " " Adds a deprecation notice to a docstring . " " " <nl> + " " " Adds a deprecation notice to a docstring . <nl> + <nl> + Args : <nl> + doc : The original docstring . <nl> + instructions : A string , describing how to fix the problem . <nl> + no_doc_str : The default value to use for ` doc ` if ` doc ` is empty . <nl> + suffix_str : Is added to the end of the first line . <nl> + notice : A list of strings . The main notice warning body . <nl> + <nl> + Returns : <nl> + A new docstring , with the notice attached . <nl> + <nl> + Raises : <nl> + ValueError : If ` notice ` is empty . <nl> + " " " <nl> if not doc : <nl> lines = [ no_doc_str ] <nl> else : <nl> lines = _normalize_docstring ( doc ) . splitlines ( ) <nl> lines [ 0 ] + = ' ' + suffix_str <nl> <nl> + if not notice : <nl> + raise ValueError ( ' The ` notice ` arg must not be empty . ' ) <nl> + <nl> + notice [ 0 ] = ' Warning : ' + notice [ 0 ] <nl> notice = [ ' ' ] + notice + ( [ instructions ] if instructions else [ ] ) <nl> <nl> if len ( lines ) > 1 : <nl> mmm a / tensorflow / python / util / decorator_utils_test . py <nl> ppp b / tensorflow / python / util / decorator_utils_test . py <nl> def _check ( self , doc , expected ) : <nl> expected ) <nl> <nl> def test_regular ( self ) : <nl> - expected = ( " Brief ( suffix ) \ n \ nGo away \ nInstructions \ n \ nDocstring \ n \ n " <nl> - " Args : \ n arg1 : desc " ) <nl> + expected = ( <nl> + " Brief ( suffix ) \ n \ nWarning : Go away \ nInstructions \ n \ nDocstring \ n \ n " <nl> + " Args : \ n arg1 : desc " ) <nl> # No indent for main docstring <nl> self . _check ( " Brief \ n \ nDocstring \ n \ nArgs : \ n arg1 : desc " , expected ) <nl> # 2 space indent for main docstring , blank lines not indented <nl> def test_regular ( self ) : <nl> expected ) <nl> <nl> def test_brief_only ( self ) : <nl> - expected = " Brief ( suffix ) \ n \ nGo away \ nInstructions " <nl> + expected = " Brief ( suffix ) \ n \ nWarning : Go away \ nInstructions " <nl> self . _check ( " Brief " , expected ) <nl> self . _check ( " Brief \ n " , expected ) <nl> self . 
_check ( " Brief \ n " , expected ) <nl> def test_brief_only ( self ) : <nl> self . _check ( " \ n Brief \ n " , expected ) <nl> <nl> def test_no_docstring ( self ) : <nl> - expected = " Nothing here \ n \ nGo away \ nInstructions " <nl> + expected = " Nothing here \ n \ nWarning : Go away \ nInstructions " <nl> self . _check ( None , expected ) <nl> self . _check ( " " , expected ) <nl> <nl> def test_no_empty_line ( self ) : <nl> - expected = " Brief ( suffix ) \ n \ nGo away \ nInstructions \ n \ nDocstring " <nl> + expected = " Brief ( suffix ) \ n \ nWarning : Go away \ nInstructions \ n \ nDocstring " <nl> # No second line indent <nl> self . _check ( " Brief \ nDocstring " , expected ) <nl> # 2 space second line indent <nl> mmm a / tensorflow / python / util / deprecation . py <nl> ppp b / tensorflow / python / util / deprecation . py <nl> def _add_deprecated_function_notice_to_docstring ( doc , date , instructions ) : <nl> ' ( deprecated ) ' , main_text ) <nl> <nl> <nl> - def _add_deprecated_arg_notice_to_docstring ( doc , date , instructions ) : <nl> + def _add_deprecated_arg_notice_to_docstring ( doc , date , instructions , <nl> + deprecated_names ) : <nl> " " " Adds a deprecation notice to a docstring for deprecated arguments . " " " <nl> + <nl> + deprecation_string = ' , ' . join ( sorted ( deprecated_names ) ) <nl> + <nl> return decorator_utils . add_notice_to_docstring ( <nl> - doc , instructions , <nl> - ' DEPRECATED FUNCTION ARGUMENTS ' , <nl> + doc , instructions , ' DEPRECATED FUNCTION ARGUMENTS ' , <nl> ' ( deprecated arguments ) ' , [ <nl> - ' SOME ARGUMENTS ARE DEPRECATED . ' <nl> - ' They will be removed % s . ' % ( <nl> - ' in a future version ' if date is None else ( ' after % s ' % date ) ) , <nl> - ' Instructions for updating : ' ] ) <nl> + ' SOME ARGUMENTS ARE DEPRECATED : ` ( % s ) ` . ' <nl> + ' They will be removed % s . ' % <nl> + ( deprecation_string , ' in a future version ' if date is None else <nl> + ( ' after % s ' % date ) ) , ' Instructions for updating : ' <nl> + ] ) <nl> + <nl> + <nl> + def _add_deprecated_arg_value_notice_to_docstring ( doc , date , instructions , <nl> + deprecated_name_value_dict ) : <nl> + " " " Adds a deprecation notice to a docstring for deprecated arguments . " " " <nl> + <nl> + deprecation_string = ' , ' . join ( <nl> + ' % s = % r ' % ( key , value ) <nl> + for key , value in sorted ( deprecated_name_value_dict . items ( ) ) ) <nl> + <nl> + when = ' in a future version ' if date is None else ( ' after % s ' % date ) <nl> + <nl> + return decorator_utils . add_notice_to_docstring ( <nl> + doc , instructions , ' DEPRECATED FUNCTION ARGUMENT VALUES ' , <nl> + ' ( deprecated argument values ) ' , [ <nl> + ' SOME ARGUMENT VALUES ARE DEPRECATED : ` ( % s ) ` . ' <nl> + ' They will be removed % s . ' % ( deprecation_string , when ) , <nl> + ' Instructions for updating : ' <nl> + ] ) <nl> <nl> <nl> def _validate_deprecation_args ( date , instructions ) : <nl> def _get_deprecated_positional_arguments ( names_to_ok_vals , arg_spec ) : <nl> pos , spec . has_ok_value , spec . ok_value ) <nl> return deprecated_positional_args <nl> <nl> + deprecated_arg_names = _get_arg_names_to_ok_vals ( ) <nl> + <nl> def deprecated_wrapper ( func ) : <nl> " " " Deprecation decorator . " " " <nl> decorator_utils . validate_callable ( func , ' deprecated_args ' ) <nl> - deprecated_arg_names = _get_arg_names_to_ok_vals ( ) <nl> <nl> arg_spec = tf_inspect . 
getfullargspec ( func ) <nl> deprecated_positions = _get_deprecated_positional_arguments ( <nl> def new_func ( * args , * * kwargs ) : <nl> ' in a future version ' if date is None else ( ' after % s ' % date ) , <nl> instructions ) <nl> return func ( * args , * * kwargs ) <nl> - return tf_decorator . make_decorator ( func , new_func , ' deprecated ' , <nl> - _add_deprecated_arg_notice_to_docstring ( <nl> - func . __doc__ , date , instructions ) ) <nl> + <nl> + doc = _add_deprecated_arg_notice_to_docstring ( <nl> + func . __doc__ , date , instructions , sorted ( deprecated_arg_names . keys ( ) ) ) <nl> + return tf_decorator . make_decorator ( func , new_func , ' deprecated ' , doc ) <nl> + <nl> return deprecated_wrapper <nl> <nl> <nl> def new_func ( * args , * * kwargs ) : <nl> func . __module__ , arg_name , arg_value , ' in a future version ' <nl> if date is None else ( ' after % s ' % date ) , instructions ) <nl> return func ( * args , * * kwargs ) <nl> - return tf_decorator . make_decorator ( func , new_func , ' deprecated ' , <nl> - _add_deprecated_arg_notice_to_docstring ( <nl> - func . __doc__ , date , instructions ) ) <nl> + <nl> + doc = _add_deprecated_arg_value_notice_to_docstring ( <nl> + func . __doc__ , date , instructions , deprecated_kwargs ) <nl> + return tf_decorator . make_decorator ( func , new_func , ' deprecated ' , doc ) <nl> + <nl> return deprecated_wrapper <nl> <nl> <nl> mmm a / tensorflow / python / util / deprecation_test . py <nl> ppp b / tensorflow / python / util / deprecation_test . py <nl> def _fn ( arg0 , arg1 ) : <nl> self . assertEqual ( <nl> " fn doc . ( deprecated ) " <nl> " \ n " <nl> - " \ nTHIS FUNCTION IS DEPRECATED . It will be removed in a future version . " <nl> + " \ nWarning : THIS FUNCTION IS DEPRECATED . " <nl> + " It will be removed in a future version . " <nl> " \ nInstructions for updating : \ n % s " <nl> " \ n " <nl> " \ nArgs : " <nl> def _fn ( arg0 , arg1 ) : <nl> self . assertEqual ( <nl> " fn doc . ( deprecated ) " <nl> " \ n " <nl> - " \ nTHIS FUNCTION IS DEPRECATED . It will be removed after % s . " <nl> + " \ nWarning : THIS FUNCTION IS DEPRECATED . It will be removed after % s . " <nl> " \ nInstructions for updating : \ n % s " <nl> " \ n " <nl> " \ nArgs : " <nl> def _fn ( arg0 , arg1 ) : <nl> self . assertEqual ( <nl> " fn doc . ( deprecated ) " <nl> " \ n " <nl> - " \ nTHIS FUNCTION IS DEPRECATED . It will be removed after % s . " <nl> + " \ nWarning : THIS FUNCTION IS DEPRECATED . It will be removed after % s . " <nl> " \ nInstructions for updating : \ n % s " % ( date , instructions ) , _fn . __doc__ ) <nl> <nl> # Assert calling new fn issues log warning . <nl> def _fn ( arg0 , arg1 ) : <nl> self . assertEqual ( <nl> " DEPRECATED FUNCTION " <nl> " \ n " <nl> - " \ nTHIS FUNCTION IS DEPRECATED . It will be removed after % s . " <nl> + " \ nWarning : THIS FUNCTION IS DEPRECATED . It will be removed after % s . " <nl> " \ nInstructions for updating : " <nl> " \ n % s " % ( date , instructions ) , _fn . __doc__ ) <nl> <nl> def _fn ( self , arg0 , arg1 ) : <nl> self . assertEqual ( <nl> " fn doc . ( deprecated ) " <nl> " \ n " <nl> - " \ nTHIS FUNCTION IS DEPRECATED . It will be removed after % s . " <nl> + " \ nWarning : THIS FUNCTION IS DEPRECATED . It will be removed after % s . " <nl> " \ nInstructions for updating : \ n % s " <nl> " \ n " <nl> " \ nArgs : " <nl> def _fn ( self , arg0 , arg1 ) : <nl> self . assertEqual ( <nl> " fn doc . ( deprecated ) " <nl> " \ n " <nl> - " \ nTHIS FUNCTION IS DEPRECATED . 
It will be removed after % s . " <nl> + " \ nWarning : THIS FUNCTION IS DEPRECATED . It will be removed after % s . " <nl> " \ nInstructions for updating : \ n % s " % ( date , instructions ) , <nl> getattr ( _Object , " _fn " ) . __doc__ ) <nl> <nl> def _fn ( self , arg0 , arg1 ) : <nl> self . assertEqual ( <nl> " DEPRECATED FUNCTION " <nl> " \ n " <nl> - " \ nTHIS FUNCTION IS DEPRECATED . It will be removed after % s . " <nl> + " \ nWarning : THIS FUNCTION IS DEPRECATED . It will be removed after % s . " <nl> " \ nInstructions for updating : " <nl> - " \ n % s " % ( date , instructions ) , getattr ( _Object , " _fn " ) . __doc__ ) <nl> + " \ n % s " % ( date , instructions ) , <nl> + getattr ( _Object , " _fn " ) . __doc__ ) <nl> <nl> # Assert calling new fn issues log warning . <nl> self . assertEqual ( 3 , _Object ( ) . _fn ( 1 , 2 ) ) <nl> def _prop ( self ) : <nl> self . assertEqual ( <nl> " prop doc . ( deprecated ) " <nl> " \ n " <nl> - " \ nTHIS FUNCTION IS DEPRECATED . It will be removed after % s . " <nl> + " \ nWarning : THIS FUNCTION IS DEPRECATED . It will be removed after % s . " <nl> " \ nInstructions for updating : " <nl> " \ n % s " <nl> " \ n " <nl> " \ nReturns : " <nl> - " \ n String . " % ( date , instructions ) , getattr ( _Object , " _prop " ) . __doc__ ) <nl> + " \ n String . " % ( date , instructions ) , <nl> + getattr ( _Object , " _prop " ) . __doc__ ) <nl> <nl> # Assert calling new fn issues log warning . <nl> self . assertEqual ( " prop_with_doc " , _Object ( ) . _prop ) <nl> def _prop ( self ) : <nl> self . assertEqual ( <nl> " DEPRECATED FUNCTION " <nl> " \ n " <nl> - " \ nTHIS FUNCTION IS DEPRECATED . It will be removed after % s . " <nl> + " \ nWarning : THIS FUNCTION IS DEPRECATED . It will be removed after % s . " <nl> " \ nInstructions for updating : " <nl> - " \ n % s " % ( date , instructions ) , getattr ( _Object , " _prop " ) . __doc__ ) <nl> + " \ n % s " % ( date , instructions ) , <nl> + getattr ( _Object , " _prop " ) . __doc__ ) <nl> <nl> # Assert calling new fn issues log warning . <nl> self . assertEqual ( " prop_no_doc " , _Object ( ) . _prop ) <nl> def _fn ( arg0 , arg1 , deprecated = True ) : <nl> self . assertEqual ( <nl> " fn doc . ( deprecated arguments ) " <nl> " \ n " <nl> - " \ nSOME ARGUMENTS ARE DEPRECATED . They will be removed after % s . " <nl> + " \ nWarning : SOME ARGUMENTS ARE DEPRECATED : ` ( deprecated ) ` . " <nl> + " They will be removed after % s . " <nl> " \ nInstructions for updating : \ n % s " <nl> " \ n " <nl> " \ nArgs : " <nl> def _fn ( arg0 , arg1 , deprecated = True ) : <nl> self . assertEqual ( <nl> " fn doc . ( deprecated arguments ) " <nl> " \ n " <nl> - " \ nSOME ARGUMENTS ARE DEPRECATED . They will be removed after % s . " <nl> + " \ nWarning : SOME ARGUMENTS ARE DEPRECATED : ` ( deprecated ) ` . " <nl> + " They will be removed after % s . " <nl> " \ nInstructions for updating : \ n % s " % ( date , instructions ) , _fn . __doc__ ) <nl> <nl> # Assert calls without the deprecated argument log nothing . <nl> def _fn ( arg0 , arg1 , deprecated = True ) : <nl> self . assertEqual ( <nl> " DEPRECATED FUNCTION ARGUMENTS " <nl> " \ n " <nl> - " \ nSOME ARGUMENTS ARE DEPRECATED . They will be removed after % s . " <nl> + " \ nWarning : SOME ARGUMENTS ARE DEPRECATED : ` ( deprecated ) ` . " <nl> + " They will be removed after % s . " <nl> " \ nInstructions for updating : " <nl> " \ n % s " % ( date , instructions ) , _fn . 
__doc__ ) <nl> <nl> def _fn ( arg0 , arg1 , deprecated = True ) : <nl> # Assert function docs are properly updated . <nl> self . assertEqual ( " _fn " , _fn . __name__ ) <nl> self . assertEqual ( <nl> - " fn doc . ( deprecated arguments ) " <nl> + " fn doc . ( deprecated argument values ) " <nl> " \ n " <nl> - " \ nSOME ARGUMENTS ARE DEPRECATED . They will be removed after % s . " <nl> + " \ nWarning : SOME ARGUMENT VALUES ARE DEPRECATED : ` ( deprecated = True ) ` . " <nl> + " They will be removed after % s . " <nl> " \ nInstructions for updating : \ n % s " <nl> " \ n " <nl> " \ nArgs : " <nl> def _fn ( arg0 , arg1 , deprecated = True ) : <nl> # Assert function docs are properly updated . <nl> self . assertEqual ( " _fn " , _fn . __name__ ) <nl> self . assertEqual ( <nl> - " fn doc . ( deprecated arguments ) " <nl> + " fn doc . ( deprecated argument values ) " <nl> " \ n " <nl> - " \ nSOME ARGUMENTS ARE DEPRECATED . They will be removed after % s . " <nl> + " \ nWarning : SOME ARGUMENT VALUES ARE DEPRECATED : ` ( deprecated = True ) ` . " <nl> + " They will be removed after % s . " <nl> " \ nInstructions for updating : \ n % s " % ( date , instructions ) , _fn . __doc__ ) <nl> <nl> # Assert calling new fn with non - deprecated value logs nothing . <nl> def _fn ( arg0 , arg1 , deprecated = True ) : <nl> # Assert function docs are properly updated . <nl> self . assertEqual ( " _fn " , _fn . __name__ ) <nl> self . assertEqual ( <nl> - " DEPRECATED FUNCTION ARGUMENTS " <nl> + " DEPRECATED FUNCTION ARGUMENT VALUES " <nl> " \ n " <nl> - " \ nSOME ARGUMENTS ARE DEPRECATED . They will be removed after % s . " <nl> + " \ nWarning : SOME ARGUMENT VALUES ARE DEPRECATED : ` ( deprecated = True ) ` . " <nl> + " They will be removed after % s . " <nl> " \ nInstructions for updating : " <nl> " \ n % s " % ( date , instructions ) , _fn . __doc__ ) <nl> <nl>
Add better warnings for deprecation tools .
tensorflow/tensorflow
c975cefc3d9516e1e7b8799ac86b908fd9a1dcdc
2018-11-06T06:35:23Z
mmm a / editor / plugins / texture_editor_plugin . cpp <nl> ppp b / editor / plugins / texture_editor_plugin . cpp <nl> void TextureEditor : : _notification ( int p_what ) { <nl> / / In the case of CurveTextures we know they are 1 in height , so fill the preview to see the gradient <nl> ofs_y = 0 ; <nl> tex_height = size . height ; <nl> + } else if ( Object : : cast_to < GradientTexture > ( * texture ) ) { <nl> + ofs_y = size . height / 4 . 0 ; <nl> + tex_height = size . height / 2 . 0 ; <nl> } <nl> <nl> draw_texture_rect ( texture , Rect2 ( ofs_x , ofs_y , tex_width , tex_height ) ) ; <nl>
Fix gradient texture preview
godotengine/godot
3ba43ac975fae174454f38ec7ae9979ea0eb5964
2018-02-03T09:10:44Z
mmm a / caffe2 / python / layers_test . py <nl> ppp b / caffe2 / python / layers_test . py <nl> def testPairwiseDotProductWithXandYEmbeddingsAndGather ( self ) : <nl> <nl> output_idx = [ 1 , 3 , 5 ] <nl> output_idx_blob = self . model . add_global_constant ( <nl> - self . model . net . NextScopedBlob ( ' pairwise_dot_product_gather ' ) , <nl> + str ( self . model . net . NextScopedBlob ( ' pairwise_dot_product_gather ' ) ) , <nl> output_idx , <nl> dtype = np . int32 , <nl> ) <nl>
enforce global_constant name should be a string
pytorch/pytorch
95626737d09b729e6bb88f40a4356eb136c965f8
2018-02-04T09:02:27Z
mmm a / src / mongo / db / repl / SConscript <nl> ppp b / src / mongo / db / repl / SConscript <nl> env . Library ( <nl> ] , <nl> LIBDEPS = [ <nl> ' oplog_entry ' , <nl> - ' replication_executor ' , <nl> + ' $ BUILD_DIR / mongo / db / service_context ' , <nl> + ' $ BUILD_DIR / mongo / executor / task_executor_interface ' , <nl> ] , <nl> ) <nl> <nl> env . CppUnitTest ( <nl> ' data_replicator ' , <nl> ' data_replicator_external_state_mock ' , <nl> ' replication_executor_test_fixture ' , <nl> + ' $ BUILD_DIR / mongo / executor / thread_pool_task_executor_test_fixture ' , <nl> ' $ BUILD_DIR / mongo / unittest / concurrency ' , <nl> ] , <nl> ) <nl> mmm a / src / mongo / db / repl / data_replicator . cpp <nl> ppp b / src / mongo / db / repl / data_replicator . cpp <nl> Status DataReplicator : : _scheduleApplyBatch_inlock ( const Operations & ops ) { <nl> stdx : : placeholders : : _3 ) ; <nl> <nl> auto lambda = [ this ] ( const TimestampStatus & ts , const Operations & theOps ) { <nl> + if ( ErrorCodes : : CallbackCanceled = = ts ) { <nl> + return ; <nl> + } <nl> CBHStatus status = _exec - > scheduleWork ( stdx : : bind ( & DataReplicator : : _onApplyBatchFinish , <nl> this , <nl> stdx : : placeholders : : _1 , <nl> Status DataReplicator : : _scheduleApplyBatch_inlock ( const Operations & ops ) { <nl> _exec - > wait ( status . getValue ( ) ) ; <nl> } ; <nl> <nl> - _applier . reset ( new MultiApplier ( _exec , ops , applierFn , multiApplyFn , lambda ) ) ; <nl> + auto executor = _dataReplicatorExternalState - > getTaskExecutor ( ) ; <nl> + _applier = stdx : : make_unique < MultiApplier > ( executor , ops , applierFn , multiApplyFn , lambda ) ; <nl> return _applier - > start ( ) ; <nl> } <nl> <nl> mmm a / src / mongo / db / repl / data_replicator_external_state_mock . cpp <nl> ppp b / src / mongo / db / repl / data_replicator_external_state_mock . cpp <nl> DataReplicatorExternalStateMock : : DataReplicatorExternalStateMock ( ) <nl> MultiApplier : : ApplyOperationFn ) { return ops . back ( ) . getOpTime ( ) ; } ) { } <nl> <nl> executor : : TaskExecutor * DataReplicatorExternalStateMock : : getTaskExecutor ( ) const { <nl> - return nullptr ; <nl> + return taskExecutor ; <nl> } <nl> <nl> OpTimeWithTerm DataReplicatorExternalStateMock : : getCurrentTermAndLastCommittedOpTime ( ) { <nl> mmm a / src / mongo / db / repl / data_replicator_external_state_mock . h <nl> ppp b / src / mongo / db / repl / data_replicator_external_state_mock . h <nl> class DataReplicatorExternalStateMock : public DataReplicatorExternalState { <nl> <nl> std : : unique_ptr < OplogBuffer > makeSteadyStateOplogBuffer ( OperationContext * txn ) const override ; <nl> <nl> + / / Task executor . Not owned by us . <nl> + executor : : TaskExecutor * taskExecutor = nullptr ; <nl> + <nl> / / Returned by getCurrentTermAndLastCommittedOpTime . <nl> long long currentTerm = OpTime : : kUninitializedTerm ; <nl> OpTime lastCommittedOpTime ; <nl> mmm a / src / mongo / db / repl / data_replicator_test . cpp <nl> ppp b / src / mongo / db / repl / data_replicator_test . cpp <nl> <nl> # include " mongo / db / repl / sync_source_resolver . h " <nl> # include " mongo / db / repl / sync_source_selector . h " <nl> # include " mongo / db / repl / update_position_args . h " <nl> + # include " mongo / executor / network_interface_factory . h " <nl> # include " mongo / executor / network_interface_mock . h " <nl> + # include " mongo / executor / thread_pool_task_executor . h " <nl> # include " mongo / stdx / mutex . 
h " <nl> # include " mongo / util / concurrency / thread_name . h " <nl> + # include " mongo / util / concurrency / thread_pool . h " <nl> # include " mongo / util / fail_point_service . h " <nl> # include " mongo / util / log . h " <nl> # include " mongo / util / mongoutils / str . h " <nl> class DataReplicatorTest : public ReplicationExecutorTest , public SyncSourceSele <nl> return config ; <nl> } ; <nl> <nl> + ThreadPool : : Options threadPoolOptions ; <nl> + threadPoolOptions . poolName = " replication " ; <nl> + threadPoolOptions . minThreads = 1U ; <nl> + threadPoolOptions . maxThreads = 1U ; <nl> + threadPoolOptions . onCreateThread = [ ] ( const std : : string & threadName ) { <nl> + Client : : initThread ( threadName . c_str ( ) ) ; <nl> + } ; <nl> + / / This task executor is used by the MultiApplier only and should not be used to schedule <nl> + / / remote commands . <nl> + _applierTaskExecutor = stdx : : make_unique < executor : : ThreadPoolTaskExecutor > ( <nl> + stdx : : make_unique < ThreadPool > ( threadPoolOptions ) , <nl> + executor : : makeNetworkInterface ( " DataReplicatorTest - ASIO " ) ) ; <nl> + _applierTaskExecutor - > startup ( ) ; <nl> + <nl> auto dataReplicatorExternalState = stdx : : make_unique < DataReplicatorExternalStateMock > ( ) ; <nl> + dataReplicatorExternalState - > taskExecutor = _applierTaskExecutor . get ( ) ; <nl> dataReplicatorExternalState - > currentTerm = 1LL ; <nl> dataReplicatorExternalState - > lastCommittedOpTime = _myLastOpTime ; <nl> _externalState = dataReplicatorExternalState . get ( ) ; <nl> class DataReplicatorTest : public ReplicationExecutorTest , public SyncSourceSele <nl> void tearDown ( ) override { <nl> ReplicationExecutorTest : : tearDown ( ) ; <nl> _dr . reset ( ) ; <nl> + _applierTaskExecutor - > shutdown ( ) ; <nl> + _applierTaskExecutor - > join ( ) ; <nl> / / Executor may still invoke callback before shutting down . <nl> } <nl> <nl> class DataReplicatorTest : public ReplicationExecutorTest , public SyncSourceSele <nl> OpTime _myLastOpTime ; <nl> MemberState _memberState ; <nl> std : : unique_ptr < SyncSourceSelector > _syncSourceSelector ; <nl> + std : : unique_ptr < executor : : TaskExecutor > _applierTaskExecutor ; <nl> <nl> private : <nl> DataReplicatorExternalStateMock * _externalState ; <nl> mmm a / src / mongo / db / repl / multiapplier . cpp <nl> ppp b / src / mongo / db / repl / multiapplier . cpp <nl> <nl> <nl> # include < algorithm > <nl> <nl> + # include " mongo / db / client . h " <nl> # include " mongo / db / operation_context . h " <nl> # include " mongo / db / repl / optime . h " <nl> - # include " mongo / db / repl / replication_executor . h " <nl> # include " mongo / util / destructor_guard . h " <nl> <nl> namespace mongo { <nl> namespace repl { <nl> <nl> - MultiApplier : : MultiApplier ( ReplicationExecutor * executor , <nl> + MultiApplier : : MultiApplier ( executor : : TaskExecutor * executor , <nl> const Operations & operations , <nl> const ApplyOperationFn & applyOperation , <nl> const MultiApplyFn & multiApply , <nl> Status MultiApplier : : start ( ) { <nl> return Status ( ErrorCodes : : IllegalOperation , " applier already started " ) ; <nl> } <nl> <nl> - auto scheduleResult = _executor - > scheduleDBWork ( <nl> - stdx : : bind ( & MultiApplier : : _callback , this , stdx : : placeholders : : _1 ) ) ; <nl> + auto scheduleResult = <nl> + _executor - > scheduleWork ( stdx : : bind ( & MultiApplier : : _callback , this , stdx : : placeholders : : _1 ) ) ; <nl> if ( ! scheduleResult . 
isOK ( ) ) { <nl> return scheduleResult . getStatus ( ) ; <nl> } <nl> Status MultiApplier : : start ( ) { <nl> } <nl> <nl> void MultiApplier : : cancel ( ) { <nl> - ReplicationExecutor : : CallbackHandle dbWorkCallbackHandle ; <nl> + executor : : TaskExecutor : : CallbackHandle dbWorkCallbackHandle ; <nl> { <nl> stdx : : lock_guard < stdx : : mutex > lk ( _mutex ) ; <nl> <nl> void MultiApplier : : wait ( ) { <nl> } <nl> <nl> / / TODO change the passed in function to be multiapply instead of apply inlock <nl> - void MultiApplier : : _callback ( const ReplicationExecutor : : CallbackArgs & cbd ) { <nl> + void MultiApplier : : _callback ( const executor : : TaskExecutor : : CallbackArgs & cbd ) { <nl> if ( ! cbd . status . isOK ( ) ) { <nl> _finishCallback ( cbd . status , _operations ) ; <nl> return ; <nl> } <nl> <nl> - invariant ( cbd . txn ) ; <nl> + auto txn = cc ( ) . makeOperationContext ( ) ; <nl> <nl> / / Refer to multiSyncApply ( ) and multiInitialSyncApply ( ) in sync_tail . cpp . <nl> - cbd . txn - > setReplicatedWrites ( false ) ; <nl> + txn - > setReplicatedWrites ( false ) ; <nl> <nl> / / allow us to get through the magic barrier <nl> - cbd . txn - > lockState ( ) - > setIsBatchWriter ( true ) ; <nl> + txn - > lockState ( ) - > setIsBatchWriter ( true ) ; <nl> <nl> StatusWith < OpTime > applyStatus ( ErrorCodes : : InternalError , " not mutated " ) ; <nl> <nl> void MultiApplier : : _callback ( const ReplicationExecutor : : CallbackArgs & cbd ) { <nl> try { <nl> / / TODO restructure to support moving _operations into this call . Can ' t do it today since <nl> / / _finishCallback gets _operations on failure . <nl> - applyStatus = _multiApply ( cbd . txn , _operations , _applyOperation ) ; <nl> + applyStatus = _multiApply ( txn . get ( ) , _operations , _applyOperation ) ; <nl> } catch ( . . . ) { <nl> applyStatus = exceptionToStatus ( ) ; <nl> } <nl> void pauseBeforeCompletion ( const StatusWith < Timestamp > & result , <nl> } / / namespace <nl> <nl> StatusWith < std : : pair < std : : unique_ptr < MultiApplier > , MultiApplier : : Operations > > applyUntilAndPause ( <nl> - ReplicationExecutor * executor , <nl> + executor : : TaskExecutor * executor , <nl> const MultiApplier : : Operations & operations , <nl> const MultiApplier : : ApplyOperationFn & applyOperation , <nl> const MultiApplier : : MultiApplyFn & multiApply , <nl> mmm a / src / mongo / db / repl / multiapplier . h <nl> ppp b / src / mongo / db / repl / multiapplier . h <nl> <nl> # include " mongo / base / status_with . h " <nl> # include " mongo / db / jsobj . h " <nl> # include " mongo / db / repl / oplog_entry . h " <nl> - # include " mongo / db / repl / replication_executor . h " <nl> # include " mongo / db / service_context . h " <nl> + # include " mongo / executor / task_executor . h " <nl> # include " mongo / stdx / condition_variable . h " <nl> # include " mongo / stdx / functional . h " <nl> # include " mongo / stdx / mutex . h " <nl> class MultiApplier { <nl> * It is an error for ' operations ' to be empty but individual oplog entries <nl> * contained in ' operations ' are not validated . <nl> * / <nl> - MultiApplier ( ReplicationExecutor * executor , <nl> + MultiApplier ( executor : : TaskExecutor * executor , <nl> const Operations & operations , <nl> const ApplyOperationFn & applyOperation , <nl> const MultiApplyFn & multiApply , <nl> class MultiApplier { <nl> / * * <nl> * DB worker callback function - applies all operations . 
<nl> * / <nl> - void _callback ( const ReplicationExecutor : : CallbackArgs & cbd ) ; <nl> + void _callback ( const executor : : TaskExecutor : : CallbackArgs & cbd ) ; <nl> void _finishCallback ( const StatusWith < Timestamp > & result , const Operations & operations ) ; <nl> <nl> / / Not owned by us . <nl> - ReplicationExecutor * _executor ; <nl> + executor : : TaskExecutor * _executor ; <nl> <nl> Operations _operations ; <nl> ApplyOperationFn _applyOperation ; <nl> class MultiApplier { <nl> / / _active is true when MultiApplier is scheduled to be run by the executor . <nl> bool _active ; <nl> <nl> - ReplicationExecutor : : CallbackHandle _dbWorkCallbackHandle ; <nl> + executor : : TaskExecutor : : CallbackHandle _dbWorkCallbackHandle ; <nl> } ; <nl> <nl> <nl> class MultiApplier { <nl> using PauseDataReplicatorFn = stdx : : function < void ( ) > ; <nl> <nl> StatusWith < std : : pair < std : : unique_ptr < MultiApplier > , MultiApplier : : Operations > > applyUntilAndPause ( <nl> - ReplicationExecutor * executor , <nl> + executor : : TaskExecutor * executor , <nl> const MultiApplier : : Operations & operations , <nl> const MultiApplier : : ApplyOperationFn & applyOperation , <nl> const MultiApplier : : ApplyOperationFn & multiApply , <nl>
SERVER - 24784 migrated MultiApplier to use task executor instead of replication executor
mongodb/mongo
548115d4b81e9c90a021d3f21d4afea4bccb22d1
2016-06-29T02:07:14Z
new file mode 100644 <nl> index 000000000 . . d5217692c <nl> mmm / dev / null <nl> ppp b / . github / workflows / cmake . yml <nl> <nl> + name : CMake <nl> + <nl> + on : [ push ] <nl> + <nl> + env : <nl> + # Customize the CMake build type here ( Release , Debug , RelWithDebInfo , etc . ) <nl> + BUILD_TYPE : Release <nl> + <nl> + jobs : <nl> + build : <nl> + # The CMake configure and build commands are platform agnostic and should work equally <nl> + # well on Windows or Mac . You can convert this to a matrix build if you need <nl> + # cross - platform coverage . <nl> + # See : https : / / docs . github . com / en / free - pro - team @ latest / actions / learn - github - actions / managing - complex - workflows # using - a - build - matrix <nl> + runs - on : ubuntu - latest <nl> + <nl> + steps : <nl> + - uses : actions / checkout @ v2 <nl> + <nl> + - name : Create Build Environment <nl> + # Some projects don ' t allow in - source building , so create a separate build directory <nl> + # We ' ll use this as our working directory for all subsequent commands <nl> + run : cmake - E make_directory $ { { runner . workspace } } / build <nl> + <nl> + - name : Configure CMake <nl> + # Use a bash shell so we can use the same syntax for environment variable <nl> + # access regardless of the host operating system <nl> + shell : bash <nl> + working - directory : $ { { runner . workspace } } / build <nl> + # Note the current convention is to use the - S and - B options here to specify source <nl> + # and build directories , but this is only available with CMake 3 . 13 and higher . <nl> + # The CMake binaries on the Github Actions machines are ( as of this writing ) 3 . 12 <nl> + run : cmake $ GITHUB_WORKSPACE - DCMAKE_BUILD_TYPE = $ BUILD_TYPE <nl> + <nl> + - name : Build <nl> + working - directory : $ { { runner . workspace } } / build <nl> + shell : bash <nl> + # Execute the build . You can specify a specific target with " - - target < NAME > " <nl> + run : cmake - - build . - - config $ BUILD_TYPE <nl> + <nl> + - name : Test <nl> + working - directory : $ { { runner . workspace } } / build <nl> + shell : bash <nl> + # Execute tests defined by the CMake configuration . <nl> + # See https : / / cmake . org / cmake / help / latest / manual / ctest . 1 . html for more detail <nl> + run : ctest - C $ BUILD_TYPE <nl>
Create cmake . yml
fmtlib/fmt
1f4ff47b418cc78cfe26a21c503a9036e585b0db
2020-11-03T18:04:28Z
mmm a / hphp / runtime / base / execution - context . cpp <nl> ppp b / hphp / runtime / base / execution - context . cpp <nl> bool ExecutionContext : : callUserErrorHandler ( const Exception & e , int errnum , <nl> <nl> bool ExecutionContext : : onFatalError ( const Exception & e ) { <nl> MM ( ) . resetCouldOOM ( isStandardRequest ( ) ) ; <nl> + ThreadInfo : : s_threadInfo . getNoCheck ( ) - > m_reqInjectionData . resetTimer ( ) ; <nl> <nl> auto prefix = " \ nFatal error : " ; <nl> int errnum = static_cast < int > ( ErrorConstants : : ErrorModes : : FATAL_ERROR ) ; <nl> mmm a / hphp / runtime / base / program - functions . cpp <nl> ppp b / hphp / runtime / base / program - functions . cpp <nl> static void handle_exception_helper ( bool & ret , <nl> ContextOfException where , <nl> bool & error , <nl> bool richErrorMsg ) { <nl> + / / Clear oom / timeout while handling exception and restore them afterwards . <nl> + auto & data = ThreadInfo : : s_threadInfo . getNoCheck ( ) - > m_reqInjectionData ; <nl> + auto flags = data . getConditionFlags ( ) ; <nl> + auto origFlags = flags - > load ( ) & RequestInjectionData : : ResourceFlags ; <nl> + flags - > fetch_and ( ~ RequestInjectionData : : ResourceFlags ) ; <nl> + <nl> + SCOPE_EXIT { <nl> + flags - > fetch_or ( origFlags ) ; <nl> + } ; <nl> + <nl> try { <nl> bump_counter_and_rethrow ( false / * isPsp * / ) ; <nl> } catch ( const Eval : : DebuggerException & e ) { <nl> bool hphp_invoke ( ExecutionContext * context , const std : : string & cmd , <nl> } <nl> <nl> MM ( ) . resetCouldOOM ( isStandardRequest ( ) ) ; <nl> + ThreadInfo : : s_threadInfo . getNoCheck ( ) - > m_reqInjectionData . resetTimer ( ) ; <nl> <nl> LitstrTable : : get ( ) . setReading ( ) ; <nl> <nl> mmm a / hphp / runtime / base / request - injection - data . h <nl> ppp b / hphp / runtime / base / request - injection - data . h <nl> struct RequestInjectionData { <nl> / / flags that shouldn ' t be cleared by fetchAndClearFlags , because : <nl> / / fetchAndClearFlags is only supposed to touch flags related to PHP - visible <nl> / / signals / exceptions and resource limits <nl> + static const ssize_t ResourceFlags = RequestInjectionData : : MemExceededFlag | <nl> + RequestInjectionData : : TimedOutFlag | <nl> + RequestInjectionData : : CPUTimedOutFlag ; <nl> static const ssize_t StickyFlags = RequestInjectionData : : AsyncEventHookFlag | <nl> RequestInjectionData : : DebuggerHookFlag | <nl> RequestInjectionData : : EventHookFlag | <nl> RequestInjectionData : : InterceptFlag | <nl> - RequestInjectionData : : MemExceededFlag | <nl> - RequestInjectionData : : XenonSignalFlag ; <nl> + RequestInjectionData : : XenonSignalFlag | <nl> + RequestInjectionData : : ResourceFlags ; <nl> <nl> RequestInjectionData ( ) <nl> : cflagsPtr ( nullptr ) , <nl>
Make timeout flag sticky
facebook/hhvm
40f9d9547b3e13767575534686e5e1b061d2d029
2014-12-08T19:30:33Z
mmm a / stdlib / public / core / ThreadLocalStorage . swift <nl> ppp b / stdlib / public / core / ThreadLocalStorage . swift <nl> <nl> + / / = = = mmm ThreadLocalStorage . swift mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - = = = / / <nl> + / / <nl> + / / This source file is part of the Swift . org open source project <nl> + / / <nl> + / / Copyright ( c ) 2014 - 2017 Apple Inc . and the Swift project authors <nl> + / / Licensed under Apache License v2 . 0 with Runtime Library Exception <nl> + / / <nl> + / / See https : / / swift . org / LICENSE . txt for license information <nl> + / / See https : / / swift . org / CONTRIBUTORS . txt for the list of Swift project authors <nl> + / / <nl> + / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> + <nl> import SwiftShims <nl> <nl> / / For testing purposes , a thread - safe counter to guarantee that destructors get <nl>
[ gardening ] Add missing copyright header
apple/swift
1283248ebd228774a7829fa647b89b99da8a20b7
2017-05-24T19:27:28Z
mmm a / src / crankshaft / x87 / lithium - codegen - x87 . cc <nl> ppp b / src / crankshaft / x87 / lithium - codegen - x87 . cc <nl> void LCodeGen : : DoDeferredMaybeGrowElements ( LMaybeGrowElements * instr ) { <nl> <nl> LOperand * key = instr - > key ( ) ; <nl> if ( key - > IsConstantOperand ( ) ) { <nl> - __ mov ( ebx , ToImmediate ( key , Representation : : Smi ( ) ) ) ; <nl> + LConstantOperand * constant_key = LConstantOperand : : cast ( key ) ; <nl> + int32_t int_key = ToInteger32 ( constant_key ) ; <nl> + if ( Smi : : IsValid ( int_key ) ) { <nl> + __ mov ( ebx , Immediate ( Smi : : FromInt ( int_key ) ) ) ; <nl> + } else { <nl> + / / We should never get here at runtime because there is a smi check on <nl> + / / the key before this point . <nl> + __ int3 ( ) ; <nl> + } <nl> } else { <nl> __ Move ( ebx , ToRegister ( key ) ) ; <nl> __ SmiTag ( ebx ) ; <nl>
X87 : Bugfix : assert in lithium compile for LMaybeGrowElements .
v8/v8
4035d55c0c7090237371b39bffaffc91b61005b7
2016-04-18T02:12:02Z
mmm a / src / search_engine / engines / versions . txt <nl> ppp b / src / search_engine / engines / versions . txt <nl> btjunkie : 2 . 21 <nl> mininova : 1 . 40 <nl> piratebay : 1 . 30 <nl> vertor : 1 . 0 <nl> - torrentdownloads : 1 . 03 <nl> + torrentdownloads : 1 . 04 <nl>
Update version file
qbittorrent/qBittorrent
a150e24ce46c53e6d6674e8622ef29c2a6a01d07
2010-09-25T13:08:07Z
mmm a / samples / Cpp / TestCpp / Classes / RenderTextureTest / RenderTextureTest . cpp <nl> ppp b / samples / Cpp / TestCpp / Classes / RenderTextureTest / RenderTextureTest . cpp <nl> RenderTextureTestDepthStencil : : RenderTextureTestDepthStencil ( ) <nl> { <nl> auto s = Director : : getInstance ( ) - > getWinSize ( ) ; <nl> <nl> - auto sprite = Sprite : : create ( " Images / fire . png " ) ; <nl> - sprite - > setPosition ( Point ( s . width * 0 . 25f , 0 ) ) ; <nl> - sprite - > setScale ( 10 ) ; <nl> - auto rend = RenderTexture : : create ( s . width , s . height , Texture2D : : PixelFormat : : RGBA4444 , GL_DEPTH24_STENCIL8 ) ; <nl> + _spriteDS = Sprite : : create ( " Images / fire . png " ) ; <nl> + _spriteDS - > retain ( ) ; <nl> + _spriteDS - > setPosition ( Point ( s . width * 0 . 25f , 0 ) ) ; <nl> + _spriteDS - > setScale ( 10 ) ; <nl> + <nl> + _spriteDraw = Sprite : : create ( " Images / fire . png " ) ; <nl> + _spriteDraw - > retain ( ) ; <nl> + _spriteDraw - > setPosition ( Point ( s . width * 0 . 25f , 0 ) ) ; <nl> + _spriteDraw - > setScale ( 10 ) ; <nl> + / / ! move sprite half width and height , and draw only where not marked <nl> + _spriteDraw - > setPosition ( _spriteDraw - > getPosition ( ) + Point ( _spriteDraw - > getContentSize ( ) . width * _spriteDraw - > getScale ( ) * 0 . 5 , _spriteDraw - > getContentSize ( ) . height * _spriteDraw - > getScale ( ) * 0 . 5 ) ) ; <nl> + <nl> + _rend = RenderTexture : : create ( s . width , s . height , Texture2D : : PixelFormat : : RGBA4444 , GL_DEPTH24_STENCIL8 ) ; <nl> + <nl> + _rend - > setPosition ( Point ( s . width * 0 . 5f , s . height * 0 . 5f ) ) ; <nl> + <nl> + this - > addChild ( _rend ) ; <nl> + } <nl> + <nl> + RenderTextureTestDepthStencil : : ~ RenderTextureTestDepthStencil ( ) <nl> + { <nl> + CC_SAFE_RELEASE ( _spriteDraw ) ; <nl> + CC_SAFE_RELEASE ( _spriteDS ) ; <nl> + } <nl> <nl> + void RenderTextureTestDepthStencil : : draw ( ) <nl> + { <nl> _renderCmds [ 0 ] . init ( 0 , _vertexZ ) ; <nl> _renderCmds [ 0 ] . func = CC_CALLBACK_0 ( RenderTextureTestDepthStencil : : onBeforeClear , this ) ; <nl> Director : : getInstance ( ) - > getRenderer ( ) - > addCommand ( & _renderCmds [ 0 ] ) ; <nl> <nl> - rend - > beginWithClear ( 0 , 0 , 0 , 0 , 0 , 0 ) ; <nl> + _rend - > beginWithClear ( 0 , 0 , 0 , 0 , 0 , 0 ) ; <nl> <nl> _renderCmds [ 1 ] . init ( 0 , _vertexZ ) ; <nl> _renderCmds [ 1 ] . func = CC_CALLBACK_0 ( RenderTextureTestDepthStencil : : onBeforeStencil , this ) ; <nl> Director : : getInstance ( ) - > getRenderer ( ) - > addCommand ( & _renderCmds [ 1 ] ) ; <nl> <nl> - sprite - > visit ( ) ; <nl> - <nl> - / / ! move sprite half width and height , and draw only where not marked <nl> - sprite - > setPosition ( sprite - > getPosition ( ) + Point ( sprite - > getContentSize ( ) . width * sprite - > getScale ( ) * 0 . 5 , sprite - > getContentSize ( ) . height * sprite - > getScale ( ) * 0 . 5 ) ) ; <nl> - <nl> + _spriteDS - > visit ( ) ; <nl> + <nl> _renderCmds [ 2 ] . init ( 0 , _vertexZ ) ; <nl> _renderCmds [ 2 ] . func = CC_CALLBACK_0 ( RenderTextureTestDepthStencil : : onBeforDraw , this ) ; <nl> Director : : getInstance ( ) - > getRenderer ( ) - > addCommand ( & _renderCmds [ 2 ] ) ; <nl> <nl> - sprite - > visit ( ) ; <nl> - <nl> - rend - > end ( ) ; <nl> + _spriteDraw - > visit ( ) ; <nl> + <nl> + _rend - > end ( ) ; <nl> <nl> _renderCmds [ 3 ] . init ( 0 , _vertexZ ) ; <nl> _renderCmds [ 3 ] . 
func = CC_CALLBACK_0 ( RenderTextureTestDepthStencil : : onAfterDraw , this ) ; <nl> Director : : getInstance ( ) - > getRenderer ( ) - > addCommand ( & _renderCmds [ 3 ] ) ; <nl> <nl> - rend - > setPosition ( Point ( s . width * 0 . 5f , s . height * 0 . 5f ) ) ; <nl> - <nl> - this - > addChild ( rend ) ; <nl> } <nl> <nl> void RenderTextureTestDepthStencil : : onBeforeClear ( ) <nl> void RenderTextureTestDepthStencil : : onBeforeClear ( ) <nl> void RenderTextureTestDepthStencil : : onBeforeStencil ( ) <nl> { <nl> / / ! mark sprite quad into stencil buffer <nl> - / / glEnable ( GL_STENCIL_TEST ) ; <nl> + glEnable ( GL_STENCIL_TEST ) ; <nl> glStencilFunc ( GL_NEVER , 1 , 0xFF ) ; <nl> glStencilOp ( GL_REPLACE , GL_REPLACE , GL_REPLACE ) ; <nl> } <nl> void RenderTextureTestDepthStencil : : onBeforDraw ( ) <nl> <nl> void RenderTextureTestDepthStencil : : onAfterDraw ( ) <nl> { <nl> - / / glDisable ( GL_STENCIL_TEST ) ; <nl> + glDisable ( GL_STENCIL_TEST ) ; <nl> } <nl> <nl> std : : string RenderTextureTestDepthStencil : : title ( ) const <nl> mmm a / samples / Cpp / TestCpp / Classes / RenderTextureTest / RenderTextureTest . h <nl> ppp b / samples / Cpp / TestCpp / Classes / RenderTextureTest / RenderTextureTest . h <nl> class RenderTextureTestDepthStencil : public RenderTextureTest <nl> public : <nl> CREATE_FUNC ( RenderTextureTestDepthStencil ) ; <nl> RenderTextureTestDepthStencil ( ) ; <nl> + virtual ~ RenderTextureTestDepthStencil ( ) ; <nl> virtual std : : string title ( ) const override ; <nl> virtual std : : string subtitle ( ) const override ; <nl> + virtual void draw ( ) override ; <nl> private : <nl> CustomCommand _renderCmds [ 4 ] ; <nl> void onBeforeClear ( ) ; <nl> void onBeforeStencil ( ) ; <nl> void onBeforDraw ( ) ; <nl> void onAfterDraw ( ) ; <nl> + <nl> + private : <nl> + RenderTexture * _rend ; <nl> + Sprite * _spriteDS ; <nl> + Sprite * _spriteDraw ; <nl> } ; <nl> <nl> class RenderTextureTargetNode : public RenderTextureTest <nl>
fix RenderTextureTestDepthStencil test sample
cocos2d/cocos2d-x
8d9f62731e4a7308c500184f25952efd531d83cf
2013-12-31T06:48:07Z
mmm a / swoole_http_client . c <nl> ppp b / swoole_http_client . c <nl> static PHP_METHOD ( swoole_http_client , on ) <nl> { <nl> zend_update_property ( swoole_http_client_class_entry_ptr , getThis ( ) , ZEND_STRL ( " onClose " ) , zcallback TSRMLS_CC ) ; <nl> hcc - > onClose = sw_zend_read_property ( swoole_http_client_class_entry_ptr , getThis ( ) , ZEND_STRL ( " onClose " ) , 0 TSRMLS_CC ) ; <nl> - sw_copy_to_stack ( hcc - > onClose , hcc - > onClose ) ; <nl> + sw_copy_to_stack ( hcc - > onClose , hcc - > _onClose ) ; <nl> } <nl> else if ( strncasecmp ( " message " , cb_name , cb_name_len ) = = 0 ) <nl> { <nl>
fix swoole_http_client callback name error .
swoole/swoole-src
d7524d0ebc64211414b8172ed77711a55203ea55
2016-05-10T02:41:04Z
mmm a / dbms / tests / queries / 0_stateless / 00926_zookeeper_adaptive_index_granularity_replicated_merge_tree . sql <nl> ppp b / dbms / tests / queries / 0_stateless / 00926_zookeeper_adaptive_index_granularity_replicated_merge_tree . sql <nl> SELECT distinct ( marks ) from system . parts WHERE table = ' zero_rows_per_granule2 ' <nl> <nl> SELECT distinct ( marks ) from system . parts WHERE table = ' zero_rows_per_granule1 ' and database = ' test ' and active = 1 ; <nl> <nl> - SELECT sleep ( 0 . 5 ) Format Null ; <nl> + SELECT sleep ( 0 . 7 ) Format Null ; <nl> <nl> OPTIMIZE TABLE test . zero_rows_per_granule2 FINAL ; <nl> <nl> SELECT distinct ( marks ) from system . parts WHERE table = ' four_rows_per_granule2 ' <nl> <nl> SELECT distinct ( marks ) from system . parts WHERE table = ' four_rows_per_granule1 ' and database = ' test ' and active = 1 ; <nl> <nl> - SELECT sleep ( 0 . 5 ) Format Null ; <nl> + SELECT sleep ( 0 . 7 ) Format Null ; <nl> <nl> OPTIMIZE TABLE test . four_rows_per_granule2 FINAL ; <nl> <nl> ATTACH TABLE test . adaptive_granularity_alter1 ; <nl> <nl> INSERT INTO test . adaptive_granularity_alter1 ( p , k , v1 , v2 ) VALUES ( ' 2018 - 05 - 15 ' , 100 , 1000 , ' aaaa ' ) , ( ' 2018 - 05 - 16 ' , 101 , 3000 , ' bbbb ' ) , ( ' 2018 - 05 - 17 ' , 102 , 5000 , ' cccc ' ) , ( ' 2018 - 05 - 19 ' , 103 , 7000 , ' dddd ' ) ; <nl> <nl> - SELECT sleep ( 0 . 5 ) Format Null ; <nl> + SELECT sleep ( 0 . 7 ) Format Null ; <nl> <nl> OPTIMIZE TABLE test . adaptive_granularity_alter1 FINAL ; <nl> <nl> mmm a / dbms / tests / queries / 0_stateless / 00933_ttl_simple . sql <nl> ppp b / dbms / tests / queries / 0_stateless / 00933_ttl_simple . sql <nl> create table ttl_00933_1 ( d DateTime , a Int ttl toDateTime ( 1 ) ) engine = MergeTre <nl> create table ttl_00933_1 ( d DateTime , a Int ttl d - d ) engine = MergeTree order by tuple ( ) partition by toSecond ( d ) ; - - { serverError 450 } <nl> <nl> drop table if exists ttl_00933_1 ; <nl> - <nl> - / * <nl> - <nl> - Alexey Milovidov , [ 17 . 04 . 19 20 : 09 ] <nl> - sleep ( 0 . 7 ) <nl> - sleep ( 1 . 1 ) <nl> - - почему ? @ Alesapin <nl> - <nl> - Alexander Sapin , [ 17 . 04 . 19 23 : 16 ] <nl> - [ In reply to Alexey Milovidov ] <nl> - 1 . 1 по логике теста , я попробовал с 0 . 5 и у меня флапнуло . С 1 не флапало , но работало долго . Попробовал 0 . 7 и тоже не флапает . <nl> - <nl> - Alexey Milovidov , [ 17 . 04 . 19 23 : 18 ] <nl> - Слабо такой комментарий добавить прямо в тест ? : ) <nl> - <nl> - Alexander Sapin , [ 17 . 04 . 19 23 : 20 ] <nl> - как - то неловко : ) <nl> - <nl> - * / <nl>
Increase sleep time in flappy test and remove jokes
ClickHouse/ClickHouse
7a6365dbf1436e82ddd2bd1c3642040a95e88afe
2019-04-22T10:56:10Z
mmm a / src / share / core_configuration / profile / simple_modifications . hpp <nl> ppp b / src / share / core_configuration / profile / simple_modifications . hpp <nl> <nl> <nl> class simple_modifications final { <nl> public : <nl> + class definition final { <nl> + public : <nl> + definition ( const std : : string & type , <nl> + const std : : string & value ) : type_ ( type ) , <nl> + value_ ( value ) { <nl> + } <nl> + <nl> + nlohmann : : json to_json ( void ) const { <nl> + return nlohmann : : json ( { <nl> + { type_ , value_ } , <nl> + } ) ; <nl> + } <nl> + <nl> + const std : : string & get_type ( void ) const { <nl> + return type_ ; <nl> + } <nl> + <nl> + void set_type ( const std : : string & value ) { <nl> + type_ = value ; <nl> + } <nl> + <nl> + const std : : string & get_value ( void ) const { <nl> + return value_ ; <nl> + } <nl> + <nl> + void set_value ( const std : : string & value ) { <nl> + value_ = value ; <nl> + } <nl> + <nl> + bool valid ( void ) const { <nl> + return ! type_ . empty ( ) & & <nl> + ! value_ . empty ( ) ; <nl> + } <nl> + <nl> + bool compare ( const definition & other ) const { <nl> + if ( type_ ! = other . type_ ) { <nl> + return SI : : natural : : compare < std : : string > ( type_ , other . type_ ) ; <nl> + } else { <nl> + return SI : : natural : : compare < std : : string > ( value_ , other . value_ ) ; <nl> + } <nl> + } <nl> + <nl> + bool operator = = ( const definition & other ) const { <nl> + / / Do not compare ` from_mandatory_modifiers_ ` . <nl> + return type_ = = other . type_ & & <nl> + value_ = = other . value_ ; <nl> + } <nl> + <nl> + private : <nl> + std : : string type_ ; <nl> + std : : string value_ ; <nl> + } ; <nl> + <nl> simple_modifications ( const nlohmann : : json & json ) { <nl> if ( json . is_object ( ) ) { <nl> for ( auto it = json . begin ( ) ; it ! = json . end ( ) ; + + it ) { <nl>
add simple_modifications : : definition
pqrs-org/Karabiner-Elements
6dba39389ea16098076ce298e754face33b518a5
2017-08-27T10:46:27Z
mmm a / jstests / where1 . js <nl> ppp b / jstests / where1 . js <nl> <nl> <nl> - db = connect ( " test " ) ; <nl> t = db . getCollection ( " where1 " ) ; <nl> t . drop ( ) ; <nl> <nl> t . save ( { a : 1 } ) ; <nl> t . save ( { a : 2 } ) ; <nl> t . save ( { a : 3 } ) ; <nl> <nl> - assert . eq ( 1 , t . find ( function ( ) { return this . a = = 2 ; } ) . length ( ) ) ; <nl> + assert . eq ( 1 , t . find ( function ( ) { return this . a = = 2 ; } ) . length ( ) , " A " ) ; <nl> + <nl> + assert . eq ( 1 , t . find ( { $ where : " this . a = = 2 " } ) . toArray ( ) . length , " B " ) ; <nl> + <nl> + assert . eq ( 1 , t . find ( " this . a = = 2 " ) . toArray ( ) . length , " C " ) ; <nl> mmm a / shell / MongoJS . cpp <nl> ppp b / shell / MongoJS . cpp <nl> Local < v8 : : Object > mongoToV8 ( BSONObj & m , bool array ) { <nl> void v8ToMongoElement ( BSONObjBuilder & b , v8 : : Handle < v8 : : String > name , const string sname , v8 : : Handle < v8 : : Value > value ) { <nl> <nl> if ( value - > IsString ( ) ) { <nl> - b . append ( sname . c_str ( ) , toSTLString ( value ) . c_str ( ) ) ; <nl> + if ( sname = = " $ where " ) <nl> + b . appendCode ( sname . c_str ( ) , toSTLString ( value ) . c_str ( ) ) ; <nl> + else <nl> + b . append ( sname . c_str ( ) , toSTLString ( value ) . c_str ( ) ) ; <nl> return ; <nl> } <nl> <nl> mmm a / shell / collection . js <nl> ppp b / shell / collection . js <nl> DBCollection . prototype . _massageObject = function ( q ) { <nl> if ( q . length = = 24 ) <nl> return { _id : q } ; <nl> <nl> - throw " don ' t know how to handle string [ " + q + " ] " ; <nl> + return { $ where : q } ; <nl> } <nl> <nl> throw " don ' t know how to massage : " + type ; <nl>
$ where fix
mongodb/mongo
4a01d610adee3412673a8326c4fbe5261e85db2b
2009-01-29T20:58:54Z
mmm a / test / intl / break - iterator / subclass . js <nl> ppp b / test / intl / break - iterator / subclass . js <nl> <nl> / / Use of this source code is governed by a BSD - style license that can be <nl> / / found in the LICENSE file . <nl> <nl> - / / Flags : - - harmony - intl - list - format <nl> - <nl> var locales = [ " tlh " , " id " , " en " ] ; <nl> var input = " foo and bar " ; <nl> var refBreakIterator = new Intl . v8BreakIterator ( locales ) ; <nl> mmm a / test / intl / list - format / constructor . js <nl> ppp b / test / intl / list - format / constructor . js <nl> <nl> / / Use of this source code is governed by a BSD - style license that can be <nl> / / found in the LICENSE file . <nl> <nl> - / / Flags : - - harmony - intl - list - format <nl> - <nl> / / ListFormat constructor can ' t be called as function . <nl> assertThrows ( ( ) = > Intl . ListFormat ( [ ' sr ' ] ) , TypeError ) ; <nl> <nl> mmm a / test / intl / list - format / format - en . js <nl> ppp b / test / intl / list - format / format - en . js <nl> <nl> / / Use of this source code is governed by a BSD - style license that can be <nl> / / found in the LICENSE file . <nl> <nl> - / / Flags : - - harmony - intl - list - format <nl> - <nl> / / The following test are not part of the comformance . Just some output in <nl> / / English to verify the format does return something reasonable for English . <nl> / / It may be changed when we update the CLDR data . <nl> mmm a / test / intl / list - format / format - to - parts . js <nl> ppp b / test / intl / list - format / format - to - parts . js <nl> <nl> / / Use of this source code is governed by a BSD - style license that can be <nl> / / found in the LICENSE file . <nl> <nl> - / / Flags : - - harmony - intl - list - format <nl> - <nl> function assertListFormat ( listFormat , input ) { <nl> var result ; <nl> try { <nl> mmm a / test / intl / list - format / format . js <nl> ppp b / test / intl / list - format / format . js <nl> <nl> / / Use of this source code is governed by a BSD - style license that can be <nl> / / found in the LICENSE file . <nl> <nl> - / / Flags : - - harmony - intl - list - format <nl> - <nl> function assertListFormat ( listFormat , input ) { <nl> try { <nl> let result = listFormat . format ( input ) ; <nl> mmm a / test / intl / list - format / formatToParts - zh . js <nl> ppp b / test / intl / list - format / formatToParts - zh . js <nl> <nl> / / Use of this source code is governed by a BSD - style license that can be <nl> / / found in the LICENSE file . <nl> <nl> - / / Flags : - - harmony - intl - list - format <nl> - <nl> / / The following test are not part of the comformance . Just some output in <nl> / / Chinese to verify the format does return something reasonable for Chinese . <nl> / / It may be changed when we update the CLDR data . <nl> mmm a / test / intl / list - format / resolved - options . js <nl> ppp b / test / intl / list - format / resolved - options . js <nl> <nl> / / Use of this source code is governed by a BSD - style license that can be <nl> / / found in the LICENSE file . <nl> <nl> - / / Flags : - - harmony - intl - list - format <nl> - <nl> let listFormat = new Intl . ListFormat ( ) ; <nl> / / The default style is ' long ' <nl> assertEquals ( ' long ' , listFormat . resolvedOptions ( ) . style ) ; <nl> mmm a / test / intl / list - format / supported - locale . js <nl> ppp b / test / intl / list - format / supported - locale . 
js <nl> <nl> / / Use of this source code is governed by a BSD - style license that can be <nl> / / found in the LICENSE file . <nl> <nl> - / / Flags : - - harmony - intl - list - format <nl> assertEquals ( typeof Intl . ListFormat . supportedLocalesOf , " function " , <nl> " Intl . ListFormat . supportedLocalesOf should be a function " ) ; <nl> <nl> mmm a / test / intl / regress - 8031 . js <nl> ppp b / test / intl / regress - 8031 . js <nl> <nl> / / Use of this source code is governed by a BSD - style license that can be <nl> / / found in the LICENSE file . <nl> <nl> - / / Flags : - - harmony - intl - list - format <nl> - <nl> var locales = [ " tlh " , " id " , " en " ] ; <nl> var input = [ " a " , " b " , " c " ] ; <nl> var referenceListFormat = new Intl . ListFormat ( locales ) ; <nl> mmm a / test / message / fail / list - format - style - narrow . js <nl> ppp b / test / message / fail / list - format - style - narrow . js <nl> <nl> / / Copyright 2015 the V8 project authors . All rights reserved . <nl> / / Use of this source code is governed by a BSD - style license that can be <nl> / / found in the LICENSE file . <nl> - / / <nl> - / / Flags : - - harmony - intl - list - format <nl> - <nl> new Intl . ListFormat ( " en " , { style : ' narrow ' } ) <nl> mmm a / test / message / fail / list - format - style - narrow . out <nl> ppp b / test / message / fail / list - format - style - narrow . out <nl> <nl> - * % ( basename ) s : 7 : RangeError : When style is ' narrow ' , ' unit ' is the only allowed value for the type option . <nl> + * % ( basename ) s : 4 : RangeError : When style is ' narrow ' , ' unit ' is the only allowed value for the type option . <nl> new Intl . ListFormat ( " en " , { style : ' narrow ' } ) <nl> ^ <nl> RangeError : When style is ' narrow ' , ' unit ' is the only allowed value for the type option . <nl> at new ListFormat ( < anonymous > ) <nl> - at * % ( basename ) s : 7 : 1 <nl> + at * % ( basename ) s : 4 : 1 <nl> <nl> <nl> mmm a / test / test262 / testcfg . py <nl> ppp b / test / test262 / testcfg . py <nl> <nl> ' String . prototype . matchAll ' : ' - - harmony - string - matchall ' , <nl> ' Symbol . matchAll ' : ' - - harmony - string - matchall ' , <nl> ' numeric - separator - literal ' : ' - - harmony - numeric - separator ' , <nl> - ' Intl . ListFormat ' : ' - - harmony - intl - list - format ' , <nl> ' Intl . Locale ' : ' - - harmony - locale ' , <nl> ' Intl . Segmenter ' : ' - - harmony - intl - segmenter ' , <nl> ' Symbol . prototype . description ' : ' - - harmony - symbol - description ' , <nl>
[ Intl ] Remove - - harmony - intl - list - format flag from test
v8/v8
6ee9ec5ca15492825cab87df4a26e091691a8869
2019-02-13T23:22:43Z
mmm a / tools / editor / io_plugins / editor_font_import_plugin . cpp <nl> ppp b / tools / editor / io_plugins / editor_font_import_plugin . cpp <nl> class EditorFontImportDialog : public ConfirmationDialog { <nl> return ; <nl> } <nl> <nl> + if ( dest - > get_line_edit ( ) - > get_text ( ) . get_file ( ) = = " . fnt " ) { <nl> + dest - > get_line_edit ( ) - > set_text ( dest - > get_line_edit ( ) - > get_text ( ) . get_base_dir ( ) + " / " + source - > get_line_edit ( ) - > get_text ( ) . get_file ( ) . basename ( ) + " . fnt " ) ; <nl> + } <nl> + <nl> Ref < ResourceImportMetadata > rimd = get_rimd ( ) ; <nl> <nl> if ( rimd . is_null ( ) ) { <nl>
Set default destination filename of imported font to be input font filename
godotengine/godot
6ffe1fff2df61a0b6648eea673bd693ea60c85ef
2015-12-04T18:29:27Z
mmm a / emcc . py <nl> ppp b / emcc . py <nl> def check ( input_file ) : <nl> value = ' " ' + value + ' " ' <nl> else : <nl> value = value . replace ( ' \ \ ' , ' \ \ \ \ ' ) <nl> - exec ( ' shared . Settings . ' + key + ' = ' + value , globals ( ) , locals ( ) ) <nl> + setattr ( shared . Settings , key , eval ( value ) ) <nl> if key = = ' EXPORTED_FUNCTIONS ' : <nl> # used for warnings in emscripten . py <nl> shared . Settings . ORIGINAL_EXPORTED_FUNCTIONS = original_exported_response or shared . Settings . EXPORTED_FUNCTIONS [ : ] <nl>
Use eval and setattr instead of exec ( )
emscripten-core/emscripten
63f446fc5f82a5642b80b9cab7258cd16b363e42
2017-10-18T22:28:39Z
mmm a / include / mlir / Dialect / StandardOps / Ops . td <nl> ppp b / include / mlir / Dialect / StandardOps / Ops . td <nl> def BranchOp : Std_Op < " br " , [ Terminator ] > { <nl> / / / Erase the operand at ' index ' from the operand list . <nl> void eraseOperand ( unsigned index ) ; <nl> } ] ; <nl> + <nl> + let hasCanonicalizer = 1 ; <nl> } <nl> <nl> def CallOp : Std_Op < " call " , [ CallOpInterface ] > { <nl> mmm a / include / mlir / IR / PatternMatch . h <nl> ppp b / include / mlir / IR / PatternMatch . h <nl> class PatternRewriter : public OpBuilder { <nl> / / / This method erases an operation that is known to have no uses . <nl> virtual void eraseOp ( Operation * op ) ; <nl> <nl> + / / / Merge the operations of block ' source ' into the end of block ' dest ' . <nl> + / / / ' source ' s predecessors must either be empty or only contain ' dest ` . <nl> + / / / ' argValues ' is used to replace the block arguments of ' source ' after <nl> + / / / merging . <nl> + virtual void mergeBlocks ( Block * source , Block * dest , <nl> + ArrayRef < Value * > argValues = llvm : : None ) ; <nl> + <nl> / / / Split the operations starting at " before " ( inclusive ) out of the given <nl> / / / block into a new block , and return it . <nl> - virtual Block * splitBlock ( Block * block , Block : : iterator before ) { <nl> - return block - > splitBlock ( before ) ; <nl> - } <nl> + virtual Block * splitBlock ( Block * block , Block : : iterator before ) ; <nl> <nl> / / / This method is used as the final notification hook for patterns that end <nl> / / / up modifying the pattern root in place , by changing its operands . This is <nl> mmm a / include / mlir / Transforms / DialectConversion . h <nl> ppp b / include / mlir / Transforms / DialectConversion . h <nl> class ConversionPatternRewriter final : public PatternRewriter { <nl> / / / PatternRewriter hook for splitting a block into two parts . <nl> Block * splitBlock ( Block * block , Block : : iterator before ) override ; <nl> <nl> + / / / PatternRewriter hook for merging a block into another . <nl> + void mergeBlocks ( Block * source , Block * dest , <nl> + ArrayRef < Value * > argValues ) override ; <nl> + <nl> / / / PatternRewriter hook for moving blocks out of a region . <nl> void inlineRegionBefore ( Region & region , Region & parent , <nl> Region : : iterator before ) override ; <nl> mmm a / lib / Dialect / StandardOps / Ops . cpp <nl> ppp b / lib / Dialect / StandardOps / Ops . cpp <nl> void AllocOp : : getCanonicalizationPatterns ( OwningRewritePatternList & results , <nl> / / BranchOp <nl> / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> <nl> + namespace { <nl> + / / / Simplify a branch to a block that has a single predecessor . This effectively <nl> + / / / merges the two blocks . <nl> + struct SimplifyBrToBlockWithSinglePred : public OpRewritePattern < BranchOp > { <nl> + using OpRewritePattern < BranchOp > : : OpRewritePattern ; <nl> + <nl> + PatternMatchResult matchAndRewrite ( BranchOp op , <nl> + PatternRewriter & rewriter ) const override { <nl> + / / Check that the successor block has a single predecessor . <nl> + Block * succ = op . getDest ( ) ; <nl> + Block * opParent = op . getOperation ( ) - > getBlock ( ) ; <nl> + if ( succ = = opParent | | ! has_single_element ( succ - > getPredecessors ( ) ) ) <nl> + return matchFailure ( ) ; <nl> + <nl> + / / Merge the successor into the current block and erase the branch . <nl> + rewriter . 
mergeBlocks ( succ , opParent , llvm : : to_vector < 1 > ( op . getOperands ( ) ) ) ; <nl> + rewriter . eraseOp ( op ) ; <nl> + return matchSuccess ( ) ; <nl> + } <nl> + } ; <nl> + } / / end anonymous namespace . <nl> + <nl> static ParseResult parseBranchOp ( OpAsmParser & parser , OperationState & result ) { <nl> Block * dest ; <nl> SmallVector < Value * , 4 > destOperands ; <nl> void BranchOp : : eraseOperand ( unsigned index ) { <nl> getOperation ( ) - > eraseSuccessorOperand ( 0 , index ) ; <nl> } <nl> <nl> + void BranchOp : : getCanonicalizationPatterns ( OwningRewritePatternList & results , <nl> + MLIRContext * context ) { <nl> + results . insert < SimplifyBrToBlockWithSinglePred > ( context ) ; <nl> + } <nl> + <nl> / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> / / CallOp <nl> / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> mmm a / lib / IR / PatternMatch . cpp <nl> ppp b / lib / IR / PatternMatch . cpp <nl> void PatternRewriter : : eraseOp ( Operation * op ) { <nl> op - > erase ( ) ; <nl> } <nl> <nl> + / / / Merge the operations of block ' source ' into the end of block ' dest ' . <nl> + / / / ' source ' s predecessors must be empty or only contain ' dest ` . <nl> + / / / ' argValues ' is used to replace the block arguments of ' source ' after <nl> + / / / merging . <nl> + void PatternRewriter : : mergeBlocks ( Block * source , Block * dest , <nl> + ArrayRef < Value * > argValues ) { <nl> + assert ( llvm : : all_of ( source - > getPredecessors ( ) , <nl> + [ dest ] ( Block * succ ) { return succ = = dest ; } ) & & <nl> + " expected ' source ' to have no predecessors or only ' dest ' " ) ; <nl> + assert ( argValues . size ( ) = = source - > getNumArguments ( ) & & <nl> + " incorrect # of argument replacement values " ) ; <nl> + <nl> + / / Replace all of the successor arguments with the provided values . <nl> + for ( auto it : llvm : : zip ( source - > getArguments ( ) , argValues ) ) <nl> + std : : get < 0 > ( it ) - > replaceAllUsesWith ( std : : get < 1 > ( it ) ) ; <nl> + <nl> + / / Splice the operations of the ' source ' block into the ' dest ' block and erase <nl> + / / it . <nl> + dest - > getOperations ( ) . splice ( dest - > end ( ) , source - > getOperations ( ) ) ; <nl> + source - > dropAllUses ( ) ; <nl> + source - > erase ( ) ; <nl> + } <nl> + <nl> + / / / Split the operations starting at " before " ( inclusive ) out of the given <nl> + / / / block into a new block , and return it . <nl> + Block * PatternRewriter : : splitBlock ( Block * block , Block : : iterator before ) { <nl> + return block - > splitBlock ( before ) ; <nl> + } <nl> + <nl> / / / op and newOp are known to have the same number of results , replace the <nl> / / / uses of op with uses of newOp <nl> void PatternRewriter : : replaceOpWithResultsOfAnotherOp ( <nl> mmm a / lib / Transforms / DialectConversion . cpp <nl> ppp b / lib / Transforms / DialectConversion . cpp <nl> Block * ConversionPatternRewriter : : splitBlock ( Block * block , <nl> return continuation ; <nl> } <nl> <nl> + / / / PatternRewriter hook for merging a block into another . <nl> + void ConversionPatternRewriter : : mergeBlocks ( Block * source , Block * dest , <nl> + ArrayRef < Value * > argValues ) { <nl> + / / TODO ( riverriddle ) This requires fixing the implementation of <nl> + / / ' replaceUsesOfBlockArgument ' , which currently isn ' t undoable . 
<nl> + llvm_unreachable ( " block merging updates are currently not supported " ) ; <nl> + } <nl> + <nl> / / / PatternRewriter hook for moving blocks out of a region . <nl> void ConversionPatternRewriter : : inlineRegionBefore ( Region & region , <nl> Region & parent , <nl> mmm a / test / Transforms / canonicalize . mlir <nl> ppp b / test / Transforms / canonicalize . mlir <nl> func @ const_fold_propagate ( ) - > memref < ? x ? xf32 > { <nl> return % Av : memref < ? x ? xf32 > <nl> } <nl> <nl> + / / CHECK - LABEL : func @ br_folding <nl> + func @ br_folding ( ) - > i32 { <nl> + / / CHECK - NEXT : % [ [ CST : . * ] ] = constant 0 : i32 <nl> + / / CHECK - NEXT : return % [ [ CST ] ] : i32 <nl> + % c0_i32 = constant 0 : i32 <nl> + br ^ bb1 ( % c0_i32 : i32 ) <nl> + ^ bb1 ( % x : i32 ) : <nl> + return % x : i32 <nl> + } <nl> + <nl> / / CHECK - LABEL : func @ cond_br_folding <nl> - func @ cond_br_folding ( % a : i32 ) { <nl> + func @ cond_br_folding ( % cond : i1 , % a : i32 ) { <nl> % false_cond = constant 0 : i1 <nl> % true_cond = constant 1 : i1 <nl> + cond_br % cond , ^ bb1 , ^ bb2 ( % a : i32 ) <nl> + <nl> + ^ bb1 : <nl> + / / CHECK : ^ bb1 : <nl> + / / CHECK - NEXT : br ^ bb3 <nl> + cond_br % true_cond , ^ bb3 , ^ bb2 ( % a : i32 ) <nl> + <nl> + ^ bb2 ( % x : i32 ) : <nl> + / / CHECK : ^ bb2 <nl> + / / CHECK : br ^ bb3 <nl> + cond_br % false_cond , ^ bb2 ( % x : i32 ) , ^ bb3 <nl> <nl> - / / CHECK - NEXT : br ^ bb1 ( % arg0 : i32 ) <nl> - cond_br % true_cond , ^ bb1 ( % a : i32 ) , ^ bb2 <nl> + ^ bb3 : <nl> + return <nl> + } <nl> + <nl> + / / CHECK - LABEL : func @ cond_br_and_br_folding <nl> + func @ cond_br_and_br_folding ( % a : i32 ) { <nl> + / / Test the compound folding of conditional and unconditional branches . <nl> + / / CHECK - NEXT : return <nl> + <nl> + % false_cond = constant 0 : i1 <nl> + % true_cond = constant 1 : i1 <nl> + cond_br % true_cond , ^ bb2 , ^ bb1 ( % a : i32 ) <nl> <nl> ^ bb1 ( % x : i32 ) : <nl> - / / CHECK : br ^ bb2 <nl> cond_br % false_cond , ^ bb1 ( % x : i32 ) , ^ bb2 <nl> <nl> ^ bb2 : <nl>
Add a PatternRewriter hook to merge blocks , and use it to support for folding branches .
tensorflow/tensorflow
951a54dd13a7da5e86d8fa71d794f2014ed3369f
2019-11-05T19:57:38Z
mmm a / fdbclient / SystemData . cpp <nl> ppp b / fdbclient / SystemData . cpp <nl> const KeyRef restoreRequestDoneKey = LiteralStringRef ( " \ xff \ x02 / restoreRequestDo <nl> const KeyRangeRef restoreRequestKeys ( LiteralStringRef ( " \ xff \ x02 / restoreRequests / " ) , <nl> LiteralStringRef ( " \ xff \ x02 / restoreRequests0 " ) ) ; <nl> <nl> + const KeyRangeRef restoreApplierKeys ( LiteralStringRef ( " \ xff \ x02 / restoreApplier / " ) , <nl> + LiteralStringRef ( " \ xff \ x02 / restoreApplier0 " ) ) ; <nl> + const KeyRef restoreApplierTxnValue = LiteralStringRef ( " 1 " ) ; <nl> + <nl> + / / restoreApplierKeys : track atomic transaction progress to ensure applying atomicOp exactly once <nl> + const Key restoreApplierKeyFor ( UID const & applierID , Version version ) { <nl> + BinaryWriter wr ( Unversioned ( ) ) ; <nl> + wr . serializeBytes ( restoreWorkersKeys . begin ) ; <nl> + wr < < applierID < < version ; <nl> + return wr . toValue ( ) ; <nl> + } <nl> + <nl> / / Encode restore worker key for workerID <nl> const Key restoreWorkerKeyFor ( UID const & workerID ) { <nl> BinaryWriter wr ( Unversioned ( ) ) ; <nl> mmm a / fdbclient / SystemData . h <nl> ppp b / fdbclient / SystemData . h <nl> extern const KeyRef restoreStatusKey ; / / To be used when we measure fast restore <nl> extern const KeyRef restoreRequestTriggerKey ; <nl> extern const KeyRef restoreRequestDoneKey ; <nl> extern const KeyRangeRef restoreRequestKeys ; <nl> + extern const KeyRangeRef restoreApplierKeys ; <nl> + extern const KeyRef restoreApplierTxnValue ; <nl> + <nl> + const Key restoreApplierKeyFor ( UID const & applierID , Version version ) ; <nl> const Key restoreWorkerKeyFor ( UID const & workerID ) ; <nl> const Value restoreWorkerInterfaceValue ( RestoreWorkerInterface const & server ) ; <nl> RestoreWorkerInterface decodeRestoreWorkerInterfaceValue ( ValueRef const & value ) ; <nl> mmm a / fdbserver / RestoreApplier . actor . cpp <nl> ppp b / fdbserver / RestoreApplier . actor . cpp <nl> ACTOR static Future < Void > handleSendMutationVectorRequest ( RestoreSendMutationVec <nl> return Void ( ) ; <nl> } <nl> <nl> + <nl> + / / Progress and checkpoint for applying ( atomic ) mutations in transactions to DB <nl> + struct DBApplyProgress { <nl> + / / Mutation state in the current uncommitted transaction <nl> + VersionedMutationsMap : : iterator curItInCurTxn ; <nl> + int curIndexInCurTxn ; <nl> + <nl> + / / Save the starting point for current txn to handle ( commit_unknown_result ) error in txn commit <nl> + / / startItInUncommittedTxn is starting iterator in the most recent uncommitted ( and failed ) txn <nl> + / / startIndexInUncommittedTxn is start index in the most recent uncommitted ( and failed ) txn . <nl> + / / Note : Txns have different number of mutations <nl> + VersionedMutationsMap : : iterator startItInUncommittedTxn ; <nl> + int startIndexInUncommittedTxn ; <nl> + <nl> + / / State to decide if a txn succeeds or not when txn error ( commit_unknown_result ) happens ; <nl> + / / curTxnId : The id of the current uncommitted txn , which monotonically increase for each successful transaction <nl> + / / uncommittedTxnId : The id of the most recent succeeded txn . Used to recover the failed txn id in retry <nl> + / / lastTxnHasError : Does the last txn has error . TODO : Only need to handle txn_commit_unknown error <nl> + Version curTxnId ; <nl> + Version uncommittedTxnId ; <nl> + bool lastTxnHasError ; <nl> + <nl> + / / Decide when to commit a transaction . 
We buffer enough mutations in a txn before commit the txn <nl> + bool startNextVersion ; / / The next txn will include mutations in next version <nl> + int numAtomicOps ; <nl> + double transactionSize ; <nl> + <nl> + Reference < RestoreApplierData > self ; <nl> + <nl> + DBApplyProgress ( ) = default ; <nl> + DBApplyProgress ( Reference < RestoreApplierData > self ) <nl> + : self ( self ) , curIndexInCurTxn ( 0 ) , startIndexInUncommittedTxn ( 0 ) , curTxnId ( 0 ) , uncommittedTxnId ( 0 ) , <nl> + lastTxnHasError ( false ) , startNextVersion ( false ) , numAtomicOps ( 0 ) , transactionSize ( 0 ) { <nl> + curItInCurTxn = self - > kvOps . begin ( ) ; <nl> + while ( curItInCurTxn ! = self - > kvOps . end ( ) & & curItInCurTxn - > second . empty ( ) ) { <nl> + curItInCurTxn + + ; <nl> + } <nl> + startItInUncommittedTxn = curItInCurTxn ; <nl> + } <nl> + <nl> + / / Has all mutations been committed ? <nl> + bool isDone ( ) { return curItInCurTxn = = self - > kvOps . end ( ) ; } <nl> + <nl> + / / Set cursor for next mutation <nl> + void nextMutation ( ) { <nl> + curIndexInCurTxn + + ; <nl> + while ( curItInCurTxn ! = self - > kvOps . end ( ) & & curIndexInCurTxn > = curItInCurTxn - > second . size ( ) ) { <nl> + curIndexInCurTxn = 0 ; <nl> + curItInCurTxn + + ; <nl> + startNextVersion = true ; <nl> + } <nl> + } <nl> + <nl> + / / Setup for the next transaction ; This should be done after nextMutation ( ) <nl> + void nextTxn ( ) { <nl> + transactionSize = 0 ; <nl> + numAtomicOps = 0 ; <nl> + lastTxnHasError = false ; <nl> + startNextVersion = false ; <nl> + <nl> + curTxnId + + ; <nl> + <nl> + startIndexInUncommittedTxn = curIndexInCurTxn ; <nl> + startItInUncommittedTxn = curItInCurTxn ; <nl> + uncommittedTxnId = curTxnId ; <nl> + } <nl> + <nl> + / / Rollback to the starting point of the uncommitted - and - failed transaction to <nl> + / / re - execute uncommitted txn <nl> + void rollback ( ) { <nl> + TraceEvent ( SevWarn , " FastRestore_ApplyTxnError " ) <nl> + . detail ( " TxnStatusFailed " , curTxnId ) <nl> + . detail ( " ApplierApplyToDB " , self - > id ( ) ) <nl> + . detail ( " UncommittedTxnId " , uncommittedTxnId ) <nl> + . detail ( " CurIteratorVersion " , curItInCurTxn - > first ) <nl> + . detail ( " StartIteratorVersionInUncommittedTxn " , startItInUncommittedTxn - > first ) <nl> + . detail ( " CurrentIndexInFailedTxn " , curIndexInCurTxn ) <nl> + . detail ( " StartIndexInUncommittedTxn " , startIndexInUncommittedTxn ) <nl> + . detail ( " NumIncludedAtomicOps " , numAtomicOps ) ; <nl> + curItInCurTxn = startItInUncommittedTxn ; <nl> + curIndexInCurTxn = startIndexInUncommittedTxn ; <nl> + curTxnId = uncommittedTxnId ; <nl> + <nl> + numAtomicOps = 0 ; <nl> + transactionSize = 0 ; <nl> + startNextVersion = false ; <nl> + lastTxnHasError = false ; <nl> + } <nl> + <nl> + bool shouldCommit ( ) { <nl> + / / TODO : Change transactionSize > 0 to transactionSize > opConfig . transactionBatchSizeThreshold to batch <nl> + / / mutations in a txn <nl> + return ( ! lastTxnHasError & & ( startNextVersion | | transactionSize > 0 | | curItInCurTxn = = self - > kvOps . end ( ) ) ) ; <nl> + } <nl> + <nl> + bool hasError ( ) { return lastTxnHasError ; } <nl> + <nl> + void setTxnError ( Error & e ) { <nl> + TraceEvent ( SevWarnAlways , " FastRestore_ApplyTxnError " ) <nl> + . detail ( " TxnStatus " , " ? " ) <nl> + . detail ( " ApplierApplyToDB " , self - > id ( ) ) <nl> + . detail ( " TxnId " , curTxnId ) <nl> + . detail ( " StartIndexInCurrentTxn " , curIndexInCurTxn ) <nl> + . 
detail ( " Version " , curItInCurTxn - > first ) <nl> + . error ( e , true ) ; <nl> + lastTxnHasError = true ; <nl> + } <nl> + <nl> + MutationRef getCurrentMutation ( ) { <nl> + ASSERT_WE_THINK ( curIndexInCurTxn < curItInCurTxn - > second . size ( ) ) ; <nl> + return curItInCurTxn - > second [ curIndexInCurTxn ] ; <nl> + } <nl> + } ; <nl> + <nl> ACTOR Future < Void > applyToDB ( Reference < RestoreApplierData > self , Database cx ) { <nl> state std : : string typeStr = " " ; <nl> <nl> / / Assume the process will not crash when it apply mutations to DB . The reply message can be lost though <nl> if ( self - > kvOps . empty ( ) ) { <nl> - TraceEvent ( " FastRestore " ) . detail ( " ApplierApplyToDBEmpty " , self - > id ( ) ) ; <nl> + TraceEvent ( " FastRestore_ApplierTxn " ) <nl> + . detail ( " ApplierApplyToDBFinished " , self - > id ( ) ) <nl> + . detail ( " Reason " , " EmptyVersionMutation " ) ; <nl> return Void ( ) ; <nl> } <nl> ASSERT_WE_THINK ( self - > kvOps . size ( ) ) ; <nl> - std : : map < Version , Standalone < VectorRef < MutationRef > > > : : iterator begin = self - > kvOps . begin ( ) ; <nl> TraceEvent ( " FastRestore " ) <nl> . detail ( " ApplierApplyToDB " , self - > id ( ) ) <nl> - . detail ( " FromVersion " , begin - > first ) <nl> + . detail ( " FromVersion " , self - > kvOps . begin ( ) - > first ) <nl> . detail ( " EndVersion " , self - > kvOps . rbegin ( ) - > first ) ; <nl> <nl> self - > sanityCheckMutationOps ( ) ; <nl> <nl> - state std : : map < Version , Standalone < VectorRef < MutationRef > > > : : iterator it = self - > kvOps . begin ( ) ; <nl> - state std : : map < Version , Standalone < VectorRef < MutationRef > > > : : iterator prevIt = it ; <nl> - state int index = 0 ; <nl> - state int prevIndex = index ; <nl> - state int count = 0 ; <nl> + state DBApplyProgress progress ( self ) ; <nl> + <nl> + if ( progress . isDone ( ) ) { <nl> + TraceEvent ( " FastRestore_ApplierTxn " ) <nl> + . detail ( " ApplierApplyToDBFinished " , self - > id ( ) ) <nl> + . detail ( " Reason " , " NoMutationAtVersions " ) ; <nl> + return Void ( ) ; <nl> + } <nl> + <nl> state Reference < ReadYourWritesTransaction > tr ( new ReadYourWritesTransaction ( cx ) ) ; <nl> - state int numVersion = 0 ; <nl> - state double transactionSize = 0 ; <nl> - loop { <nl> + <nl> + loop { / / Transaction retry loop <nl> try { <nl> - tr - > reset ( ) ; <nl> - tr - > setOption ( FDBTransactionOptions : : ACCESS_SYSTEM_KEYS ) ; <nl> - tr - > setOption ( FDBTransactionOptions : : LOCK_AWARE ) ; <nl> - transactionSize = 0 ; <nl> - <nl> - for ( ; it ! = self - > kvOps . end ( ) ; + + it ) { <nl> - numVersion + + ; <nl> - / / TraceEvent ( " FastRestore " ) . detail ( " Applier " , self - > id ( ) ) . detail ( " ApplyKVsToDBVersion " , it - > first ) ; <nl> - state MutationRef m ; <nl> - for ( ; index < it - > second . size ( ) ; + + index ) { <nl> - m = it - > second [ index ] ; <nl> + / / Check if the transaction succeeds <nl> + if ( progress . hasError ( ) ) { <nl> + tr - > reset ( ) ; <nl> + tr - > setOption ( FDBTransactionOptions : : ACCESS_SYSTEM_KEYS ) ; <nl> + tr - > setOption ( FDBTransactionOptions : : LOCK_AWARE ) ; <nl> + Optional < Value > txnSucceeded = wait ( tr - > get ( restoreApplierKeyFor ( self - > id ( ) , progress . curTxnId ) ) ) ; <nl> + if ( ! txnSucceeded . present ( ) ) { <nl> + progress . rollback ( ) ; <nl> + continue ; <nl> + } else { <nl> + TraceEvent ( SevWarn , " FastRestore_ApplyTxnError " ) <nl> + . detail ( " TxnStatusSucceeded " , progress . curTxnId ) <nl> + . 
detail ( " ApplierApplyToDB " , self - > id ( ) ) <nl> + . detail ( " CurIteratorVersion " , progress . curItInCurTxn - > first ) <nl> + . detail ( " CurrentIteratorMutations " , progress . curItInCurTxn - > second . size ( ) ) <nl> + . detail ( " CurrentIndexInSucceedTxn " , progress . curIndexInCurTxn ) <nl> + . detail ( " NumIncludedAtomicOps " , progress . numAtomicOps ) ; <nl> + / / Txn succeeded and exectue the same logic when txn succeeds <nl> + } <nl> + } else { / / ! lastTxnHasError : accumulate mutations in a txn <nl> + tr - > reset ( ) ; <nl> + tr - > setOption ( FDBTransactionOptions : : ACCESS_SYSTEM_KEYS ) ; <nl> + tr - > setOption ( FDBTransactionOptions : : LOCK_AWARE ) ; <nl> + TraceEvent ( " FastRestore_ApplierTxn " ) <nl> + . detail ( " ApplierApplyToDB " , self - > id ( ) ) <nl> + . detail ( " TxnId " , progress . curTxnId ) <nl> + . detail ( " StartIndexInCurrentTxn " , progress . curIndexInCurTxn ) <nl> + . detail ( " CurrentIteratorMutations " , progress . curItInCurTxn - > second . size ( ) ) <nl> + . detail ( " Version " , progress . curItInCurTxn - > first ) ; <nl> + <nl> + / / restoreApplierKeyFor ( self - > id ( ) , curTxnId ) to tell if txn succeeds at an unknown error <nl> + tr - > set ( restoreApplierKeyFor ( self - > id ( ) , progress . curTxnId ) , restoreApplierTxnValue ) ; <nl> + <nl> + while ( 1 ) { / / Loop : Accumulate mutations in a transaction <nl> + MutationRef m = progress . getCurrentMutation ( ) ; <nl> + <nl> if ( m . type > = MutationRef : : Type : : SetValue & & m . type < = MutationRef : : Type : : MAX_ATOMIC_OP ) { <nl> typeStr = typeString [ m . type ] ; <nl> - } <nl> - else { <nl> + } else { <nl> TraceEvent ( SevError , " FastRestore " ) . detail ( " InvalidMutationType " , m . type ) ; <nl> } <nl> <nl> + / / TraceEvent ( SevDebug , " FastRestore_Debug " ) . detail ( " ApplierApplyToDB " , self - > describeNode ( ) ) . detail ( " Version " , it - > first ) . detail ( " Mutation " , m . toString ( ) ) ; <nl> if ( m . type = = MutationRef : : SetValue ) { <nl> tr - > set ( m . param1 , m . param2 ) ; <nl> } else if ( m . type = = MutationRef : : ClearRange ) { <nl> ACTOR Future < Void > applyToDB ( Reference < RestoreApplierData > self , Database cx ) { <nl> tr - > clear ( mutationRange ) ; <nl> } else if ( isAtomicOp ( ( MutationRef : : Type ) m . type ) ) { <nl> tr - > atomicOp ( m . param1 , m . param2 , m . type ) ; <nl> + progress . numAtomicOps + + ; <nl> } else { <nl> TraceEvent ( SevError , " FastRestore " ) <nl> - . detail ( " UnhandledMutationType " , m . type ) <nl> - . detail ( " TypeName " , typeStr ) ; <nl> + . detail ( " UnhandledMutationType " , m . type ) <nl> + . detail ( " TypeName " , typeStr ) ; <nl> } <nl> - + + count ; <nl> - transactionSize + = m . expectedSize ( ) ; <nl> - <nl> - if ( transactionSize > = opConfig . transactionBatchSizeThreshold ) { / / commit per 1000 mutations <nl> - wait ( tr - > commit ( ) ) ; <nl> - tr - > reset ( ) ; <nl> - tr - > setOption ( FDBTransactionOptions : : ACCESS_SYSTEM_KEYS ) ; <nl> - tr - > setOption ( FDBTransactionOptions : : LOCK_AWARE ) ; <nl> - prevIt = it ; <nl> - prevIndex = index ; <nl> - transactionSize = 0 ; <nl> + <nl> + progress . transactionSize + = m . expectedSize ( ) ; <nl> + <nl> + if ( progress . transactionSize > = opConfig . transactionBatchSizeThreshold ) { / / commit per 512B <nl> + break ; / / Got enough mutation in the txn <nl> + } else { <nl> + progress . 
nextMutation ( ) ; <nl> + / / Mutations in the same transaction come from the same version <nl> + if ( progress . startNextVersion | | progress . isDone ( ) ) { <nl> + break ; <nl> + } <nl> } <nl> } <nl> + } / / ! lastTxnHasError <nl> <nl> - if ( transactionSize > 0 ) { / / the commit batch should NOT across versions <nl> - wait ( tr - > commit ( ) ) ; <nl> - tr - > reset ( ) ; <nl> - tr - > setOption ( FDBTransactionOptions : : ACCESS_SYSTEM_KEYS ) ; <nl> - tr - > setOption ( FDBTransactionOptions : : LOCK_AWARE ) ; <nl> - prevIt = it ; <nl> - prevIndex = index ; <nl> - transactionSize = 0 ; <nl> - } <nl> - index = 0 ; <nl> - } <nl> - / / Last transaction <nl> - if ( transactionSize > 0 ) { <nl> + / / Commit the txn and prepare the starting point for next txn <nl> + if ( progress . shouldCommit ( ) ) { <nl> wait ( tr - > commit ( ) ) ; <nl> } <nl> - break ; <nl> + / / Logic for a successful transaction : Update current txn info and uncommitted txn info <nl> + progress . nextMutation ( ) ; <nl> + if ( progress . isDone ( ) ) { / / Are all mutations processed ? <nl> + break ; <nl> + } <nl> + progress . nextTxn ( ) ; <nl> } catch ( Error & e ) { <nl> + TraceEvent ( SevWarnAlways , " FastRestore_ApplyTxnError " ) <nl> + . detail ( " TxnStatus " , " ? " ) <nl> + . detail ( " ApplierApplyToDB " , self - > id ( ) ) <nl> + . detail ( " TxnId " , progress . curTxnId ) <nl> + . detail ( " StartIndexInCurrentTxn " , progress . curIndexInCurTxn ) <nl> + . detail ( " Version " , progress . curItInCurTxn - > first ) <nl> + . error ( e , true ) ; <nl> + progress . lastTxnHasError = true ; <nl> + / / if ( e . code ( ) = = commit_unknown_result ) { <nl> + / / lastTxnHasError = true ; <nl> + / / } <nl> wait ( tr - > onError ( e ) ) ; <nl> - it = prevIt ; <nl> - index = prevIndex ; <nl> - transactionSize = 0 ; <nl> } <nl> } <nl> <nl> + TraceEvent ( " FastRestore_ApplierTxn " ) <nl> + . detail ( " ApplierApplyToDBFinished " , self - > id ( ) ) <nl> + . detail ( " CleanupCurTxnIds " , progress . curTxnId ) ; <nl> + / / House cleaning <nl> self - > kvOps . clear ( ) ; <nl> + / / clean up txn ids <nl> + loop { <nl> + try { <nl> + tr - > reset ( ) ; <nl> + tr - > setOption ( FDBTransactionOptions : : ACCESS_SYSTEM_KEYS ) ; <nl> + tr - > setOption ( FDBTransactionOptions : : LOCK_AWARE ) ; <nl> + tr - > clear ( KeyRangeRef ( restoreApplierKeyFor ( self - > id ( ) , 0 ) , <nl> + restoreApplierKeyFor ( self - > id ( ) , progress . curTxnId + 1 ) ) ) ; <nl> + wait ( tr - > commit ( ) ) ; <nl> + break ; <nl> + } catch ( Error & e ) { <nl> + wait ( tr - > onError ( e ) ) ; <nl> + } <nl> + } <nl> + TraceEvent ( " FastRestore_ApplierTxn " ) . detail ( " ApplierApplyToDBFinished " , self - > id ( ) ) ; <nl> <nl> return Void ( ) ; <nl> } <nl>
Merge pull request from xumengpanda / mengxu / fast - restore - applyToDB - PR
apple/foundationdb
67c5cb96f368160342292a4858a03d7c1004e021
2019-10-15T03:13:39Z
mmm a / torch / distributions . py <nl> ppp b / torch / distributions . py <nl> def log_prob ( self , value ) : <nl> class Categorical ( Distribution ) : <nl> r " " " <nl> Creates a categorical distribution parameterized by ` probs ` . <nl> - <nl> + <nl> . . note : : <nl> It is equivalent to the distribution that ` ` multinomial ( ) ` ` samples from . <nl> <nl>
flake8 fix
pytorch/pytorch
2c39f3de9908903365d9d956b1464896d553e40e
2017-11-18T23:32:02Z
mmm a / Jenkinsfile <nl> ppp b / Jenkinsfile <nl> try { <nl> } <nl> } <nl> } , <nl> + ' CPU : Clang 3 . 9 ' : { <nl> + node ( ' mxnetlinux - cpu ' ) { <nl> + ws ( ' workspace / build - cpu - clang ' ) { <nl> + init_git ( ) <nl> + def flag = " " " \ <nl> + USE_PROFILER = 1 \ <nl> + USE_CPP_PACKAGE = 1 \ <nl> + USE_BLAS = openblas \ <nl> + USE_OPENMP = 0 \ <nl> + CXX = clang + + - 3 . 9 \ <nl> + CC = clang - 3 . 9 \ <nl> + - j \ $ ( nproc ) <nl> + " " " <nl> + make ( " cpu_clang " , flag ) <nl> + pack_lib ( ' cpu_clang ' ) <nl> + } <nl> + } <nl> + } , <nl> + ' CPU : Clang 5 ' : { <nl> + node ( ' mxnetlinux - cpu ' ) { <nl> + ws ( ' workspace / build - cpu - clang ' ) { <nl> + init_git ( ) <nl> + def flag = " " " \ <nl> + USE_PROFILER = 1 \ <nl> + USE_CPP_PACKAGE = 1 \ <nl> + USE_BLAS = openblas \ <nl> + USE_OPENMP = 1 \ <nl> + CXX = clang + + - 5 . 0 \ <nl> + CC = clang - 5 . 0 \ <nl> + - j \ $ ( nproc ) <nl> + " " " <nl> + make ( " cpu_clang " , flag ) <nl> + pack_lib ( ' cpu_clang ' ) <nl> + } <nl> + } <nl> + } , <nl> ' CPU : MKLML ' : { <nl> node ( ' mxnetlinux - cpu ' ) { <nl> ws ( ' workspace / build - mklml - cpu ' ) { <nl> mmm a / src / io / image_io . cc <nl> ppp b / src / io / image_io . cc <nl> void Imread ( const nnvm : : NodeAttrs & attrs , <nl> Engine : : Get ( ) - > PushSync ( [ ndout , buff , fsize , param ] ( RunContext ctx ) { <nl> ImdecodeImpl ( param . flag , param . to_rgb , buff , fsize , <nl> const_cast < NDArray * > ( & ndout ) ) ; <nl> - delete buff ; <nl> + delete [ ] buff ; <nl> } , ndout . ctx ( ) , { } , { ndout . var ( ) } , <nl> FnProperty : : kNormal , 0 , PROFILER_MESSAGE ( " Imread " ) ) ; <nl> # else <nl> new file mode 100644 <nl> index 00000000000 . . 5bcdee563eb <nl> mmm / dev / null <nl> ppp b / tests / ci_build / Dockerfile . cpu_clang <nl> <nl> + FROM ubuntu : 16 . 04 <nl> + <nl> + COPY install / ubuntu_install_core . sh / install / <nl> + RUN / install / ubuntu_install_core . sh <nl> + COPY install / ubuntu_install_python . sh / install / <nl> + RUN / install / ubuntu_install_python . sh <nl> + COPY install / ubuntu_install_scala . sh / install / <nl> + RUN / install / ubuntu_install_scala . sh <nl> + COPY install / ubuntu_install_r . sh / install / <nl> + RUN / install / ubuntu_install_r . sh <nl> + COPY install / ubuntu_install_perl . sh / install / <nl> + RUN / install / ubuntu_install_perl . sh <nl> + <nl> + # Install clang 3 . 9 ( the same version as in XCode 8 . * ) and 5 . 0 ( latest major release ) <nl> + RUN wget - O - http : / / apt . llvm . org / llvm - snapshot . gpg . key | apt - key add - & & \ <nl> + apt - add - repository " deb http : / / apt . llvm . org / xenial / llvm - toolchain - xenial - 3 . 9 main " & & \ <nl> + apt - add - repository " deb http : / / apt . llvm . org / xenial / llvm - toolchain - xenial - 5 . 0 main " & & \ <nl> + apt - get update & & \ <nl> + apt - get install - y clang - 3 . 9 clang - 5 . 0 & & \ <nl> + clang - 3 . 9 - - version & & \ <nl> + clang - 5 . 0 - - version <nl>
Add clang jobs to the CI ( )
apache/incubator-mxnet
2b73aac527a3439ec0dc9b1e76c6df09ea347eb1
2018-01-11T16:33:34Z
mmm a / modules / imgproc / src / histogram . cpp <nl> ppp b / modules / imgproc / src / histogram . cpp <nl> calcHist_ ( vector < uchar * > & _ptrs , const vector < int > & _deltas , <nl> calcHist1D_Invoker < T > body ( _ptrs , _deltas , hist , _uniranges , size [ 0 ] , dims , imsize ) ; <nl> parallel_for ( BlockedRange ( 0 , imsize . height ) , body ) ; <nl> return ; <nl> - # endif <nl> + # else <nl> double a = uniranges [ 0 ] , b = uniranges [ 1 ] ; <nl> int sz = size [ 0 ] , d0 = deltas [ 0 ] , step0 = deltas [ 1 ] ; <nl> const T * p0 = ( const T * ) ptrs [ 0 ] ; <nl> calcHist_ ( vector < uchar * > & _ptrs , const vector < int > & _deltas , <nl> ( ( int * ) H ) [ idx ] + + ; <nl> } <nl> } <nl> + # endif / / HAVE_TBB <nl> } <nl> else if ( dims = = 2 ) <nl> { <nl> calcHist_ ( vector < uchar * > & _ptrs , const vector < int > & _deltas , <nl> calcHist2D_Invoker < T > body ( _ptrs , _deltas , hist , _uniranges , size , dims , imsize , hstep ) ; <nl> parallel_for ( BlockedRange ( 0 , imsize . height ) , body ) ; <nl> return ; <nl> - # endif <nl> + # else <nl> double a0 = uniranges [ 0 ] , b0 = uniranges [ 1 ] , a1 = uniranges [ 2 ] , b1 = uniranges [ 3 ] ; <nl> int sz0 = size [ 0 ] , sz1 = size [ 1 ] ; <nl> int d0 = deltas [ 0 ] , step0 = deltas [ 1 ] , <nl> calcHist_ ( vector < uchar * > & _ptrs , const vector < int > & _deltas , <nl> ( ( int * ) ( H + hstep0 * idx0 ) ) [ idx1 ] + + ; <nl> } <nl> } <nl> + # endif / / HAVE_TBB <nl> } <nl> else if ( dims = = 3 ) <nl> { <nl>
removing unreachible code
opencv/opencv
74c72a1d7a7c398dceeae7cf45a187046ffb1da8
2013-07-18T15:15:49Z
mmm a / src / mongo / db / repl / oplog_fetcher . cpp <nl> ppp b / src / mongo / db / repl / oplog_fetcher . cpp <nl> StatusWith < boost : : optional < rpc : : OplogQueryMetadata > > parseOplogQueryMetadata ( <nl> queryResponse . otherFields . metadata . hasElement ( rpc : : kOplogQueryMetadataFieldName ) ; <nl> if ( receivedOplogQueryMetadata ) { <nl> const auto & metadataObj = queryResponse . otherFields . metadata ; <nl> - / / Wall clock times are required in OplogQueryMetadata when FCV is 4 . 2 . Arbiters trivially <nl> - / / have FCV equal to 4 . 2 , so they are excluded from this check . <nl> - bool isArbiter = hasGlobalServiceContext ( ) & & <nl> - repl : : ReplicationCoordinator : : get ( getGlobalServiceContext ( ) ) & & <nl> - repl : : ReplicationCoordinator : : get ( getGlobalServiceContext ( ) ) - > getMemberState ( ) = = <nl> - MemberState : : RS_ARBITER ; <nl> - bool requireWallTime = <nl> - ( serverGlobalParams . featureCompatibility . isVersionInitialized ( ) & & <nl> - serverGlobalParams . featureCompatibility . getVersion ( ) = = <nl> - ServerGlobalParams : : FeatureCompatibility : : Version : : kFullyUpgradedTo42 & & <nl> - ! isArbiter ) ; <nl> auto metadataResult = <nl> - rpc : : OplogQueryMetadata : : readFromMetadata ( metadataObj , requireWallTime ) ; <nl> + rpc : : OplogQueryMetadata : : readFromMetadata ( metadataObj , true / * requireWallTime * / ) ; <nl> if ( ! metadataResult . isOK ( ) ) { <nl> return metadataResult . getStatus ( ) ; <nl> } <nl> StatusWith < BSONObj > OplogFetcher : : _onSuccessfulBatch ( const Fetcher : : QueryRespons <nl> queryResponse . otherFields . metadata . hasElement ( rpc : : kReplSetMetadataFieldName ) ; <nl> if ( receivedReplMetadata ) { <nl> const auto & metadataObj = queryResponse . otherFields . metadata ; <nl> - / / Wall clock times are required in ReplSetMetadata when FCV is 4 . 2 . Arbiters trivially <nl> - / / have FCV equal to 4 . 2 , so they are excluded from this check . <nl> - bool isArbiter = hasGlobalServiceContext ( ) & & <nl> - repl : : ReplicationCoordinator : : get ( getGlobalServiceContext ( ) ) & & <nl> - repl : : ReplicationCoordinator : : get ( getGlobalServiceContext ( ) ) - > getMemberState ( ) = = <nl> - MemberState : : RS_ARBITER ; <nl> - bool requireWallTime = <nl> - ( serverGlobalParams . featureCompatibility . isVersionInitialized ( ) & & <nl> - serverGlobalParams . featureCompatibility . getVersion ( ) = = <nl> - ServerGlobalParams : : FeatureCompatibility : : Version : : kFullyUpgradedTo42 & & <nl> - ! isArbiter ) ; <nl> - auto metadataResult = rpc : : ReplSetMetadata : : readFromMetadata ( metadataObj , requireWallTime ) ; <nl> + auto metadataResult = <nl> + rpc : : ReplSetMetadata : : readFromMetadata ( metadataObj , true / * requireWallTime * / ) ; <nl> if ( ! metadataResult . isOK ( ) ) { <nl> error ( ) < < " invalid replication metadata from sync source " < < _getSource ( ) < < " : " <nl> < < metadataResult . getStatus ( ) < < " : " < < metadataObj ; <nl> mmm a / src / mongo / db / repl / repl_set_commands . cpp <nl> ppp b / src / mongo / db / repl / repl_set_commands . cpp <nl> class CmdReplSetUpdatePosition : public ReplSetCommand { <nl> if ( cmdObj . hasField ( " handshake " ) ) <nl> return true ; <nl> <nl> - / / Wall clock times are required in ReplSetMetadata when FCV is 4 . 2 . Arbiters trivially <nl> - / / have FCV equal to 4 . 2 , so they are excluded from this check . 
<nl> - bool isArbiter = replCoord - > getMemberState ( ) = = MemberState : : RS_ARBITER ; <nl> - bool requireWallTime = <nl> - ( serverGlobalParams . featureCompatibility . isVersionInitialized ( ) & & <nl> - serverGlobalParams . featureCompatibility . getVersion ( ) = = <nl> - ServerGlobalParams : : FeatureCompatibility : : Version : : kFullyUpgradedTo42 & & <nl> - ! isArbiter ) ; <nl> - <nl> - auto metadataResult = rpc : : ReplSetMetadata : : readFromMetadata ( cmdObj , requireWallTime ) ; <nl> + auto metadataResult = <nl> + rpc : : ReplSetMetadata : : readFromMetadata ( cmdObj , true / * requireWallTime * / ) ; <nl> if ( metadataResult . isOK ( ) ) { <nl> / / New style update position command has metadata , which may inform the <nl> / / upstream of a higher term . <nl> class CmdReplSetUpdatePosition : public ReplSetCommand { <nl> <nl> UpdatePositionArgs args ; <nl> <nl> - / / re - check requireWallTime <nl> - requireWallTime = <nl> - ( serverGlobalParams . featureCompatibility . isVersionInitialized ( ) & & <nl> - serverGlobalParams . featureCompatibility . getVersion ( ) = = <nl> - ServerGlobalParams : : FeatureCompatibility : : Version : : kFullyUpgradedTo42 & & <nl> - ! isArbiter ) ; <nl> - status = args . initialize ( cmdObj , requireWallTime ) ; <nl> + status = args . initialize ( cmdObj , true / * requireWallTime * / ) ; <nl> if ( status . isOK ( ) ) { <nl> status = replCoord - > processReplSetUpdatePosition ( args , & configVersion ) ; <nl> <nl> mmm a / src / mongo / db / repl / repl_set_heartbeat_response . cpp <nl> ppp b / src / mongo / db / repl / repl_set_heartbeat_response . cpp <nl> Status ReplSetHeartbeatResponse : : initialize ( const BSONObj & doc , <nl> _durableWallTime = Date_t ( ) ; <nl> status = bsonExtractTypedField ( <nl> doc , kDurableWallTimeFieldName , BSONType : : Date , & durableWallTimeElement ) ; <nl> - if ( ! status . isOK ( ) & & ( status ! = ErrorCodes : : NoSuchKey | | requireWallTime ) ) { <nl> - / / We ignore NoSuchKey errors if the FeatureCompatibilityVersion is less than 4 . 2 , since <nl> - / / older version nodes may not report wall clock times . <nl> + if ( ! status . isOK ( ) ) { <nl> return status ; <nl> } <nl> if ( status . isOK ( ) ) { <nl> Status ReplSetHeartbeatResponse : : initialize ( const BSONObj & doc , <nl> _appliedWallTime = Date_t ( ) ; <nl> status = bsonExtractTypedField ( <nl> doc , kAppliedWallTimeFieldName , BSONType : : Date , & appliedWallTimeElement ) ; <nl> - if ( ! status . isOK ( ) & & ( status ! = ErrorCodes : : NoSuchKey | | requireWallTime ) ) { <nl> - / / We ignore NoSuchKey errors if the FeatureCompatibilityVersion is less than 4 . 2 , since <nl> - / / older version nodes may not report wall clock times . <nl> + if ( ! status . isOK ( ) ) { <nl> return status ; <nl> } <nl> if ( status . isOK ( ) ) { <nl> mmm a / src / mongo / db / repl / replication_coordinator_impl_heartbeat . cpp <nl> ppp b / src / mongo / db / repl / replication_coordinator_impl_heartbeat . cpp <nl> void ReplicationCoordinatorImpl : : _handleHeartbeatResponse ( <nl> BSONObj resp ; <nl> if ( responseStatus . isOK ( ) ) { <nl> resp = cbData . response . data ; <nl> - / / Wall clock times are required in ReplSetHeartbeatResponse when FCV is 4 . 2 . Arbiters <nl> - / / trivially have FCV equal to 4 . 2 , so they are excluded from this check . <nl> - bool isArbiter = _topCoord - > getMemberState ( ) = = MemberState : : RS_ARBITER ; <nl> - bool requireWallTime = <nl> - ( serverGlobalParams . featureCompatibility . 
isVersionInitialized ( ) & & <nl> - serverGlobalParams . featureCompatibility . getVersion ( ) = = <nl> - ServerGlobalParams : : FeatureCompatibility : : Version : : kFullyUpgradedTo42 & & <nl> - ! isArbiter ) ; <nl> - responseStatus = hbResponse . initialize ( resp , _topCoord - > getTerm ( ) , requireWallTime ) ; <nl> - StatusWith < rpc : : ReplSetMetadata > replMetadata = <nl> - rpc : : ReplSetMetadata : : readFromMetadata ( cbData . response . data , requireWallTime ) ; <nl> + responseStatus = <nl> + hbResponse . initialize ( resp , _topCoord - > getTerm ( ) , true / * requireWallTime * / ) ; <nl> + StatusWith < rpc : : ReplSetMetadata > replMetadata = rpc : : ReplSetMetadata : : readFromMetadata ( <nl> + cbData . response . data , true / * requireWallTime * / ) ; <nl> <nl> LOG_FOR_HEARTBEATS ( 2 ) < < " Received response to heartbeat ( requestId : " < < cbData . request . id <nl> < < " ) from " < < target < < " , " < < resp ; <nl> mmm a / src / mongo / rpc / metadata / repl_set_metadata . h <nl> ppp b / src / mongo / rpc / metadata / repl_set_metadata . h <nl> class ReplSetMetadata { <nl> * primaryIndex : 0 , <nl> * syncSourceIndex : 0 <nl> * } <nl> - * requireWallTime is only false if FCV is less than 4 . 2 or the wall clock time is not read from <nl> - * this particular ReplSetMetadata instance . <nl> + * requireWallTime is only false if the wall clock time is not read from this particular <nl> + * ReplSetMetadata instance . <nl> * / <nl> static StatusWith < ReplSetMetadata > readFromMetadata ( const BSONObj & doc , bool requireWallTime ) ; <nl> Status writeToMetadata ( BSONObjBuilder * builder ) const ; <nl>
SERVER - 42485 Remove FCV checks gating reporting wall clock times in Replication
mongodb/mongo
c1d1fda71c310e7d7a04ccbfbaaa2215b63963b6
2019-08-05T20:14:34Z
mmm a / tensorflow / core / framework / tensor . h <nl> ppp b / tensorflow / core / framework / tensor . h <nl> class Var ; <nl> <nl> namespace batch_util { <nl> Status CopyElementToSlice ( Tensor element , Tensor * parent , int64 index ) ; <nl> + Status CopySliceToElement ( const Tensor & parent , Tensor * element , int64 index ) ; <nl> + Status MaybeMoveSliceToElement ( Tensor * parent , Tensor * element , int64 index ) ; <nl> } / / namespace batch_util <nl> <nl> / / / @ ingroup core <nl> class Tensor { <nl> friend Status batch_util : : CopyElementToSlice ( <nl> Tensor element , Tensor * parent , <nl> int64 index ) ; / / For access to base < T > ( ) . <nl> + friend Status batch_util : : CopySliceToElement ( <nl> + const Tensor & parent , Tensor * element , <nl> + int64 index ) ; / / For access to base < T > ( ) . <nl> + friend Status batch_util : : MaybeMoveSliceToElement ( <nl> + Tensor * parent , Tensor * element , <nl> + int64 index ) ; / / For access to base < T > ( ) . <nl> <nl> bool CanUseDMA ( ) const ; <nl> <nl> mmm a / tensorflow / core / util / batch_util . cc <nl> ppp b / tensorflow / core / util / batch_util . cc <nl> Status HandleElementToSlice < Eigen : : half > ( const Tensor & / * element * / , <nl> return Status : : OK ( ) ; <nl> } <nl> <nl> - / / TODO ( b / 78245576 ) : Consider removing this overload . <nl> template < typename T > <nl> - void HandleSliceToElement ( const Tensor & parent , Tensor * element , int64 index ) { <nl> - element - > flat < T > ( ) = parent . flat_outer_dims < T > ( ) . chip ( index , 0 ) ; <nl> + void HandleSliceToElement ( const T * src , T * dest , int64 num_values ) { <nl> + static_assert ( is_simple_type < T > : : value , " Memcpy requires a simple type . " ) ; <nl> + memcpy ( dest , src , num_values * sizeof ( T ) ) ; <nl> + } <nl> + <nl> + template < > <nl> + void HandleSliceToElement < tstring > ( const tstring * src , tstring * dest , <nl> + int64 num_values ) { <nl> + std : : copy_n ( src , num_values , dest ) ; <nl> + } <nl> + <nl> + template < > <nl> + void HandleSliceToElement < Variant > ( const Variant * src , Variant * dest , <nl> + int64 num_values ) { <nl> + std : : copy_n ( src , num_values , dest ) ; <nl> + } <nl> + <nl> + template < > <nl> + void HandleSliceToElement < ResourceHandle > ( const ResourceHandle * src , <nl> + ResourceHandle * dest , <nl> + int64 num_values ) { <nl> + std : : copy_n ( src , num_values , dest ) ; <nl> + } <nl> + <nl> + template < > <nl> + void HandleSliceToElement < Eigen : : half > ( const Eigen : : half * src , <nl> + Eigen : : half * dest , int64 num_values ) { <nl> + std : : copy_n ( src , num_values , dest ) ; <nl> } <nl> <nl> template < typename T > <nl> - void HandleSliceToElement ( Tensor * parent , Tensor * element , int64 index ) { <nl> - element - > flat < T > ( ) = parent - > flat_outer_dims < T > ( ) . chip ( index , 0 ) ; <nl> + void HandleSliceToElement ( Tensor * parent , T * src , T * dest , int64 num_values ) { <nl> + static_assert ( is_simple_type < T > : : value , " Memcpy requires a simple type . 
" ) ; <nl> + memcpy ( dest , src , num_values * sizeof ( T ) ) ; <nl> } <nl> <nl> template < > <nl> - void HandleSliceToElement < tstring > ( Tensor * parent , Tensor * element , <nl> - int64 index ) { <nl> - auto parent_as_matrix = parent - > flat_outer_dims < tstring > ( ) ; <nl> - auto element_flat = element - > flat < tstring > ( ) ; <nl> + void HandleSliceToElement < tstring > ( Tensor * parent , tstring * src , tstring * dest , <nl> + int64 num_values ) { <nl> if ( parent - > RefCountIsOne ( ) ) { <nl> - for ( int64 i = 0 ; i < element - > NumElements ( ) ; + + i ) { <nl> - element_flat ( i ) = std : : move ( parent_as_matrix ( index , i ) ) ; <nl> + for ( int64 i = 0 ; i < num_values ; + + i ) { <nl> + dest [ i ] = std : : move ( src [ i ] ) ; <nl> } <nl> } else { <nl> - element_flat = parent_as_matrix . chip ( index , 0 ) ; <nl> + std : : copy_n ( src , num_values , dest ) ; <nl> } <nl> } <nl> <nl> template < > <nl> - void HandleSliceToElement < Variant > ( Tensor * parent , Tensor * element , <nl> - int64 index ) { <nl> - auto parent_as_matrix = parent - > flat_outer_dims < Variant > ( ) ; <nl> - auto element_flat = element - > flat < Variant > ( ) ; <nl> + void HandleSliceToElement < Variant > ( Tensor * parent , Variant * src , Variant * dest , <nl> + int64 num_values ) { <nl> if ( parent - > RefCountIsOne ( ) ) { <nl> - for ( int64 i = 0 ; i < element - > NumElements ( ) ; + + i ) { <nl> - element_flat ( i ) = std : : move ( parent_as_matrix ( index , i ) ) ; <nl> + for ( int64 i = 0 ; i < num_values ; + + i ) { <nl> + dest [ i ] = std : : move ( src [ i ] ) ; <nl> } <nl> } else { <nl> - element_flat = parent_as_matrix . chip ( index , 0 ) ; <nl> + std : : copy_n ( src , num_values , dest ) ; <nl> } <nl> } <nl> <nl> + template < > <nl> + void HandleSliceToElement < ResourceHandle > ( Tensor * parent , ResourceHandle * src , <nl> + ResourceHandle * dest , <nl> + int64 num_values ) { <nl> + std : : copy_n ( src , num_values , dest ) ; <nl> + } <nl> + <nl> + template < > <nl> + void HandleSliceToElement < Eigen : : half > ( Tensor * parent , Eigen : : half * src , <nl> + Eigen : : half * dest , int64 num_values ) { <nl> + std : : copy_n ( src , num_values , dest ) ; <nl> + } <nl> + <nl> } / / namespace <nl> <nl> / / Copies element into the index ^ th slice of parent ( in the 0th dimension ) . <nl> Status CopyElementToSlice ( Tensor element , Tensor * parent , int64 index ) { <nl> / / Copies the index ^ th slice of parent ( in the 0th dimension ) into element . <nl> Status CopySliceToElement ( const Tensor & parent , Tensor * element , int64 index ) { <nl> TF_RETURN_IF_ERROR ( ValidateInput ( parent , * element , index ) ) ; <nl> - <nl> - # define HANDLE_TYPE ( T ) \ <nl> - case DataTypeToEnum < T > : : value : { \ <nl> - HandleSliceToElement < T > ( parent , element , index ) ; \ <nl> - return Status : : OK ( ) ; \ <nl> + const int64 num_values = element - > NumElements ( ) ; <nl> + <nl> + # define HANDLE_TYPE ( T ) \ <nl> + case DataTypeToEnum < T > : : value : { \ <nl> + const T * src = parent . base < T > ( ) + ( num_values * index ) ; \ <nl> + T * dest = element - > base < T > ( ) ; \ <nl> + HandleSliceToElement < T > ( src , dest , num_values ) ; \ <nl> + return Status : : OK ( ) ; \ <nl> } <nl> <nl> switch ( parent . dtype ( ) ) { <nl> Status CopySliceToElement ( const Tensor & parent , Tensor * element , int64 index ) { <nl> / / This is particularly important for DT_STRING tensors . 
<nl> Status MaybeMoveSliceToElement ( Tensor * parent , Tensor * element , int64 index ) { <nl> TF_RETURN_IF_ERROR ( ValidateInput ( * parent , * element , index ) ) ; <nl> - <nl> - # define HANDLE_TYPE ( T ) \ <nl> - case DataTypeToEnum < T > : : value : { \ <nl> - HandleSliceToElement < T > ( parent , element , index ) ; \ <nl> - return Status : : OK ( ) ; \ <nl> + const int64 num_values = element - > NumElements ( ) ; <nl> + <nl> + # define HANDLE_TYPE ( T ) \ <nl> + case DataTypeToEnum < T > : : value : { \ <nl> + T * src = parent - > base < T > ( ) + ( num_values * index ) ; \ <nl> + T * dest = element - > base < T > ( ) ; \ <nl> + HandleSliceToElement < T > ( parent , src , dest , num_values ) ; \ <nl> + return Status : : OK ( ) ; \ <nl> } <nl> <nl> switch ( parent - > dtype ( ) ) { <nl>
[ tf . data ] Optimize ` from_tensor_slices ( ) ` and ` unbatch ( ) ` for tf . SparseTensor elements .
tensorflow/tensorflow
225f45b2d93a2269d3e9664326bc337ea99a87a0
2020-01-16T00:44:44Z
new file mode 100644 <nl> index 0000000000 . . 10c531f1dd <nl> mmm / dev / null <nl> ppp b / examples / coroutine / redis / serialize . php <nl> <nl> + < ? php <nl> + <nl> + go ( function ( ) { <nl> + $ redis = new \ Swoole \ Coroutine \ Redis ; <nl> + $ redis - > connect ( ' 127 . 0 . 0 . 1 ' , 6379 , true ) ; / / param3 is serialize <nl> + $ redis - > set ( ' foo ' , [ ' bar ' = > ' baz ' ] ) ; <nl> + $ ret = $ redis - > get ( ' foo ' ) ; <nl> + var_dump ( $ ret ) ; <nl> + } ) ; <nl> \ No newline at end of file <nl>
Add example of redis coro serialize .
swoole/swoole-src
5d3408132b31226945ddd677fade3949e94dd021
2018-07-19T04:11:14Z
mmm a / doc / CMakeLists . txt <nl> ppp b / doc / CMakeLists . txt <nl> if ( DOXYGEN_FOUND ) <nl> set ( tutorial_js_path " $ { CMAKE_CURRENT_SOURCE_DIR } / js_tutorials " ) <nl> set ( example_path " $ { CMAKE_SOURCE_DIR } / samples " ) <nl> <nl> + set ( doxygen_image_path <nl> + $ { CMAKE_CURRENT_SOURCE_DIR } / images <nl> + $ { paths_doc } <nl> + $ { tutorial_path } <nl> + $ { tutorial_py_path } <nl> + $ { tutorial_js_path } <nl> + $ { paths_tutorial } <nl> + # $ { OpenCV_SOURCE_DIR } / samples / data # TODO : need to resolve ambiguous conflicts first <nl> + $ { OpenCV_SOURCE_DIR } <nl> + $ { OpenCV_SOURCE_DIR } / modules # < opencv > / modules <nl> + $ { OPENCV_EXTRA_MODULES_PATH } # < opencv_contrib > / modules <nl> + $ { OPENCV_DOCS_EXTRA_IMAGE_PATH } # custom variable for user modules <nl> + ) <nl> + <nl> # set export variables <nl> string ( REPLACE " ; " " \ \ \ n " CMAKE_DOXYGEN_INPUT_LIST " $ { rootfile } ; $ { faqfile } ; $ { paths_include } ; $ { paths_hal_interface } ; $ { paths_doc } ; $ { tutorial_path } ; $ { tutorial_py_path } ; $ { tutorial_js_path } ; $ { paths_tutorial } ; $ { tutorial_contrib_root } " ) <nl> - string ( REPLACE " ; " " \ \ \ n " CMAKE_DOXYGEN_IMAGE_PATH " $ { CMAKE_CURRENT_SOURCE_DIR } / images ; $ { paths_doc } ; $ { tutorial_path } ; $ { tutorial_py_path } ; $ { tutorial_js_path } ; $ { paths_tutorial } " ) <nl> + string ( REPLACE " ; " " \ \ \ n " CMAKE_DOXYGEN_IMAGE_PATH " $ { doxygen_image_path } " ) <nl> string ( REPLACE " ; " " \ \ \ n " CMAKE_DOXYGEN_EXCLUDE_LIST " $ { CMAKE_DOXYGEN_EXCLUDE_LIST } " ) <nl> string ( REPLACE " ; " " " CMAKE_DOXYGEN_ENABLED_SECTIONS " $ { CMAKE_DOXYGEN_ENABLED_SECTIONS } " ) <nl> # TODO : remove paths_doc from EXAMPLE_PATH after face module tutorials / samples moved to separate folders <nl>
doxygen : adjust IMAGE_PATH , allow custom OPENCV_DOCS_EXTRA_IMAGE_PATH
opencv/opencv
a104e7c59368f6500c5a9083e1d577d48f9c54ee
2020-11-10T12:43:46Z
mmm a / hphp / runtime / ext / xdebug / ext_xdebug . cpp <nl> ppp b / hphp / runtime / ext / xdebug / ext_xdebug . cpp <nl> void XDebugExtension : : moduleLoad ( const IniSetting : : Map & ini , Hdf xdebug_hdf ) { <nl> <nl> auto debugger = xdebug_hdf [ " Eval " ] [ " Debugger " ] ; <nl> <nl> - / / Get everything as bools . <nl> - # define XDEBUG_OPT ( T , name , sym , val ) { \ <nl> - std : : string key = " XDebug " # sym ; \ <nl> - config_values [ # sym ] = Config : : GetBool ( ini , xdebug_hdf , \ <nl> - " Eval . Debugger . " + key , val ) ; \ <nl> + # define XDEBUG_OPT ( T , name , sym , val ) { \ <nl> + std : : string key = " XDebug " # sym ; \ <nl> + / * Only load the HDF value if it was specified , don ' t use the defaults . * / \ <nl> + if ( debugger . exists ( key ) ) { \ <nl> + if ( std : : is_same < T , bool > : : value ) { \ <nl> + config_values [ # sym ] = Config : : GetBool ( \ <nl> + ini , xdebug_hdf , " Eval . Debugger . " + key , val \ <nl> + ) ; \ <nl> + } else if ( std : : is_same < T , int > : : value ) { \ <nl> + config_values [ # sym ] = Config : : GetInt32 ( \ <nl> + ini , xdebug_hdf , " Eval . Debugger . " + key , val \ <nl> + ) ; \ <nl> + } \ <nl> + } \ <nl> } <nl> XDEBUG_HDF_CFG <nl> # undef XDEBUG_OPT <nl> <nl> - / / But patch up overload_var_dump since it ' s actually an int . <nl> - config_values [ " OverloadVarDump " ] = <nl> - Config : : GetInt32 ( ini , xdebug_hdf , " Eval . Debugger . XDebugOverloadVarDump " , 1 ) ; <nl> - <nl> / / XDebug is disabled by default . <nl> Config : : Bind ( Enable , ini , xdebug_hdf , " Eval . Debugger . XDebugEnable " , false ) ; <nl> <nl> mmm a / hphp / runtime / ext / xdebug / ext_xdebug . h <nl> ppp b / hphp / runtime / ext / xdebug / ext_xdebug . h <nl> struct XDebugServer ; <nl> XDEBUG_OPT ( std : : string , " remote_host " , RemoteHost , " localhost " ) \ <nl> XDEBUG_OPT ( std : : string , " remote_log " , RemoteLog , " " ) \ <nl> XDEBUG_OPT ( std : : string , " remote_mode " , RemoteMode , " req " ) \ <nl> - XDEBUG_OPT ( int , " remote_port " , RemotePort , 9000 ) \ <nl> XDEBUG_OPT ( double , " remote_timeout " , RemoteTimeout , 0 . 2 ) \ <nl> XDEBUG_OPT ( bool , " show_exception_trace " , ShowExcptionTrace , false ) \ <nl> XDEBUG_OPT ( bool , " show_local_vars " , ShowLocalVars , false ) \ <nl> struct XDebugServer ; <nl> XDEBUG_OPT ( int , " overload_var_dump " , OverloadVarDump , 1 ) \ <nl> XDEBUG_OPT ( bool , " remote_autostart " , RemoteAutostart , false ) \ <nl> XDEBUG_OPT ( bool , " remote_enable " , RemoteEnable , false ) \ <nl> + XDEBUG_OPT ( int , " remote_port " , RemotePort , 9000 ) \ <nl> <nl> / / xdebug . dump . * settings <nl> # define XDEBUG_DUMP_CFG \ <nl>
Add remote_port to the list of whitelisted hdf options
facebook/hhvm
fe9844f7927e6468e42216bcd6b34df2bdecb504
2016-01-27T01:30:45Z
mmm a / cocos2dx / platform / win32 / CCCommon . cpp <nl> ppp b / cocos2dx / platform / win32 / CCCommon . cpp <nl> void CCMessageBox ( const char * pszMsg , const char * pszTitle ) <nl> void CCLuaLog ( const char * pszMsg ) <nl> { <nl> int bufflen = MultiByteToWideChar ( CP_UTF8 , 0 , pszMsg , - 1 , NULL , 0 ) ; <nl> - + + bufflen ; <nl> - WCHAR * buff = new WCHAR [ bufflen ] ; <nl> - memset ( buff , 0 , sizeof ( WCHAR ) * bufflen ) ; <nl> - MultiByteToWideChar ( CP_UTF8 , 0 , pszMsg , - 1 , buff , bufflen - 1 ) ; <nl> + WCHAR * widebuff = new WCHAR [ bufflen + 1 ] ; <nl> + memset ( widebuff , 0 , sizeof ( WCHAR ) * ( bufflen + 1 ) ) ; <nl> + MultiByteToWideChar ( CP_UTF8 , 0 , pszMsg , - 1 , widebuff , bufflen ) ; <nl> <nl> - OutputDebugStringW ( buff ) ; <nl> + OutputDebugStringW ( widebuff ) ; <nl> OutputDebugStringA ( " \ n " ) ; <nl> <nl> - puts ( pszMsg ) ; <nl> + bufflen = WideCharToMultiByte ( CP_ACP , 0 , widebuff , - 1 , NULL , 0 , NULL , NULL ) ; <nl> + char * buff = new char [ bufflen + 1 ] ; <nl> + memset ( buff , 0 , sizeof ( char ) * ( bufflen + 1 ) ) ; <nl> + WideCharToMultiByte ( CP_ACP , 0 , widebuff , - 1 , buff , bufflen , NULL , NULL ) ; <nl> + puts ( buff ) ; <nl> + <nl> + delete widebuff ; <nl> + delete buff ; <nl> } <nl> <nl> NS_CC_END <nl> - <nl>
fix win32 CCLuaLog memory leaks , and invalid Console UTF8 output
cocos2d/cocos2d-x
01866ddc95c70dd29055ba44a2a2cfe892ee18f6
2012-10-31T03:02:36Z
mmm a / contracts / eosio . system / delegate_bandwith . hpp <nl> ppp b / contracts / eosio . system / delegate_bandwith . hpp <nl> namespace eosiosystem { <nl> <nl> require_auth ( del . from ) ; <nl> <nl> - del_bandwidth_index_type del_index ( SystemAccount , del . from ) ; <nl> - total_resources_index_type total_index ( SystemAccount , del . receiver ) ; <nl> - <nl> / / eosio_assert ( is_account ( del . receiver ) , " can only delegate resources to an existing account " ) ; <nl> <nl> auto parameters = eosio_parameters_singleton : : exists ( ) ? eosio_parameters_singleton : : get ( ) <nl> namespace eosiosystem { <nl> <nl> eosio_assert ( 0 < storage_bytes , " stake is too small to increase storage even by 1 byte " ) ; <nl> <nl> - print ( " delegatebw : from = " , del . from , " receiver = " , del . receiver , " \ n " ) ; <nl> + del_bandwidth_index_type del_index ( SystemAccount , del . from ) ; <nl> auto itr = del_index . find ( del . receiver ) ; <nl> - if ( itr ! = nullptr ) { <nl> + if ( itr = = nullptr ) { <nl> del_index . emplace ( del . from , [ & ] ( auto & dbo ) { <nl> dbo . from = del . from ; <nl> dbo . to = del . receiver ; <nl> namespace eosiosystem { <nl> } ) ; <nl> } <nl> <nl> + total_resources_index_type total_index ( SystemAccount , del . receiver ) ; <nl> auto tot_itr = total_index . find ( del . receiver ) ; <nl> if ( tot_itr = = nullptr ) { <nl> tot_itr = & total_index . emplace ( del . from , [ & ] ( auto & tot ) { <nl> namespace eosiosystem { <nl> <nl> require_auth ( del . from ) ; <nl> <nl> - del_bandwidth_index_type del_index ( SystemAccount , del . from ) ; <nl> - total_resources_index_type total_index ( SystemAccount , del . receiver ) ; <nl> - <nl> / / eosio_assert ( is_account ( del . receiver ) , " can only delegate resources to an existing account " ) ; <nl> <nl> - print ( " undelegatebw : from = " , del . from , " receiver = " , del . receiver , " \ n " ) ; <nl> + del_bandwidth_index_type del_index ( SystemAccount , del . from ) ; <nl> const auto & dbw = del_index . get ( del . receiver ) ; <nl> - <nl> eosio_assert ( dbw . net_weight > = del . unstake_net_quantity , " insufficient staked net bandwidth " ) ; <nl> eosio_assert ( dbw . cpu_weight > = del . unstake_cpu_quantity , " insufficient staked cpu bandwidth " ) ; <nl> + eosio_assert ( dbw . storage_bytes > = del . unstake_storage_bytes , " insufficient staked storage " ) ; <nl> <nl> - const auto & totals = total_index . get ( del . receiver ) ; <nl> - system_token_type storage_stake_decrease = totals . storage_stake * del . unstake_storage_bytes / totals . storage_bytes ; <nl> + system_token_type storage_stake_decrease = dbw . storage_stake * del . unstake_storage_bytes / dbw . storage_bytes ; <nl> <nl> auto total_refund = system_token_type ( del . unstake_cpu_quantity ) + system_token_type ( del . unstake_net_quantity ) + storage_stake_decrease ; <nl> - / / eosio_assert ( total_refund . quantity > = 0 , " must unstake a positive amount " ) ; <nl> + <nl> + eosio_assert ( total_refund . quantity > = 0 , " must unstake a positive amount " ) ; <nl> <nl> del_index . update ( dbw , del . from , [ & ] ( auto & dbo ) { <nl> dbo . net_weight - = del . unstake_net_quantity ; <nl> namespace eosiosystem { <nl> dbo . storage_bytes - = del . unstake_storage_bytes ; <nl> } ) ; <nl> <nl> + total_resources_index_type total_index ( SystemAccount , del . receiver ) ; <nl> + const auto & totals = total_index . get ( del . receiver ) ; <nl> total_index . update ( totals , 0 , [ & ] ( auto & tot ) { <nl> tot . 
net_weight - = del . unstake_net_quantity ; <nl> tot . cpu_weight - = del . unstake_cpu_quantity ; <nl> namespace eosiosystem { <nl> set_resource_limits ( totals . owner , totals . storage_bytes , totals . net_weight . quantity , totals . cpu_weight . quantity , 0 ) ; <nl> <nl> / / / TODO : implement / enforce time delays on withdrawing <nl> - print ( " undelegatebw : " , total_refund . quantity , " \ n " ) ; <nl> currency : : inline_transfer ( SystemAccount , del . from , asset ( static_cast < int64_t > ( total_refund . quantity ) ) , " unstake bandwidth " ) ; <nl> <nl> auto parameters = eosio_parameters_singleton : : get ( ) ; <nl> mmm a / contracts / eosiolib / singleton . hpp <nl> ppp b / contracts / eosiolib / singleton . hpp <nl> namespace eosio { <nl> <nl> uint64_t primary_key ( ) const { return pk_value ; } <nl> <nl> - EOSLIB_SERIALIZE ( row , ( value ) ) ; <nl> + EOSLIB_SERIALIZE ( row , ( value ) ) <nl> } ; <nl> <nl> typedef eosio : : multi_index < SingletonName , row > table ; <nl> mmm a / tests / wasm_tests / eosio . system_tests . cpp <nl> ppp b / tests / wasm_tests / eosio . system_tests . cpp <nl> BOOST_FIXTURE_TEST_CASE ( delegate_to_myself , eosio_system_tester ) try { <nl> <nl> BOOST_REQUIRE_EQUAL ( asset : : from_string ( " 400 . 0000 EOS " ) , get_balance ( " alice " ) ) ; <nl> <nl> + / / trying to unstake more net bandwith than at stake <nl> + BOOST_REQUIRE_EQUAL ( error ( " condition : assertion failed : insufficient staked net bandwidth " ) , <nl> + push_action ( N ( alice ) , N ( undelegatebw ) , mvo ( ) <nl> + ( " from " , " alice " ) <nl> + ( " receiver " , " alice " ) <nl> + ( " unstake_net " , " 200 . 0001 EOS " ) <nl> + ( " unstake_cpu " , " 0 . 0000 EOS " ) <nl> + ( " unstake_bytes " , 0 ) ) <nl> + ) ; <nl> + <nl> + / / trying to unstake more cpu bandwith than at stake <nl> + BOOST_REQUIRE_EQUAL ( error ( " condition : assertion failed : insufficient staked cpu bandwidth " ) , <nl> + push_action ( N ( alice ) , N ( undelegatebw ) , mvo ( ) <nl> + ( " from " , " alice " ) <nl> + ( " receiver " , " alice " ) <nl> + ( " unstake_net " , " 000 . 0000 EOS " ) <nl> + ( " unstake_cpu " , " 100 . 0001 EOS " ) <nl> + ( " unstake_bytes " , 0 ) ) <nl> + ) ; <nl> + <nl> + / / trying to unstake more storage than at stake <nl> + BOOST_REQUIRE_EQUAL ( error ( " condition : assertion failed : insufficient staked storage " ) , <nl> + push_action ( N ( alice ) , N ( undelegatebw ) , mvo ( ) <nl> + ( " from " , " alice " ) <nl> + ( " receiver " , " alice " ) <nl> + ( " unstake_net " , " 000 . 0000 EOS " ) <nl> + ( " unstake_cpu " , " 000 . 0001 EOS " ) <nl> + ( " unstake_bytes " , bytes + 1 ) ) <nl> + ) ; <nl> + <nl> + / / check that nothing has changed <nl> + stake = get_total_stake ( " alice " ) ; <nl> + BOOST_REQUIRE_EQUAL ( asset : : from_string ( " 200 . 0000 EOS " ) . amount , stake [ " net_weight " ] . as_uint64 ( ) ) ; <nl> + BOOST_REQUIRE_EQUAL ( asset : : from_string ( " 100 . 0000 EOS " ) . amount , stake [ " cpu_weight " ] . as_uint64 ( ) ) ; <nl> + BOOST_REQUIRE_EQUAL ( asset : : from_string ( " 300 . 0000 EOS " ) . amount , stake [ " storage_stake " ] . as_uint64 ( ) ) ; <nl> + BOOST_REQUIRE_EQUAL ( bytes , stake [ " storage_bytes " ] . as_uint64 ( ) ) ; <nl> + <nl> push_action ( N ( alice ) , N ( undelegatebw ) , mvo ( ) <nl> ( " from " , " alice " ) <nl> ( " receiver " , " alice " ) <nl> BOOST_FIXTURE_TEST_CASE ( delegate_to_myself , eosio_system_tester ) try { <nl> std : : cout < < " STAKE : " < < stake [ " net_weight " ] . 
as_uint64 ( ) < < ' ' < < stake [ " cpu_weight " ] . as_uint64 ( ) < < std : : endl ; <nl> BOOST_REQUIRE_EQUAL ( asset : : from_string ( " 0 . 0000 EOS " ) . amount , stake [ " net_weight " ] . as_uint64 ( ) ) ; <nl> BOOST_REQUIRE_EQUAL ( asset : : from_string ( " 0 . 0000 EOS " ) . amount , stake [ " cpu_weight " ] . as_uint64 ( ) ) ; <nl> - / / BOOST_REQUIRE_EQUAL ( asset : : from_string ( " 0 . 0000 EOS " ) . amount , stake [ " storage_stake " ] . as_uint64 ( ) ) ; <nl> + BOOST_REQUIRE_EQUAL ( 0 , stake [ " storage_stake " ] . as_uint64 ( ) ) ; <nl> + <nl> + } FC_LOG_AND_RETHROW ( ) <nl> + <nl> + BOOST_FIXTURE_TEST_CASE ( stake_negative , eosio_system_tester ) try { <nl> + issue ( " alice " , " 1000 . 0000 EOS " , config : : system_account_name ) ; <nl> <nl> + BOOST_REQUIRE_EQUAL ( error ( " condition : assertion failed : must stake a positive amount " ) , <nl> + push_action ( N ( alice ) , N ( delegatebw ) , mvo ( ) <nl> + ( " from " , " alice " ) <nl> + ( " receiver " , " alice " ) <nl> + ( " stake_net " , " - 0 . 0001 EOS " ) <nl> + ( " stake_cpu " , " 0 . 0000 EOS " ) <nl> + ( " stake_storage " , " 0 . 0000 EOS " ) ) <nl> + ) ; <nl> + <nl> + BOOST_REQUIRE_EQUAL ( error ( " condition : assertion failed : must stake a positive amount " ) , <nl> + push_action ( N ( alice ) , N ( delegatebw ) , mvo ( ) <nl> + ( " from " , " alice " ) <nl> + ( " receiver " , " alice " ) <nl> + ( " stake_net " , " 0 . 0000 EOS " ) <nl> + ( " stake_cpu " , " - 0 . 0001 EOS " ) <nl> + ( " stake_storage " , " 0 . 0000 EOS " ) ) <nl> + ) ; <nl> + <nl> + BOOST_REQUIRE_EQUAL ( error ( " condition : assertion failed : must stake a positive amount " ) , <nl> + push_action ( N ( alice ) , N ( delegatebw ) , mvo ( ) <nl> + ( " from " , " alice " ) <nl> + ( " receiver " , " alice " ) <nl> + ( " stake_net " , " 0 . 0000 EOS " ) <nl> + ( " stake_cpu " , " 0 . 0000 EOS " ) <nl> + ( " stake_storage " , " - 0 . 0001 EOS " ) ) <nl> + ) ; <nl> <nl> } FC_LOG_AND_RETHROW ( ) <nl> <nl>
unit - test for system contract ( failing )
EOSIO/eos
e47b55c7c4cbdf5619b41d52443c638a2e500624
2018-03-02T23:11:52Z
mmm a / src / ast / modules . cc <nl> ppp b / src / ast / modules . cc <nl> bool ModuleDescriptor : : Validate ( DeclarationScope * module_scope , <nl> <nl> return true ; <nl> } <nl> + <nl> } / / namespace internal <nl> } / / namespace v8 <nl> mmm a / src / ast / modules . h <nl> ppp b / src / ast / modules . h <nl> class ModuleDescriptor : public ZoneObject { <nl> PendingCompilationErrorHandler * error_handler , <nl> Zone * zone ) const ; <nl> <nl> - private : <nl> struct ModuleEntry : public ZoneObject { <nl> const Scanner : : Location location ; <nl> const AstRawString * export_name ; <nl> class ModuleDescriptor : public ZoneObject { <nl> module_request ( nullptr ) { } <nl> } ; <nl> <nl> + const ZoneList < const ModuleEntry * > & exports ( ) { return exports_ ; } <nl> + const ZoneList < const ModuleEntry * > & imports ( ) { return imports_ ; } <nl> + <nl> + private : <nl> ZoneList < const ModuleEntry * > exports_ ; <nl> ZoneList < const ModuleEntry * > imports_ ; <nl> } ; <nl> mmm a / src / ast / prettyprinter . cc <nl> ppp b / src / ast / prettyprinter . cc <nl> void AstPrinter : : VisitVariableProxy ( VariableProxy * node ) { <nl> case VariableLocation : : LOOKUP : <nl> SNPrintF ( buf + pos , " lookup " ) ; <nl> break ; <nl> + case VariableLocation : : MODULE : <nl> + SNPrintF ( buf + pos , " module " ) ; <nl> + break ; <nl> } <nl> PrintLiteralWithModeIndented ( buf . start ( ) , var , node - > name ( ) ) ; <nl> } <nl> mmm a / src / ast / scopes . cc <nl> ppp b / src / ast / scopes . cc <nl> static void PrintLocation ( Variable * var ) { <nl> case VariableLocation : : LOOKUP : <nl> PrintF ( " lookup " ) ; <nl> break ; <nl> + case VariableLocation : : MODULE : <nl> + PrintF ( " module " ) ; <nl> + break ; <nl> } <nl> } <nl> <nl> void Scope : : PropagateScopeInfo ( bool outer_scope_calls_sloppy_eval ) { <nl> <nl> <nl> bool Scope : : MustAllocate ( Variable * var ) { <nl> + DCHECK ( var - > location ( ) ! = VariableLocation : : MODULE ) ; <nl> / / Give var a read / write use if there is a chance it might be accessed <nl> / / via an eval ( ) call . This is only possible if the variable has a <nl> / / visible name . <nl> void DeclarationScope : : AllocateLocals ( AstValueFactory * ast_value_factory ) { <nl> } <nl> } <nl> <nl> + void DeclarationScope : : AllocateModuleVariables ( ) { <nl> + for ( auto entry : module ( ) - > imports ( ) ) { <nl> + if ( entry - > local_name = = nullptr ) continue ; <nl> + if ( entry - > import_name = = nullptr ) continue ; / / Namespace import . <nl> + Variable * var = LookupLocal ( entry - > local_name ) ; <nl> + / / TODO ( neis ) : Use a meaningful index . <nl> + var - > AllocateTo ( VariableLocation : : MODULE , 42 ) ; <nl> + } <nl> + for ( auto entry : module ( ) - > exports ( ) ) { <nl> + if ( entry - > local_name = = nullptr ) continue ; <nl> + Variable * var = LookupLocal ( entry - > local_name ) ; <nl> + var - > AllocateTo ( VariableLocation : : MODULE , 42 ) ; <nl> + } <nl> + } <nl> + <nl> void Scope : : AllocateVariablesRecursively ( AstValueFactory * ast_value_factory ) { <nl> if ( ! already_resolved ( ) ) { <nl> num_stack_slots_ = 0 ; <nl> void Scope : : AllocateVariablesRecursively ( AstValueFactory * ast_value_factory ) { <nl> / / Allocate variables for this scope . <nl> / / Parameters must be allocated first , if any . 
<nl> if ( is_declaration_scope ( ) ) { <nl> - if ( is_function_scope ( ) ) AsDeclarationScope ( ) - > AllocateParameterLocals ( ) ; <nl> + if ( is_module_scope ( ) ) { <nl> + AsDeclarationScope ( ) - > AllocateModuleVariables ( ) ; <nl> + } else if ( is_function_scope ( ) ) { <nl> + AsDeclarationScope ( ) - > AllocateParameterLocals ( ) ; <nl> + } <nl> AsDeclarationScope ( ) - > AllocateReceiver ( ) ; <nl> } <nl> AllocateNonParameterLocalsAndDeclaredGlobals ( ast_value_factory ) ; <nl> mmm a / src / ast / scopes . h <nl> ppp b / src / ast / scopes . h <nl> class DeclarationScope : public Scope { <nl> <nl> / / The ModuleDescriptor for this scope ; only for module scopes . <nl> / / TODO ( verwaest ) : Move to ModuleScope ? <nl> - ModuleDescriptor * module ( ) const { return module_descriptor_ ; } <nl> + ModuleDescriptor * module ( ) const { <nl> + DCHECK ( is_module_scope ( ) ) ; <nl> + DCHECK_NOT_NULL ( module_descriptor_ ) ; <nl> + return module_descriptor_ ; <nl> + } <nl> <nl> void DeclareThis ( AstValueFactory * ast_value_factory ) ; <nl> void DeclareDefaultFunctionVariables ( AstValueFactory * ast_value_factory ) ; <nl> class DeclarationScope : public Scope { <nl> void AllocateLocals ( AstValueFactory * ast_value_factory ) ; <nl> void AllocateParameterLocals ( ) ; <nl> void AllocateReceiver ( ) ; <nl> + / / Set MODULE as VariableLocation for all variables that will live in some <nl> + / / module ' s export table . <nl> + void AllocateModuleVariables ( ) ; <nl> <nl> private : <nl> void AllocateParameter ( Variable * var , int index ) ; <nl> mmm a / src / ast / variables . cc <nl> ppp b / src / ast / variables . cc <nl> const char * Variable : : Mode2String ( VariableMode mode ) { <nl> case CONST_LEGACY : return " CONST_LEGACY " ; <nl> case LET : return " LET " ; <nl> case CONST : return " CONST " ; <nl> - case IMPORT : return " IMPORT " ; <nl> case DYNAMIC : return " DYNAMIC " ; <nl> case DYNAMIC_GLOBAL : return " DYNAMIC_GLOBAL " ; <nl> case DYNAMIC_LOCAL : return " DYNAMIC_LOCAL " ; <nl> mmm a / src / compiler / ast - graph - builder . cc <nl> ppp b / src / compiler / ast - graph - builder . cc <nl> void AstGraphBuilder : : VisitVariableDeclaration ( VariableDeclaration * decl ) { <nl> PrepareFrameState ( store , decl - > proxy ( ) - > id ( ) ) ; <nl> break ; <nl> } <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> void AstGraphBuilder : : VisitFunctionDeclaration ( FunctionDeclaration * decl ) { <nl> PrepareFrameState ( store , decl - > proxy ( ) - > id ( ) ) ; <nl> break ; <nl> } <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> Node * AstGraphBuilder : : BuildVariableLoad ( Variable * variable , <nl> PrepareFrameState ( value , bailout_id , combine ) ; <nl> return value ; <nl> } <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> UNREACHABLE ( ) ; <nl> return nullptr ; <nl> Node * AstGraphBuilder : : BuildVariableDelete ( Variable * variable , <nl> PrepareFrameState ( result , bailout_id , combine ) ; <nl> return result ; <nl> } <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> UNREACHABLE ( ) ; <nl> return nullptr ; <nl> Node * AstGraphBuilder : : BuildVariableAssignment ( <nl> PrepareFrameState ( store , bailout_id , combine ) ; <nl> return store ; <nl> } <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> UNREACHABLE ( ) ; <nl> return nullptr ; <nl> mmm a / src / crankshaft / hydrogen . 
cc <nl> ppp b / src / crankshaft / hydrogen . cc <nl> void HOptimizedGraphBuilder : : VisitVariableProxy ( VariableProxy * expr ) { <nl> <nl> case VariableLocation : : LOOKUP : <nl> return Bailout ( kReferenceToAVariableWhichRequiresDynamicLookup ) ; <nl> + <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> void HOptimizedGraphBuilder : : HandleCompoundAssignment ( Assignment * expr ) { <nl> <nl> case VariableLocation : : LOOKUP : <nl> return Bailout ( kCompoundAssignmentToLookupSlot ) ; <nl> + <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> return ast_context ( ) - > ReturnValue ( Pop ( ) ) ; <nl> <nl> void HOptimizedGraphBuilder : : VisitAssignment ( Assignment * expr ) { <nl> <nl> case VariableLocation : : LOOKUP : <nl> return Bailout ( kAssignmentToLOOKUPVariable ) ; <nl> + <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } else { <nl> return Bailout ( kInvalidLeftHandSideInAssignment ) ; <nl> void HOptimizedGraphBuilder : : VisitCountOperation ( CountOperation * expr ) { <nl> <nl> case VariableLocation : : LOOKUP : <nl> return Bailout ( kLookupVariableInCountOperation ) ; <nl> + <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> <nl> Drop ( returns_original_input ? 2 : 1 ) ; <nl> void HOptimizedGraphBuilder : : VisitVariableDeclaration ( <nl> break ; <nl> case VariableLocation : : LOOKUP : <nl> return Bailout ( kUnsupportedLookupSlotInDeclaration ) ; <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> void HOptimizedGraphBuilder : : VisitFunctionDeclaration ( <nl> } <nl> case VariableLocation : : LOOKUP : <nl> return Bailout ( kUnsupportedLookupSlotInDeclaration ) ; <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> mmm a / src / full - codegen / arm / full - codegen - arm . cc <nl> ppp b / src / full - codegen / arm / full - codegen - arm . cc <nl> void FullCodeGenerator : : VisitVariableDeclaration ( <nl> PrepareForBailoutForId ( proxy - > id ( ) , BailoutState : : NO_REGISTERS ) ; <nl> break ; <nl> } <nl> + <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> void FullCodeGenerator : : VisitFunctionDeclaration ( <nl> PrepareForBailoutForId ( proxy - > id ( ) , BailoutState : : NO_REGISTERS ) ; <nl> break ; <nl> } <nl> + <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> void FullCodeGenerator : : EmitVariableLoad ( VariableProxy * proxy , <nl> __ CallRuntime ( function_id ) ; <nl> __ bind ( & done ) ; <nl> context ( ) - > Plug ( r0 ) ; <nl> + break ; <nl> } <nl> + <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> mmm a / src / full - codegen / arm64 / full - codegen - arm64 . cc <nl> ppp b / src / full - codegen / arm64 / full - codegen - arm64 . 
cc <nl> void FullCodeGenerator : : VisitVariableDeclaration ( <nl> PrepareForBailoutForId ( proxy - > id ( ) , BailoutState : : NO_REGISTERS ) ; <nl> break ; <nl> } <nl> + <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> void FullCodeGenerator : : VisitFunctionDeclaration ( <nl> PrepareForBailoutForId ( proxy - > id ( ) , BailoutState : : NO_REGISTERS ) ; <nl> break ; <nl> } <nl> + <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> void FullCodeGenerator : : EmitVariableLoad ( VariableProxy * proxy , <nl> context ( ) - > Plug ( x0 ) ; <nl> break ; <nl> } <nl> + <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> mmm a / src / full - codegen / ia32 / full - codegen - ia32 . cc <nl> ppp b / src / full - codegen / ia32 / full - codegen - ia32 . cc <nl> void FullCodeGenerator : : VisitVariableDeclaration ( <nl> PrepareForBailoutForId ( proxy - > id ( ) , BailoutState : : NO_REGISTERS ) ; <nl> break ; <nl> } <nl> + <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> void FullCodeGenerator : : VisitFunctionDeclaration ( <nl> PrepareForBailoutForId ( proxy - > id ( ) , BailoutState : : NO_REGISTERS ) ; <nl> break ; <nl> } <nl> + <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> void FullCodeGenerator : : EmitVariableLoad ( VariableProxy * proxy , <nl> context ( ) - > Plug ( eax ) ; <nl> break ; <nl> } <nl> + <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> mmm a / src / full - codegen / mips / full - codegen - mips . cc <nl> ppp b / src / full - codegen / mips / full - codegen - mips . cc <nl> void FullCodeGenerator : : VisitVariableDeclaration ( <nl> PrepareForBailoutForId ( proxy - > id ( ) , BailoutState : : NO_REGISTERS ) ; <nl> break ; <nl> } <nl> + <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> void FullCodeGenerator : : VisitFunctionDeclaration ( <nl> PrepareForBailoutForId ( proxy - > id ( ) , BailoutState : : NO_REGISTERS ) ; <nl> break ; <nl> } <nl> + <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> void FullCodeGenerator : : EmitVariableLoad ( VariableProxy * proxy , <nl> __ CallRuntime ( function_id ) ; <nl> __ bind ( & done ) ; <nl> context ( ) - > Plug ( v0 ) ; <nl> + break ; <nl> } <nl> + <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> mmm a / src / full - codegen / mips64 / full - codegen - mips64 . cc <nl> ppp b / src / full - codegen / mips64 / full - codegen - mips64 . 
cc <nl> void FullCodeGenerator : : VisitVariableDeclaration ( <nl> PrepareForBailoutForId ( proxy - > id ( ) , BailoutState : : NO_REGISTERS ) ; <nl> break ; <nl> } <nl> + <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> void FullCodeGenerator : : VisitFunctionDeclaration ( <nl> PrepareForBailoutForId ( proxy - > id ( ) , BailoutState : : NO_REGISTERS ) ; <nl> break ; <nl> } <nl> + <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> void FullCodeGenerator : : EmitVariableLoad ( VariableProxy * proxy , <nl> __ CallRuntime ( function_id ) ; <nl> __ bind ( & done ) ; <nl> context ( ) - > Plug ( v0 ) ; <nl> + break ; <nl> } <nl> + <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> mmm a / src / full - codegen / ppc / full - codegen - ppc . cc <nl> ppp b / src / full - codegen / ppc / full - codegen - ppc . cc <nl> void FullCodeGenerator : : VisitVariableDeclaration ( <nl> PrepareForBailoutForId ( proxy - > id ( ) , BailoutState : : NO_REGISTERS ) ; <nl> break ; <nl> } <nl> + <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> void FullCodeGenerator : : VisitFunctionDeclaration ( <nl> PrepareForBailoutForId ( proxy - > id ( ) , BailoutState : : NO_REGISTERS ) ; <nl> break ; <nl> } <nl> + <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> void FullCodeGenerator : : EmitVariableLoad ( VariableProxy * proxy , <nl> __ CallRuntime ( function_id ) ; <nl> __ bind ( & done ) ; <nl> context ( ) - > Plug ( r3 ) ; <nl> + break ; <nl> } <nl> + <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> mmm a / src / full - codegen / s390 / full - codegen - s390 . cc <nl> ppp b / src / full - codegen / s390 / full - codegen - s390 . cc <nl> void FullCodeGenerator : : VisitVariableDeclaration ( <nl> PrepareForBailoutForId ( proxy - > id ( ) , BailoutState : : NO_REGISTERS ) ; <nl> break ; <nl> } <nl> + <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> void FullCodeGenerator : : VisitFunctionDeclaration ( <nl> PrepareForBailoutForId ( proxy - > id ( ) , BailoutState : : NO_REGISTERS ) ; <nl> break ; <nl> } <nl> + <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> void FullCodeGenerator : : EmitVariableLoad ( VariableProxy * proxy , <nl> __ CallRuntime ( function_id ) ; <nl> __ bind ( & done ) ; <nl> context ( ) - > Plug ( r2 ) ; <nl> + break ; <nl> } <nl> + <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> mmm a / src / full - codegen / x64 / full - codegen - x64 . cc <nl> ppp b / src / full - codegen / x64 / full - codegen - x64 . 
cc <nl> void FullCodeGenerator : : VisitVariableDeclaration ( <nl> PrepareForBailoutForId ( proxy - > id ( ) , BailoutState : : NO_REGISTERS ) ; <nl> break ; <nl> } <nl> + <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> void FullCodeGenerator : : VisitFunctionDeclaration ( <nl> PrepareForBailoutForId ( proxy - > id ( ) , BailoutState : : NO_REGISTERS ) ; <nl> break ; <nl> } <nl> + <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> void FullCodeGenerator : : EmitVariableLoad ( VariableProxy * proxy , <nl> context ( ) - > Plug ( rax ) ; <nl> break ; <nl> } <nl> + <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> mmm a / src / full - codegen / x87 / full - codegen - x87 . cc <nl> ppp b / src / full - codegen / x87 / full - codegen - x87 . cc <nl> void FullCodeGenerator : : VisitVariableDeclaration ( <nl> PrepareForBailoutForId ( proxy - > id ( ) , BailoutState : : NO_REGISTERS ) ; <nl> break ; <nl> } <nl> + <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> void FullCodeGenerator : : VisitFunctionDeclaration ( <nl> PrepareForBailoutForId ( proxy - > id ( ) , BailoutState : : NO_REGISTERS ) ; <nl> break ; <nl> } <nl> + <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> void FullCodeGenerator : : EmitVariableLoad ( VariableProxy * proxy , <nl> context ( ) - > Plug ( eax ) ; <nl> break ; <nl> } <nl> + <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> mmm a / src / globals . h <nl> ppp b / src / globals . h <nl> enum VariableMode { <nl> <nl> LET , / / declared via ' let ' declarations ( first lexical ) <nl> <nl> - / / TODO ( neis ) : Is it correct to make this one of the lexical modes ? <nl> - IMPORT , / / declared via ' import ' declarations ( except namespace imports ) <nl> - <nl> CONST , / / declared via ' const ' declarations ( last lexical ) <nl> <nl> / / Variables introduced by the compiler : <nl> inline bool IsLexicalVariableMode ( VariableMode mode ) { <nl> <nl> <nl> inline bool IsImmutableVariableMode ( VariableMode mode ) { <nl> - return mode = = CONST | | mode = = CONST_LEGACY | | mode = = IMPORT ; <nl> + return mode = = CONST | | mode = = CONST_LEGACY ; <nl> } <nl> <nl> - <nl> enum class VariableLocation { <nl> / / Before and during variable allocation , a variable whose location is <nl> / / not yet determined . After allocation , a variable looked up as a <nl> enum class VariableLocation { <nl> / / A named slot in a heap context . name ( ) is the variable name in the <nl> / / context object on the heap , with lookup starting at the current <nl> / / context . index ( ) is invalid . <nl> - LOOKUP <nl> - } ; <nl> + LOOKUP , <nl> <nl> + / / A named slot in a module ' s export table . <nl> + MODULE <nl> + } ; <nl> <nl> / / ES6 Draft Rev3 10 . 2 specifies declarative environment records with mutable <nl> / / and immutable bindings that can be in two states : initialized and <nl> mmm a / src / interpreter / bytecode - generator . cc <nl> ppp b / src / interpreter / bytecode - generator . cc <nl> void BytecodeGenerator : : VisitVariableDeclaration ( VariableDeclaration * decl ) { <nl> . 
CallRuntime ( Runtime : : kDeclareEvalVar , name , 1 ) ; <nl> break ; <nl> } <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> void BytecodeGenerator : : VisitFunctionDeclaration ( FunctionDeclaration * decl ) { <nl> VisitForAccumulatorValue ( decl - > fun ( ) ) ; <nl> builder ( ) - > StoreAccumulatorInRegister ( literal ) . CallRuntime ( <nl> Runtime : : kDeclareEvalFunction , name , 2 ) ; <nl> + break ; <nl> } <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> void BytecodeGenerator : : VisitVariableLoad ( Variable * variable , <nl> execution_result ( ) - > SetResultInAccumulator ( ) ; <nl> break ; <nl> } <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> void BytecodeGenerator : : VisitVariableAssignment ( Variable * variable , <nl> builder ( ) - > StoreLookupSlot ( variable - > name ( ) , language_mode ( ) ) ; <nl> break ; <nl> } <nl> + case VariableLocation : : MODULE : <nl> + UNREACHABLE ( ) ; <nl> } <nl> } <nl> <nl> mmm a / src / parsing / parser . cc <nl> ppp b / src / parsing / parser . cc <nl> void Parser : : ParseImportDeclaration ( bool * ok ) { <nl> module_namespace_binding = <nl> ParseIdentifier ( kDontAllowRestrictedIdentifiers , CHECK_OK_VOID ) ; <nl> module_namespace_binding_loc = scanner ( ) - > location ( ) ; <nl> + DeclareImport ( module_namespace_binding , pos , CHECK_OK_VOID ) ; <nl> break ; <nl> } <nl> <nl> void Parser : : ParseImportDeclaration ( bool * ok ) { <nl> / / Now that we have all the information , we can make the appropriate <nl> / / declarations . <nl> <nl> - if ( module_namespace_binding ! = nullptr ) { <nl> - module ( ) - > AddStarImport ( module_namespace_binding , module_specifier , <nl> - module_namespace_binding_loc , zone ( ) ) ; <nl> - / / TODO ( neis ) : Create special immutable binding for the namespace object . <nl> - } <nl> - <nl> / / TODO ( neis ) : Would prefer to call DeclareImport below rather than above and <nl> / / in ParseNamedImports , but then a possible error message would point to the <nl> / / wrong location . Maybe have a DeclareAt version of Declare that takes a <nl> / / location ? <nl> <nl> + if ( module_namespace_binding ! = nullptr ) { <nl> + module ( ) - > AddStarImport ( module_namespace_binding , module_specifier , <nl> + module_namespace_binding_loc , zone ( ) ) ; <nl> + / / DeclareImport ( module_namespace_binding , pos , CHECK_OK_VOID ) ; <nl> + } <nl> + <nl> if ( import_default_binding ! = nullptr ) { <nl> module ( ) - > AddImport ( ast_value_factory ( ) - > default_string ( ) , <nl> import_default_binding , module_specifier , <nl> VariableProxy * Parser : : NewUnresolved ( const AstRawString * name , <nl> <nl> void Parser : : DeclareImport ( const AstRawString * local_name , int pos , bool * ok ) { <nl> DCHECK_NOT_NULL ( local_name ) ; <nl> - VariableProxy * proxy = NewUnresolved ( local_name , IMPORT ) ; <nl> + VariableProxy * proxy = NewUnresolved ( local_name , CONST ) ; <nl> Declaration * declaration = <nl> - factory ( ) - > NewVariableDeclaration ( proxy , IMPORT , scope ( ) , pos ) ; <nl> + factory ( ) - > NewVariableDeclaration ( proxy , CONST , scope ( ) , pos ) ; <nl> Declare ( declaration , DeclarationDescriptor : : NORMAL , true , CHECK_OK_VOID ) ; <nl> } <nl> <nl> mmm a / test / cctest / test - parsing . cc <nl> ppp b / test / cctest / test - parsing . 
cc <nl> TEST ( ModuleParsingInternals ) { <nl> " export let hoo ; " <nl> " export const joo = 42 ; " <nl> " export default ( function koo ( ) { } ) ; " <nl> - " import ' q . js ' " ; <nl> + " import ' q . js ' ; " <nl> + " let nonexport = 42 ; " <nl> + " import { m as mm } from ' m . js ' ; " <nl> + " import { aa } from ' m . js ' ; " <nl> + " export { aa as bb , x } ; " <nl> + " import * as loo from ' bar . js ' ; " <nl> + " import * as foob from ' bar . js ' ; " <nl> + " export { foob } ; " ; <nl> i : : Handle < i : : String > source = factory - > NewStringFromAsciiChecked ( kSource ) ; <nl> i : : Handle < i : : Script > script = factory - > NewScript ( source ) ; <nl> i : : Zone zone ( CcTest : : i_isolate ( ) - > allocator ( ) ) ; <nl> TEST ( ModuleParsingInternals ) { <nl> CHECK ( outer_scope - > is_script_scope ( ) ) ; <nl> CHECK_NULL ( outer_scope - > outer_scope ( ) ) ; <nl> CHECK ( module_scope - > is_module_scope ( ) ) ; <nl> - i : : ModuleDescriptor * descriptor = module_scope - > module ( ) ; <nl> - CHECK_NOT_NULL ( descriptor ) ; <nl> i : : ZoneList < i : : Declaration * > * declarations = module_scope - > declarations ( ) ; <nl> - CHECK_EQ ( 8 , declarations - > length ( ) ) ; <nl> + <nl> CHECK ( declarations - > at ( 0 ) - > proxy ( ) - > raw_name ( ) - > IsOneByteEqualTo ( " x " ) ) ; <nl> + CHECK ( declarations - > at ( 0 ) - > proxy ( ) - > var ( ) - > mode ( ) = = i : : LET ) ; <nl> + CHECK ( declarations - > at ( 0 ) - > proxy ( ) - > var ( ) - > location ( ) = = <nl> + i : : VariableLocation : : MODULE ) ; <nl> + <nl> CHECK ( declarations - > at ( 1 ) - > proxy ( ) - > raw_name ( ) - > IsOneByteEqualTo ( " z " ) ) ; <nl> + CHECK ( declarations - > at ( 1 ) - > proxy ( ) - > var ( ) - > mode ( ) = = i : : CONST ) ; <nl> + CHECK ( declarations - > at ( 1 ) - > proxy ( ) - > var ( ) - > location ( ) = = <nl> + i : : VariableLocation : : MODULE ) ; <nl> + <nl> CHECK ( declarations - > at ( 2 ) - > proxy ( ) - > raw_name ( ) - > IsOneByteEqualTo ( " n " ) ) ; <nl> + CHECK ( declarations - > at ( 2 ) - > proxy ( ) - > var ( ) - > mode ( ) = = i : : CONST ) ; <nl> + CHECK ( declarations - > at ( 2 ) - > proxy ( ) - > var ( ) - > location ( ) = = <nl> + i : : VariableLocation : : MODULE ) ; <nl> + <nl> CHECK ( declarations - > at ( 3 ) - > proxy ( ) - > raw_name ( ) - > IsOneByteEqualTo ( " foo " ) ) ; <nl> + CHECK ( declarations - > at ( 3 ) - > proxy ( ) - > var ( ) - > mode ( ) = = i : : VAR ) ; <nl> + CHECK ( declarations - > at ( 3 ) - > proxy ( ) - > var ( ) - > location ( ) = = <nl> + i : : VariableLocation : : MODULE ) ; <nl> + <nl> CHECK ( declarations - > at ( 4 ) - > proxy ( ) - > raw_name ( ) - > IsOneByteEqualTo ( " goo " ) ) ; <nl> + CHECK ( declarations - > at ( 4 ) - > proxy ( ) - > var ( ) - > mode ( ) = = i : : LET ) ; <nl> + CHECK ( declarations - > at ( 4 ) - > proxy ( ) - > var ( ) - > location ( ) = = <nl> + i : : VariableLocation : : MODULE ) ; <nl> + <nl> CHECK ( declarations - > at ( 5 ) - > proxy ( ) - > raw_name ( ) - > IsOneByteEqualTo ( " hoo " ) ) ; <nl> + CHECK ( declarations - > at ( 5 ) - > proxy ( ) - > var ( ) - > mode ( ) = = i : : LET ) ; <nl> + CHECK ( declarations - > at ( 5 ) - > proxy ( ) - > var ( ) - > location ( ) = = <nl> + i : : VariableLocation : : MODULE ) ; <nl> + <nl> CHECK ( declarations - > at ( 6 ) - > proxy ( ) - > raw_name ( ) - > IsOneByteEqualTo ( " joo " ) ) ; <nl> + CHECK ( declarations - > at ( 6 ) - > proxy ( ) - > var ( ) - > mode ( ) = = i : : CONST ) ; <nl> + CHECK ( declarations - > at ( 6 ) - > proxy ( ) - > var 
( ) - > location ( ) = = <nl> + i : : VariableLocation : : MODULE ) ; <nl> + <nl> CHECK ( <nl> declarations - > at ( 7 ) - > proxy ( ) - > raw_name ( ) - > IsOneByteEqualTo ( " * default * " ) ) ; <nl> + CHECK ( declarations - > at ( 7 ) - > proxy ( ) - > var ( ) - > mode ( ) = = i : : CONST ) ; <nl> + CHECK ( declarations - > at ( 7 ) - > proxy ( ) - > var ( ) - > location ( ) = = <nl> + i : : VariableLocation : : MODULE ) ; <nl> + <nl> + CHECK ( <nl> + declarations - > at ( 8 ) - > proxy ( ) - > raw_name ( ) - > IsOneByteEqualTo ( " nonexport " ) ) ; <nl> + CHECK ( declarations - > at ( 8 ) - > proxy ( ) - > var ( ) - > location ( ) ! = <nl> + i : : VariableLocation : : MODULE ) ; <nl> + <nl> + CHECK ( declarations - > at ( 9 ) - > proxy ( ) - > raw_name ( ) - > IsOneByteEqualTo ( " mm " ) ) ; <nl> + CHECK ( declarations - > at ( 9 ) - > proxy ( ) - > var ( ) - > mode ( ) = = i : : CONST ) ; <nl> + CHECK ( declarations - > at ( 9 ) - > proxy ( ) - > var ( ) - > location ( ) = = <nl> + i : : VariableLocation : : MODULE ) ; <nl> + <nl> + CHECK ( declarations - > at ( 10 ) - > proxy ( ) - > raw_name ( ) - > IsOneByteEqualTo ( " aa " ) ) ; <nl> + CHECK ( declarations - > at ( 10 ) - > proxy ( ) - > var ( ) - > mode ( ) = = i : : CONST ) ; <nl> + CHECK ( declarations - > at ( 10 ) - > proxy ( ) - > var ( ) - > location ( ) = = <nl> + i : : VariableLocation : : MODULE ) ; <nl> + <nl> + CHECK ( declarations - > at ( 11 ) - > proxy ( ) - > raw_name ( ) - > IsOneByteEqualTo ( " loo " ) ) ; <nl> + CHECK ( declarations - > at ( 11 ) - > proxy ( ) - > var ( ) - > mode ( ) = = i : : CONST ) ; <nl> + CHECK ( declarations - > at ( 11 ) - > proxy ( ) - > var ( ) - > location ( ) ! = <nl> + i : : VariableLocation : : MODULE ) ; <nl> + <nl> + CHECK ( declarations - > at ( 12 ) - > proxy ( ) - > raw_name ( ) - > IsOneByteEqualTo ( " foob " ) ) ; <nl> + CHECK ( declarations - > at ( 12 ) - > proxy ( ) - > var ( ) - > mode ( ) = = i : : CONST ) ; <nl> + CHECK ( declarations - > at ( 12 ) - > proxy ( ) - > var ( ) - > location ( ) = = <nl> + i : : VariableLocation : : MODULE ) ; <nl> + <nl> + CHECK_EQ ( 13 , declarations - > length ( ) ) ; <nl> + <nl> + i : : ModuleDescriptor * descriptor = module_scope - > module ( ) ; <nl> + CHECK_NOT_NULL ( descriptor ) ; <nl> / / TODO ( neis ) : Test more once we can inspect the imports / exports . <nl> } <nl> <nl> deleted file mode 100644 <nl> index e56880500ba . . 00000000000 <nl> mmm a / test / mjsunit / harmony / modules . js <nl> ppp / dev / null <nl> <nl> - / / Copyright 2015 the V8 project authors . All rights reserved . <nl> - / / Use of this source code is governed by a BSD - style license that can be <nl> - / / found in the LICENSE file . <nl> - / / <nl> - / / MODULE <nl> - <nl> - export let a = 42 ; <nl> - assertEquals ( 42 , a ) ; <nl>
[ modules ] Introduce new VariableLocation for module imports / exports .
v8/v8
4df91581d1dd74511ea29482117f54e0083807bd
2016-08-08T09:49:27Z
mmm a / . jenkins / caffe2 / build . sh <nl> ppp b / . jenkins / caffe2 / build . sh <nl> elif [ [ " $ { BUILD_ENVIRONMENT } " = = conda * ] ] ; then <nl> PROTOBUF_INCDIR = / opt / conda / include pip install - b / tmp / pip_install_onnx " file : / / $ { ROOT_DIR } / third_party / onnx # egg = onnx " <nl> report_compile_cache_stats <nl> exit 0 <nl> - elif [ [ $ BUILD_ENVIRONMENT = = * setup * ] ] ; then <nl> - rm - rf $ INSTALL_PREFIX & & mkdir $ INSTALL_PREFIX <nl> - PYTHONPATH = $ INSTALL_PREFIX $ PYTHON setup_caffe2 . py develop - - install - dir $ INSTALL_PREFIX <nl> - exit 0 <nl> fi <nl> <nl> <nl> deleted file mode 100644 <nl> index 10fe62fa8ced . . 000000000000 <nl> mmm a / setup_caffe2 . py <nl> ppp / dev / null <nl> <nl> - from __future__ import absolute_import <nl> - from __future__ import division <nl> - from __future__ import print_function <nl> - from __future__ import unicode_literals <nl> - <nl> - from distutils . spawn import find_executable <nl> - from distutils import sysconfig , log <nl> - import setuptools <nl> - import setuptools . command . build_py <nl> - import setuptools . command . develop <nl> - import setuptools . command . build_ext <nl> - <nl> - from collections import namedtuple <nl> - from contextlib import contextmanager <nl> - import glob <nl> - import os <nl> - import multiprocessing <nl> - import shlex <nl> - import subprocess <nl> - import sys <nl> - from textwrap import dedent <nl> - <nl> - TOP_DIR = os . path . realpath ( os . path . dirname ( __file__ ) ) <nl> - SRC_DIR = os . path . join ( TOP_DIR , ' caffe2 ' ) <nl> - CMAKE_BUILD_DIR = os . path . join ( TOP_DIR , ' . setuptools - cmake - build ' ) <nl> - <nl> - install_requires = [ ] <nl> - setup_requires = [ ] <nl> - tests_require = [ ] <nl> - <nl> - # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # <nl> - # Pre Check <nl> - # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # <nl> - <nl> - CMAKE = find_executable ( ' cmake ' ) <nl> - assert CMAKE , ' Could not find " cmake " executable ! ' <nl> - NINJA = find_executable ( ' ninja ' ) <nl> - MAKE = find_executable ( ' make ' ) <nl> - assert NINJA or MAKE , \ <nl> - ' Could not find neither " ninja " nor " make " executable ! ' <nl> - <nl> - # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # <nl> - # utils functions <nl> - # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # <nl> - <nl> - <nl> - @ contextmanager <nl> - def cd ( path ) : <nl> - if not os . path . isabs ( path ) : <nl> - raise RuntimeError ( ' Can only cd to absolute path , got : { } ' . format ( path ) ) <nl> - orig_path = os . getcwd ( ) <nl> - os . chdir ( path ) <nl> - try : <nl> - yield <nl> - finally : <nl> - os . chdir ( orig_path ) <nl> - <nl> - # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # <nl> - # Version <nl> - # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # <nl> - <nl> - try : <nl> - git_version = subprocess . 
check_output ( [ ' git ' , ' describe ' , ' - - tags ' , ' HEAD ' ] , <nl> - cwd = TOP_DIR ) . decode ( ' ascii ' ) . strip ( ) <nl> - except ( OSError , subprocess . CalledProcessError ) : <nl> - git_version = None <nl> - <nl> - with open ( os . path . join ( SRC_DIR , ' VERSION_NUMBER ' ) ) as version_file : <nl> - VersionInfo = namedtuple ( ' VersionInfo ' , [ ' version ' , ' git_version ' ] ) ( <nl> - version = version_file . read ( ) . strip ( ) , <nl> - git_version = git_version <nl> - ) <nl> - <nl> - # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # <nl> - # Customized commands <nl> - # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # <nl> - <nl> - <nl> - class Caffe2Command ( setuptools . Command ) : <nl> - user_options = [ ] <nl> - <nl> - def initialize_options ( self ) : <nl> - pass <nl> - <nl> - def finalize_options ( self ) : <nl> - pass <nl> - <nl> - <nl> - class create_version ( Caffe2Command ) : <nl> - def run ( self ) : <nl> - with open ( os . path . join ( SRC_DIR , ' version . py ' ) , ' w ' ) as f : <nl> - f . write ( dedent ( ' ' ' <nl> - version = ' { version } ' <nl> - git_version = ' { git_version } ' <nl> - ' ' ' . format ( * * dict ( VersionInfo . _asdict ( ) ) ) ) ) <nl> - <nl> - <nl> - class cmake_build ( Caffe2Command ) : <nl> - " " " <nl> - Compiles everything when ` python setup . py build ` is run using cmake . <nl> - <nl> - Custom args can be passed to cmake by specifying the ` CMAKE_ARGS ` <nl> - environment variable . E . g . to build without cuda support run : <nl> - ` CMAKE_ARGS = - DUSE_CUDA = Off python setup . py build ` <nl> - <nl> - The number of CPUs used by ` make ` / ` ninja ` can be specified by passing <nl> - ` - j < ncpus > ` to ` setup . py build ` . By default all CPUs are used . <nl> - " " " <nl> - user_options = [ <nl> - ( str ( ' jobs = ' ) , str ( ' j ' ) , <nl> - str ( ' Specifies the number of jobs to use with make or ninja ' ) ) <nl> - ] <nl> - <nl> - built = False <nl> - <nl> - def initialize_options ( self ) : <nl> - self . jobs = multiprocessing . cpu_count ( ) <nl> - <nl> - def finalize_options ( self ) : <nl> - self . jobs = int ( self . jobs ) <nl> - <nl> - def run ( self ) : <nl> - if cmake_build . built : <nl> - return <nl> - cmake_build . built = True <nl> - <nl> - if not os . path . exists ( CMAKE_BUILD_DIR ) : <nl> - os . makedirs ( CMAKE_BUILD_DIR ) <nl> - <nl> - with cd ( CMAKE_BUILD_DIR ) : <nl> - # configure <nl> - cmake_args = [ <nl> - find_executable ( ' cmake ' ) , <nl> - ' - DBUILD_SHARED_LIBS = OFF ' , <nl> - ' - DPYTHON_EXECUTABLE : FILEPATH = { } ' . format ( sys . executable ) , <nl> - ' - DPYTHON_INCLUDE_DIR = { } ' . format ( sysconfig . get_python_inc ( ) ) , <nl> - ' - DBUILD_TEST = OFF ' , <nl> - ' - DBUILD_BENCHMARK = OFF ' , <nl> - ' - DBUILD_BINARY = OFF ' , <nl> - ' - DCMAKE_EXPORT_COMPILE_COMMANDS = ON ' , <nl> - ] <nl> - if NINJA : <nl> - cmake_args . extend ( [ ' - G ' , ' Ninja ' ] ) <nl> - if ' CMAKE_ARGS ' in os . environ : <nl> - extra_cmake_args = shlex . split ( os . environ [ ' CMAKE_ARGS ' ] ) <nl> - # prevent crossfire with downstream scripts <nl> - del os . environ [ ' CMAKE_ARGS ' ] <nl> - log . info ( ' Extra cmake args : { } ' . format ( extra_cmake_args ) ) <nl> - cmake_args . extend ( extra_cmake_args ) <nl> - cmake_args . append ( TOP_DIR ) <nl> - subprocess . 
check_call ( cmake_args ) <nl> - <nl> - build_args = [ NINJA or MAKE ] <nl> - # control the number of concurrent jobs <nl> - if self . jobs is not None : <nl> - build_args . extend ( [ ' - j ' , str ( self . jobs ) ] ) <nl> - subprocess . check_call ( build_args ) <nl> - <nl> - <nl> - class build_py ( setuptools . command . build_py . build_py ) : <nl> - def run ( self ) : <nl> - self . run_command ( ' create_version ' ) <nl> - self . run_command ( ' cmake_build ' ) <nl> - for src in glob . glob ( <nl> - os . path . join ( CMAKE_BUILD_DIR , ' caffe2 ' , ' proto ' , ' * . py ' ) ) : <nl> - dst = os . path . join ( <nl> - TOP_DIR , os . path . relpath ( src , CMAKE_BUILD_DIR ) ) <nl> - self . copy_file ( src , dst ) <nl> - setuptools . command . build_py . build_py . run ( self ) <nl> - <nl> - <nl> - class build_ext ( setuptools . command . build_ext . build_ext ) : <nl> - def get_outputs ( self ) : <nl> - return [ os . path . join ( self . build_lib , ' caffe2 ' ) ] <nl> - <nl> - def run ( self ) : <nl> - self . run_command ( ' cmake_build ' ) <nl> - setuptools . command . build_ext . build_ext . run ( self ) <nl> - <nl> - def build_extensions ( self ) : <nl> - i = 0 <nl> - while i < len ( self . extensions ) : <nl> - ext = self . extensions [ i ] <nl> - fullname = self . get_ext_fullname ( ext . name ) <nl> - filename = self . get_ext_filename ( fullname ) <nl> - <nl> - src = os . path . join ( CMAKE_BUILD_DIR , filename ) <nl> - if not os . path . exists ( src ) : <nl> - del self . extensions [ i ] <nl> - else : <nl> - dst = os . path . join ( os . path . realpath ( self . build_lib ) , filename ) <nl> - self . copy_file ( src , dst ) <nl> - i + = 1 <nl> - <nl> - <nl> - class develop ( setuptools . command . develop . develop ) : <nl> - def run ( self ) : <nl> - self . run_command ( ' build_py ' ) <nl> - setuptools . command . develop . develop . run ( self ) <nl> - <nl> - <nl> - cmdclass = { <nl> - ' create_version ' : create_version , <nl> - ' cmake_build ' : cmake_build , <nl> - ' build_py ' : build_py , <nl> - ' build_ext ' : build_ext , <nl> - ' develop ' : develop , <nl> - } <nl> - <nl> - # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # <nl> - # Extensions <nl> - # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # <nl> - <nl> - ext_modules = [ <nl> - setuptools . Extension ( <nl> - name = str ( ' caffe2 . python . caffe2_pybind11_state ' ) , <nl> - sources = [ ] ) , <nl> - setuptools . Extension ( <nl> - name = str ( ' caffe2 . python . caffe2_pybind11_state_gpu ' ) , <nl> - sources = [ ] ) , <nl> - setuptools . Extension ( <nl> - name = str ( ' caffe2 . python . caffe2_pybind11_state_hip ' ) , <nl> - sources = [ ] ) , <nl> - ] <nl> - <nl> - # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # <nl> - # Packages <nl> - # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # <nl> - <nl> - packages = setuptools . find_packages ( ) <nl> - <nl> - install_requires . 
extend ( [ <nl> - ' protobuf ' , <nl> - ' numpy ' , <nl> - ' future ' , <nl> - ' hypothesis ' , <nl> - ' requests ' , <nl> - ' scipy ' , <nl> - ' six ' , <nl> - ] ) <nl> - <nl> - # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # <nl> - # Test <nl> - # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # <nl> - <nl> - setup_requires . append ( ' pytest - runner ' ) <nl> - tests_require . extend ( [ ' pytest - cov ' , ' hypothesis ' ] ) <nl> - <nl> - # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # <nl> - # Final <nl> - # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # <nl> - <nl> - setuptools . setup ( <nl> - name = ' caffe2 ' , <nl> - version = VersionInfo . version , <nl> - description = ' Caffe2 ' , <nl> - ext_modules = ext_modules , <nl> - cmdclass = cmdclass , <nl> - packages = packages , <nl> - install_requires = install_requires , <nl> - setup_requires = setup_requires , <nl> - tests_require = tests_require , <nl> - author = ' jiayq ' , <nl> - author_email = ' jiayq @ fb . com ' , <nl> - url = ' https : / / caffe2 . ai ' , <nl> - entry_points = { <nl> - ' console_scripts ' : [ <nl> - ' convert - caffe2 - to - onnx = caffe2 . python . onnx . bin . conversion : caffe2_to_onnx ' , <nl> - ' convert - onnx - to - caffe2 = caffe2 . python . onnx . bin . conversion : onnx_to_caffe2 ' , <nl> - ] <nl> - } , <nl> - ) <nl>
Removing setup_caffe2 . py ( )
pytorch/pytorch
043a2e36e57970fca630880670800447cc75e82c
2018-08-22T22:37:07Z
mmm a / src / yuzu / bootmanager . cpp <nl> ppp b / src / yuzu / bootmanager . cpp <nl> void GRenderWindow : : moveContext ( ) { <nl> } <nl> <nl> void GRenderWindow : : SwapBuffers ( ) { <nl> - # if ! defined ( QT_NO_DEBUG ) <nl> - / / Qt debug runtime prints a bogus warning on the console if you haven ' t called makeCurrent <nl> - / / since the last time you called swapBuffers . This presumably means something if you ' re using <nl> - / / QGLWidget the " regular " way , but in our multi - threaded use case is harmless since we never <nl> - / / call doneCurrent in this thread . <nl> + / / In our multi - threaded QGLWidget use case we shouldn ' t need to call ` makeCurrent ` , <nl> + / / since we never call ` doneCurrent ` in this thread . <nl> + / / However : <nl> + / / - The Qt debug runtime prints a bogus warning on the console if ` makeCurrent ` wasn ' t called <nl> + / / since the last time ` swapBuffers ` was executed ; <nl> + / / - On macOS , if ` makeCurrent ` isn ' t called explicitly , resizing the buffer breaks . <nl> child - > makeCurrent ( ) ; <nl> - # endif <nl> + <nl> child - > swapBuffers ( ) ; <nl> } <nl> <nl>
Port from Citra
yuzu-emu/yuzu
b8384c0c9199c00b06a03265ce75d684e2c1310f
2018-07-07T12:11:49Z
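The hunk above reduces to one pattern: re-assert the GL context with makeCurrent() on the render thread before every swapBuffers(), even though doneCurrent() is never called there. Below is a minimal C++/Qt sketch of that pattern, assuming a plain QGLWidget child; the RenderWindow class and child_ member are illustrative, not taken from the yuzu sources.

#include <QGLWidget>

// Hypothetical render-thread wrapper around a QGLWidget.
// Calling makeCurrent() before each swap silences the Qt debug-runtime
// warning and keeps buffer resizing working on macOS; it is cheap because
// the context already belongs to this thread and doneCurrent() is never used.
class RenderWindow {
public:
    explicit RenderWindow(QGLWidget* child) : child_(child) {}

    void SwapBuffers() {
        child_->makeCurrent();   // re-assert the context on this thread
        child_->swapBuffers();   // present the back buffer
    }

private:
    QGLWidget* child_;
};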
mmm a / include / swift / ClangImporter / ClangImporter . h <nl> ppp b / include / swift / ClangImporter / ClangImporter . h <nl> enum class SelectorSplitKind ; <nl> / / / Clang AST to ClangImporter to import the type into Swift . <nl> class DWARFImporterDelegate { <nl> public : <nl> - virtual ~ DWARFImporterDelegate ( ) { } <nl> + virtual ~ DWARFImporterDelegate ( ) = default ; <nl> / / / Perform a qualified lookup of a Clang type with this name . <nl> / / / \ param kind Only return results with this type kind . <nl> virtual void lookupValue ( StringRef name , llvm : : Optional < ClangTypeKind > kind , <nl> SmallVectorImpl < clang : : Decl * > & results ) { } <nl> + / / / vtable anchor . <nl> + virtual void anchor ( ) ; <nl> } ; <nl> <nl> / / / Class that imports Clang modules into Swift , mapping directly <nl> mmm a / lib / ClangImporter / DWARFImporter . cpp <nl> ppp b / lib / ClangImporter / DWARFImporter . cpp <nl> <nl> <nl> using namespace swift ; <nl> <nl> + void DWARFImporterDelegate : : anchor ( ) { } <nl> + <nl> / / / Represents a Clang module that was " imported " from debug info . Since all the <nl> / / / loading of types is done on demand , this class is effectively empty . <nl> class DWARFModuleUnit final : public LoadedFile { <nl>
Add vtable anchor and mark destructor as default . ( NFC )
apple/swift
476a10a2b08626ab05f4f13621e5ae4f1330cff1
2019-08-15T20:48:49Z
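The change above applies the usual vtable-anchor (key function) idiom: a class whose virtual members are all inline or defaulted gets one deliberately out-of-line virtual, so the compiler emits its vtable and RTTI in exactly one translation unit instead of weakly in every user. A generic sketch with an invented Delegate class, not the Swift sources:

#include <string>

// Header: every other virtual is inline or defaulted, so without an anchor
// the vtable would be emitted (weakly) in every TU that uses the class.
class Delegate {
public:
  virtual ~Delegate() = default;
  virtual void lookupValue(const std::string &) {}
  virtual void anchor();  // intentionally not defined inline
};

// Exactly one .cpp file: defining the anchor here pins the vtable to this TU.
void Delegate::anchor() {}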
mmm a / lib / AST / Decl . cpp <nl> ppp b / lib / AST / Decl . cpp <nl> STATISTIC ( NumLazyGenericEnvironments , <nl> STATISTIC ( NumLazyGenericEnvironmentsLoaded , <nl> " # of lazily - deserialized generic environments loaded " ) ; <nl> <nl> + # define DECL ( Id , _ ) \ <nl> + static_assert ( ( DeclKind : : Id = = DeclKind : : Module ) ^ \ <nl> + IsTriviallyDestructible < Id # # Decl > : : value , \ <nl> + " Decls are BumpPtrAllocated ; the destructor is never called " ) ; <nl> + # include " swift / AST / DeclNodes . def " <nl> + <nl> const clang : : MacroInfo * ClangNode : : getAsMacro ( ) const { <nl> if ( auto MM = getAsModuleMacro ( ) ) <nl> return MM - > getMacroInfo ( ) ; <nl> mmm a / lib / AST / Expr . cpp <nl> ppp b / lib / AST / Expr . cpp <nl> <nl> # include " llvm / ADT / Twine . h " <nl> using namespace swift ; <nl> <nl> + # define EXPR ( Id , _ ) \ <nl> + static_assert ( IsTriviallyDestructible < Id # # Expr > : : value , \ <nl> + " Exprs are BumpPtrAllocated ; the destructor is never called " ) ; <nl> + # include " swift / AST / ExprNodes . def " <nl> + <nl> StringRef swift : : getFunctionRefKindStr ( FunctionRefKind refKind ) { <nl> switch ( refKind ) { <nl> case FunctionRefKind : : Unapplied : <nl> mmm a / lib / AST / Stmt . cpp <nl> ppp b / lib / AST / Stmt . cpp <nl> <nl> <nl> using namespace swift ; <nl> <nl> + # define STMT ( Id , _ ) \ <nl> + static_assert ( IsTriviallyDestructible < Id # # Stmt > : : value , \ <nl> + " Stmts are BumpPtrAllocated ; the destructor is never called " ) ; <nl> + # include " swift / AST / StmtNodes . def " <nl> + <nl> / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> / / Stmt methods . <nl> / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl>
[ AST ] static_assert that Decls , Stmts , and Exprs don ' t need cleanup ( )
apple/swift
c22711a26166df9f5e0c517974b87ec7e45e05f3
2018-10-02T17:42:42Z
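The asserts above encode an invariant of bump-pointer allocation: nodes are freed wholesale and their destructors never run, so every node type must be trivially destructible (the Decl variant exempts ModuleDecl via the XOR). A self-contained sketch of the same check using the standard trait; the Swift tree instead uses its own IsTriviallyDestructible helper and its X-macro node lists, and the node types below are invented.

#include <type_traits>

// Nodes carved out of a bump allocator are never individually destroyed;
// holding a std::string or std::vector member here would silently leak.
struct IntegerLiteralExpr { long value; };
struct CallExpr { IntegerLiteralExpr *callee; unsigned numArgs; };

static_assert(std::is_trivially_destructible<IntegerLiteralExpr>::value,
              "Exprs are bump-allocated; the destructor is never called");
static_assert(std::is_trivially_destructible<CallExpr>::value,
              "Exprs are bump-allocated; the destructor is never called");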
mmm a / selfdrive / ui / qt / window . cc <nl> ppp b / selfdrive / ui / qt / window . cc <nl> MainWindow : : MainWindow ( QWidget * parent ) : QWidget ( parent ) { <nl> <nl> # ifdef QCOM2 <nl> set_core_affinity ( 7 ) ; <nl> + <nl> + / / TODO : this is needed to make first page not squished , why ? <nl> + main_layout - > setSizeConstraint ( QLayout : : SetMinimumSize ) ; <nl> # endif <nl> <nl> glWindow = new GLWindow ( this ) ; <nl> MainWindow : : MainWindow ( QWidget * parent ) : QWidget ( parent ) { <nl> QObject : : connect ( onboardingWindow , SIGNAL ( onboardingDone ( ) ) , this , SLOT ( closeSettings ( ) ) ) ; <nl> onboardingWindow - > updateActiveScreen ( ) ; <nl> <nl> - / / TODO : this is needed to make first page not squished , why ? <nl> - main_layout - > setSizeConstraint ( QLayout : : SetMinimumSize ) ; <nl> - <nl> setStyleSheet ( R " ( <nl> * { <nl> color : white ; <nl> mmm a / tools / ubuntu_setup . sh <nl> ppp b / tools / ubuntu_setup . sh <nl> sudo apt - get update & & sudo apt - get install - y \ <nl> build - essential \ <nl> bzip2 \ <nl> capnproto \ <nl> + cppcheck \ <nl> libcapnp - dev \ <nl> clang \ <nl> cmake \ <nl>
Fix Wifi UI not working on PC ( )
commaai/openpilot
3a7fcc62fb5bdadab0122625114d5b5e99a3e47e
2020-11-21T23:10:00Z
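The first hunk simply moves the QLayout::SetMinimumSize constraint under the QCOM2 guard so it is applied right after the layout is created (the "why" is still an open TODO in the code). A minimal, hypothetical Qt snippet showing that call in isolation; the class and member names are invented:

#include <QVBoxLayout>
#include <QWidget>

// Hypothetical top-level window: SetMinimumSize makes the widget's minimum
// size track the layout's minimum size, which is what keeps the first page
// from rendering squished on the QCOM2 target.
class MainWindow : public QWidget {
public:
  MainWindow() {
    auto *main_layout = new QVBoxLayout(this);
#ifdef QCOM2
    main_layout->setSizeConstraint(QLayout::SetMinimumSize);
#endif
  }
};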
mmm a / src / heap / allocation - observer . h <nl> ppp b / src / heap / allocation - observer . h <nl> class AllocationObserver { <nl> DCHECK_LE ( kTaggedSize , step_size ) ; <nl> } <nl> virtual ~ AllocationObserver ( ) = default ; <nl> + AllocationObserver ( const AllocationObserver & ) = delete ; <nl> + AllocationObserver & operator = ( const AllocationObserver & ) = delete ; <nl> <nl> protected : <nl> / / Pure virtual method provided by the subclasses that gets called when at <nl> class AllocationObserver { <nl> intptr_t step_size_ ; <nl> <nl> friend class AllocationCounter ; <nl> - DISALLOW_COPY_AND_ASSIGN ( AllocationObserver ) ; <nl> } ; <nl> <nl> class V8_EXPORT_PRIVATE V8_NODISCARD PauseAllocationObserversScope { <nl> public : <nl> explicit PauseAllocationObserversScope ( Heap * heap ) ; <nl> ~ PauseAllocationObserversScope ( ) ; <nl> + PauseAllocationObserversScope ( const PauseAllocationObserversScope & ) = delete ; <nl> + PauseAllocationObserversScope & operator = ( <nl> + const PauseAllocationObserversScope & ) = delete ; <nl> <nl> private : <nl> Heap * heap_ ; <nl> - DISALLOW_COPY_AND_ASSIGN ( PauseAllocationObserversScope ) ; <nl> } ; <nl> <nl> } / / namespace internal <nl> mmm a / src / heap / base - space . h <nl> ppp b / src / heap / base - space . h <nl> class Heap ; <nl> / / BaseSpace is the abstract superclass for all allocation spaces . <nl> class V8_EXPORT_PRIVATE BaseSpace : public Malloced { <nl> public : <nl> + BaseSpace ( const BaseSpace & ) = delete ; <nl> + BaseSpace & operator = ( const BaseSpace & ) = delete ; <nl> + <nl> Heap * heap ( ) const { <nl> DCHECK_NOT_NULL ( heap_ ) ; <nl> return heap_ ; <nl> class V8_EXPORT_PRIVATE BaseSpace : public Malloced { <nl> / / Keeps track of committed memory in a space . <nl> std : : atomic < size_t > committed_ ; <nl> size_t max_committed_ ; <nl> - <nl> - DISALLOW_COPY_AND_ASSIGN ( BaseSpace ) ; <nl> } ; <nl> <nl> } / / namespace internal <nl> mmm a / src / heap / collection - barrier . cc <nl> ppp b / src / heap / collection - barrier . cc <nl> class BackgroundCollectionInterruptTask : public CancelableTask { <nl> : CancelableTask ( heap - > isolate ( ) ) , heap_ ( heap ) { } <nl> <nl> ~ BackgroundCollectionInterruptTask ( ) override = default ; <nl> + BackgroundCollectionInterruptTask ( const BackgroundCollectionInterruptTask & ) = <nl> + delete ; <nl> + BackgroundCollectionInterruptTask & operator = ( <nl> + const BackgroundCollectionInterruptTask & ) = delete ; <nl> <nl> private : <nl> / / v8 : : internal : : CancelableTask overrides . <nl> void RunInternal ( ) override { heap_ - > CheckCollectionRequested ( ) ; } <nl> <nl> Heap * heap_ ; <nl> - DISALLOW_COPY_AND_ASSIGN ( BackgroundCollectionInterruptTask ) ; <nl> } ; <nl> <nl> void CollectionBarrier : : AwaitCollectionBackground ( ) { <nl> mmm a / src / heap / concurrent - marking . cc <nl> ppp b / src / heap / concurrent - marking . cc <nl> class ConcurrentMarkingState final <nl> class SlotSnapshot { <nl> public : <nl> SlotSnapshot ( ) : number_of_slots_ ( 0 ) { } <nl> + SlotSnapshot ( const SlotSnapshot & ) = delete ; <nl> + SlotSnapshot & operator = ( const SlotSnapshot & ) = delete ; <nl> int number_of_slots ( ) const { return number_of_slots_ ; } <nl> ObjectSlot slot ( int i ) const { return snapshot_ [ i ] . first ; } <nl> Object value ( int i ) const { return snapshot_ [ i ] . 
second ; } <nl> class SlotSnapshot { <nl> static const int kMaxSnapshotSize = JSObject : : kMaxInstanceSize / kTaggedSize ; <nl> int number_of_slots_ ; <nl> std : : pair < ObjectSlot , Object > snapshot_ [ kMaxSnapshotSize ] ; <nl> - DISALLOW_COPY_AND_ASSIGN ( SlotSnapshot ) ; <nl> } ; <nl> <nl> class ConcurrentMarkingVisitor final <nl> class ConcurrentMarking : : JobTask : public v8 : : JobTask { <nl> is_forced_gc_ ( is_forced_gc ) { } <nl> <nl> ~ JobTask ( ) override = default ; <nl> + JobTask ( const JobTask & ) = delete ; <nl> + JobTask & operator = ( const JobTask & ) = delete ; <nl> <nl> / / v8 : : JobTask overrides . <nl> void Run ( JobDelegate * delegate ) override { <nl> class ConcurrentMarking : : JobTask : public v8 : : JobTask { <nl> ConcurrentMarking * concurrent_marking_ ; <nl> const unsigned mark_compact_epoch_ ; <nl> const bool is_forced_gc_ ; <nl> - DISALLOW_COPY_AND_ASSIGN ( JobTask ) ; <nl> } ; <nl> <nl> ConcurrentMarking : : ConcurrentMarking ( Heap * heap , <nl> mmm a / src / heap / cppgc / gc - info - table . h <nl> ppp b / src / heap / cppgc / gc - info - table . h <nl> class V8_EXPORT GCInfoTable final { <nl> / / of testing code . <nl> explicit GCInfoTable ( PageAllocator * page_allocator ) ; <nl> ~ GCInfoTable ( ) ; <nl> + GCInfoTable ( const GCInfoTable & ) = delete ; <nl> + GCInfoTable & operator = ( const GCInfoTable & ) = delete ; <nl> <nl> GCInfoIndex RegisterNewGCInfo ( const GCInfo & info ) ; <nl> <nl> class V8_EXPORT GCInfoTable final { <nl> GCInfoIndex limit_ = 0 ; <nl> <nl> v8 : : base : : Mutex table_mutex_ ; <nl> - <nl> - DISALLOW_COPY_AND_ASSIGN ( GCInfoTable ) ; <nl> } ; <nl> <nl> class V8_EXPORT GlobalGCInfoTable final { <nl> public : <nl> + GlobalGCInfoTable ( const GlobalGCInfoTable & ) = delete ; <nl> + GlobalGCInfoTable & operator = ( const GlobalGCInfoTable & ) = delete ; <nl> + <nl> / / Sets up a singleton table that can be acquired using Get ( ) . <nl> static void Create ( PageAllocator * page_allocator ) ; <nl> <nl> class V8_EXPORT GlobalGCInfoTable final { <nl> static GCInfoTable * global_table_ ; <nl> <nl> DISALLOW_NEW_AND_DELETE ( ) <nl> - DISALLOW_COPY_AND_ASSIGN ( GlobalGCInfoTable ) ; <nl> } ; <nl> <nl> } / / namespace internal <nl> mmm a / src / heap / cppgc / stats - collector . h <nl> ppp b / src / heap / cppgc / stats - collector . h <nl> class V8_EXPORT_PRIVATE StatsCollector final { <nl> IncreaseScopeTime ( ) ; <nl> } <nl> <nl> + InternalScope ( const InternalScope & ) = delete ; <nl> + InternalScope & operator = ( const InternalScope & ) = delete ; <nl> + <nl> private : <nl> void * operator new ( size_t , void * ) = delete ; <nl> void * operator new ( size_t ) = delete ; <nl> class V8_EXPORT_PRIVATE StatsCollector final { <nl> StatsCollector * const stats_collector_ ; <nl> const v8 : : base : : TimeTicks start_time_ ; <nl> const ScopeIdType scope_id_ ; <nl> - <nl> - DISALLOW_COPY_AND_ASSIGN ( InternalScope ) ; <nl> } ; <nl> <nl> public : <nl> mmm a / src / heap / gc - idle - time - handler . h <nl> ppp b / src / heap / gc - idle - time - handler . 
h <nl> class V8_EXPORT_PRIVATE GCIdleTimeHandler { <nl> static const double kHighContextDisposalRate ; <nl> <nl> GCIdleTimeHandler ( ) = default ; <nl> + GCIdleTimeHandler ( const GCIdleTimeHandler & ) = delete ; <nl> + GCIdleTimeHandler & operator = ( const GCIdleTimeHandler & ) = delete ; <nl> <nl> GCIdleTimeAction Compute ( double idle_time_in_ms , <nl> GCIdleTimeHeapState heap_state ) ; <nl> class V8_EXPORT_PRIVATE GCIdleTimeHandler { <nl> static bool ShouldDoContextDisposalMarkCompact ( int context_disposed , <nl> double contexts_disposal_rate , <nl> size_t size_of_objects ) ; <nl> - <nl> - private : <nl> - DISALLOW_COPY_AND_ASSIGN ( GCIdleTimeHandler ) ; <nl> } ; <nl> <nl> } / / namespace internal <nl> mmm a / src / heap / gc - tracer . h <nl> ppp b / src / heap / gc - tracer . h <nl> enum ScavengeSpeedMode { kForAllObjects , kForSurvivedObjects } ; <nl> / / invocation IFF - - trace_gc is used . <nl> class V8_EXPORT_PRIVATE GCTracer { <nl> public : <nl> + GCTracer ( const GCTracer & ) = delete ; <nl> + GCTracer & operator = ( const GCTracer & ) = delete ; <nl> + <nl> struct IncrementalMarkingInfos { <nl> IncrementalMarkingInfos ( ) : duration ( 0 ) , longest_step ( 0 ) , steps ( 0 ) { } <nl> <nl> class V8_EXPORT_PRIVATE GCTracer { <nl> <nl> Scope ( GCTracer * tracer , ScopeId scope , ThreadKind thread_kind ) ; <nl> ~ Scope ( ) ; <nl> + Scope ( const Scope & ) = delete ; <nl> + Scope & operator = ( const Scope & ) = delete ; <nl> static const char * Name ( ScopeId id ) ; <nl> <nl> private : <nl> class V8_EXPORT_PRIVATE GCTracer { <nl> RuntimeCallTimer timer_ ; <nl> RuntimeCallStats * runtime_stats_ = nullptr ; <nl> base : : Optional < WorkerThreadRuntimeCallStatsScope > runtime_call_stats_scope_ ; <nl> - <nl> - DISALLOW_COPY_AND_ASSIGN ( Scope ) ; <nl> } ; <nl> <nl> class Event { <nl> class V8_EXPORT_PRIVATE GCTracer { <nl> <nl> base : : Mutex background_counter_mutex_ ; <nl> BackgroundCounter background_counter_ [ Scope : : NUMBER_OF_SCOPES ] ; <nl> - <nl> - DISALLOW_COPY_AND_ASSIGN ( GCTracer ) ; <nl> } ; <nl> <nl> } / / namespace internal <nl> mmm a / src / heap / heap . cc <nl> ppp b / src / heap / heap . cc <nl> class LeftTrimmerVerifierRootVisitor : public RootVisitor { <nl> explicit LeftTrimmerVerifierRootVisitor ( FixedArrayBase to_check ) <nl> : to_check_ ( to_check ) { } <nl> <nl> + LeftTrimmerVerifierRootVisitor ( const LeftTrimmerVerifierRootVisitor & ) = <nl> + delete ; <nl> + LeftTrimmerVerifierRootVisitor & operator = ( <nl> + const LeftTrimmerVerifierRootVisitor & ) = delete ; <nl> + <nl> void VisitRootPointers ( Root root , const char * description , <nl> FullObjectSlot start , FullObjectSlot end ) override { <nl> for ( FullObjectSlot p = start ; p < end ; + + p ) { <nl> class LeftTrimmerVerifierRootVisitor : public RootVisitor { <nl> <nl> private : <nl> FixedArrayBase to_check_ ; <nl> - <nl> - DISALLOW_COPY_AND_ASSIGN ( LeftTrimmerVerifierRootVisitor ) ; <nl> } ; <nl> } / / namespace <nl> # endif / / ENABLE_SLOW_DCHECKS <nl> class MemoryPressureInterruptTask : public CancelableTask { <nl> : CancelableTask ( heap - > isolate ( ) ) , heap_ ( heap ) { } <nl> <nl> ~ MemoryPressureInterruptTask ( ) override = default ; <nl> + MemoryPressureInterruptTask ( const MemoryPressureInterruptTask & ) = delete ; <nl> + MemoryPressureInterruptTask & operator = ( const MemoryPressureInterruptTask & ) = <nl> + delete ; <nl> <nl> private : <nl> / / v8 : : internal : : CancelableTask overrides . 
<nl> void RunInternal ( ) override { heap_ - > CheckMemoryPressure ( ) ; } <nl> <nl> Heap * heap_ ; <nl> - DISALLOW_COPY_AND_ASSIGN ( MemoryPressureInterruptTask ) ; <nl> } ; <nl> <nl> void Heap : : CheckMemoryPressure ( ) { <nl> mmm a / src / heap / heap . h <nl> ppp b / src / heap / heap . h <nl> class Heap { <nl> class ExternalStringTable { <nl> public : <nl> explicit ExternalStringTable ( Heap * heap ) : heap_ ( heap ) { } <nl> + ExternalStringTable ( const ExternalStringTable & ) = delete ; <nl> + ExternalStringTable & operator = ( const ExternalStringTable & ) = delete ; <nl> <nl> / / Registers an external string . <nl> inline void AddString ( String string ) ; <nl> class Heap { <nl> / / strings . <nl> std : : vector < Object > young_strings_ ; <nl> std : : vector < Object > old_strings_ ; <nl> - <nl> - DISALLOW_COPY_AND_ASSIGN ( ExternalStringTable ) ; <nl> } ; <nl> <nl> struct StringTypeTable { <nl> class Heap { <nl> Heap ( ) ; <nl> ~ Heap ( ) ; <nl> <nl> + Heap ( const Heap & ) = delete ; <nl> + Heap & operator = ( const Heap & ) = delete ; <nl> + <nl> static bool IsRegularObjectAllocation ( AllocationType allocation ) { <nl> return AllocationType : : kYoung = = allocation | | <nl> AllocationType : : kOld = = allocation ; <nl> class Heap { <nl> <nl> / / Used in cctest . <nl> friend class heap : : HeapTester ; <nl> - <nl> - DISALLOW_COPY_AND_ASSIGN ( Heap ) ; <nl> } ; <nl> <nl> class HeapStats { <nl> mmm a / src / heap / item - parallel - job . h <nl> ppp b / src / heap / item - parallel - job . h <nl> class V8_EXPORT_PRIVATE ItemParallelJob { <nl> public : <nl> Item ( ) = default ; <nl> virtual ~ Item ( ) = default ; <nl> + Item ( const Item & ) = delete ; <nl> + Item & operator = ( const Item & ) = delete ; <nl> <nl> / / Marks an item as being finished . <nl> void MarkFinished ( ) { CHECK_EQ ( kProcessing , state_ . exchange ( kFinished ) ) ; } <nl> class V8_EXPORT_PRIVATE ItemParallelJob { <nl> <nl> friend class ItemParallelJob ; <nl> friend class ItemParallelJob : : Task ; <nl> - <nl> - DISALLOW_COPY_AND_ASSIGN ( Item ) ; <nl> } ; <nl> <nl> class V8_EXPORT_PRIVATE Task : public CancelableTask { <nl> class V8_EXPORT_PRIVATE ItemParallelJob { <nl> enum class Runner { kForeground , kBackground } ; <nl> explicit Task ( Isolate * isolate ) ; <nl> ~ Task ( ) override = default ; <nl> + Task ( const Task & ) = delete ; <nl> + Task & operator = ( const Task & ) = delete ; <nl> <nl> virtual void RunInParallel ( Runner runner ) = 0 ; <nl> <nl> class V8_EXPORT_PRIVATE ItemParallelJob { <nl> size_t items_considered_ = 0 ; <nl> Runner runner_ = Runner : : kBackground ; <nl> base : : Semaphore * on_finish_ = nullptr ; <nl> - <nl> - DISALLOW_COPY_AND_ASSIGN ( Task ) ; <nl> } ; <nl> <nl> ItemParallelJob ( CancelableTaskManager * cancelable_task_manager , <nl> class V8_EXPORT_PRIVATE ItemParallelJob { <nl> <nl> ~ ItemParallelJob ( ) ; <nl> <nl> + ItemParallelJob ( const ItemParallelJob & ) = delete ; <nl> + ItemParallelJob & operator = ( const ItemParallelJob & ) = delete ; <nl> + <nl> / / Adds a task to the job . Transfers ownership to the job . <nl> void AddTask ( Task * task ) { tasks_ . 
push_back ( std : : unique_ptr < Task > ( task ) ) ; } <nl> <nl> class V8_EXPORT_PRIVATE ItemParallelJob { <nl> std : : vector < std : : unique_ptr < Task > > tasks_ ; <nl> CancelableTaskManager * cancelable_task_manager_ ; <nl> base : : Semaphore * pending_tasks_ ; <nl> - <nl> - DISALLOW_COPY_AND_ASSIGN ( ItemParallelJob ) ; <nl> } ; <nl> <nl> } / / namespace internal <nl> mmm a / src / heap / memory - allocator . cc <nl> ppp b / src / heap / memory - allocator . cc <nl> class MemoryAllocator : : Unmapper : : UnmapFreeMemoryJob : public JobTask { <nl> explicit UnmapFreeMemoryJob ( Isolate * isolate , Unmapper * unmapper ) <nl> : unmapper_ ( unmapper ) , tracer_ ( isolate - > heap ( ) - > tracer ( ) ) { } <nl> <nl> + UnmapFreeMemoryJob ( const UnmapFreeMemoryJob & ) = delete ; <nl> + UnmapFreeMemoryJob & operator = ( const UnmapFreeMemoryJob & ) = delete ; <nl> + <nl> void Run ( JobDelegate * delegate ) override { <nl> TRACE_GC1 ( tracer_ , GCTracer : : Scope : : BACKGROUND_UNMAPPER , <nl> ThreadKind : : kBackground ) ; <nl> class MemoryAllocator : : Unmapper : : UnmapFreeMemoryJob : public JobTask { <nl> private : <nl> Unmapper * const unmapper_ ; <nl> GCTracer * const tracer_ ; <nl> - DISALLOW_COPY_AND_ASSIGN ( UnmapFreeMemoryJob ) ; <nl> } ; <nl> <nl> void MemoryAllocator : : Unmapper : : FreeQueuedChunks ( ) { <nl> mmm a / src / heap / memory - reducer . h <nl> ppp b / src / heap / memory - reducer . h <nl> class V8_EXPORT_PRIVATE MemoryReducer { <nl> } ; <nl> <nl> explicit MemoryReducer ( Heap * heap ) ; <nl> + MemoryReducer ( const MemoryReducer & ) = delete ; <nl> + MemoryReducer & operator = ( const MemoryReducer & ) = delete ; <nl> / / Callbacks . <nl> void NotifyMarkCompact ( const Event & event ) ; <nl> void NotifyPossibleGarbage ( const Event & event ) ; <nl> class V8_EXPORT_PRIVATE MemoryReducer { <nl> class TimerTask : public v8 : : internal : : CancelableTask { <nl> public : <nl> explicit TimerTask ( MemoryReducer * memory_reducer ) ; <nl> + TimerTask ( const TimerTask & ) = delete ; <nl> + TimerTask & operator = ( const TimerTask & ) = delete ; <nl> <nl> private : <nl> / / v8 : : internal : : CancelableTask overrides . <nl> void RunInternal ( ) override ; <nl> MemoryReducer * memory_reducer_ ; <nl> - DISALLOW_COPY_AND_ASSIGN ( TimerTask ) ; <nl> } ; <nl> <nl> void NotifyTimer ( const Event & event ) ; <nl> class V8_EXPORT_PRIVATE MemoryReducer { <nl> <nl> / / Used in cctest . <nl> friend class heap : : HeapTester ; <nl> - DISALLOW_COPY_AND_ASSIGN ( MemoryReducer ) ; <nl> } ; <nl> <nl> } / / namespace internal <nl> mmm a / src / heap / read - only - heap . h <nl> ppp b / src / heap / read - only - heap . h <nl> class ReadOnlyHeap { <nl> <nl> virtual ~ ReadOnlyHeap ( ) = default ; <nl> <nl> + ReadOnlyHeap ( const ReadOnlyHeap & ) = delete ; <nl> + ReadOnlyHeap & operator = ( const ReadOnlyHeap & ) = delete ; <nl> + <nl> / / If necessary creates read - only heap and initializes its artifacts ( if the <nl> / / deserializer is provided ) . Then attaches the read - only heap to the isolate . <nl> / / If the deserializer is not provided , then the read - only heap will be only <nl> class ReadOnlyHeap { <nl> <nl> explicit ReadOnlyHeap ( ReadOnlySpace * ro_space ) : read_only_space_ ( ro_space ) { } <nl> ReadOnlyHeap ( ReadOnlyHeap * ro_heap , ReadOnlySpace * ro_space ) ; <nl> - <nl> - DISALLOW_COPY_AND_ASSIGN ( ReadOnlyHeap ) ; <nl> } ; <nl> <nl> / / This is used without pointer compression when there is just a single <nl> mmm a / src / heap / slot - set . 
h <nl> ppp b / src / heap / slot - set . h <nl> class PossiblyEmptyBuckets { <nl> <nl> ~ PossiblyEmptyBuckets ( ) { Release ( ) ; } <nl> <nl> + PossiblyEmptyBuckets ( const PossiblyEmptyBuckets & ) = delete ; <nl> + PossiblyEmptyBuckets & operator = ( const PossiblyEmptyBuckets & ) = delete ; <nl> + <nl> void Initialize ( ) { <nl> bitmap_ = kNullAddress ; <nl> DCHECK ( ! IsAllocated ( ) ) ; <nl> class PossiblyEmptyBuckets { <nl> } <nl> <nl> FRIEND_TEST ( PossiblyEmptyBucketsTest , WordsForBuckets ) ; <nl> - <nl> - DISALLOW_COPY_AND_ASSIGN ( PossiblyEmptyBuckets ) ; <nl> } ; <nl> <nl> STATIC_ASSERT ( std : : is_standard_layout < PossiblyEmptyBuckets > : : value ) ; <nl> mmm a / src / heap / spaces . h <nl> ppp b / src / heap / spaces . h <nl> class V8_EXPORT_PRIVATE Space : public BaseSpace { <nl> 0 ; <nl> } <nl> <nl> + Space ( const Space & ) = delete ; <nl> + Space & operator = ( const Space & ) = delete ; <nl> + <nl> static inline void MoveExternalBackingStoreBytes ( <nl> ExternalBackingStoreType type , Space * from , Space * to , size_t amount ) ; <nl> <nl> class V8_EXPORT_PRIVATE Space : public BaseSpace { <nl> std : : atomic < size_t > * external_backing_store_bytes_ ; <nl> <nl> std : : unique_ptr < FreeList > free_list_ ; <nl> - <nl> - DISALLOW_COPY_AND_ASSIGN ( Space ) ; <nl> } ; <nl> <nl> STATIC_ASSERT ( sizeof ( std : : atomic < intptr_t > ) = = kSystemPointerSize ) ; <nl> mmm a / src / heap / sweeper . cc <nl> ppp b / src / heap / sweeper . cc <nl> class Sweeper : : SweeperJob final : public JobTask { <nl> <nl> ~ SweeperJob ( ) override = default ; <nl> <nl> + SweeperJob ( const SweeperJob & ) = delete ; <nl> + SweeperJob & operator = ( const SweeperJob & ) = delete ; <nl> + <nl> void Run ( JobDelegate * delegate ) final { <nl> if ( delegate - > IsJoiningThread ( ) ) { <nl> TRACE_GC ( tracer_ , GCTracer : : Scope : : MC_SWEEP ) ; <nl> class Sweeper : : SweeperJob final : public JobTask { <nl> } <nl> Sweeper * const sweeper_ ; <nl> GCTracer * const tracer_ ; <nl> - <nl> - DISALLOW_COPY_AND_ASSIGN ( SweeperJob ) ; <nl> } ; <nl> <nl> class Sweeper : : IncrementalSweeperTask final : public CancelableTask { <nl> class Sweeper : : IncrementalSweeperTask final : public CancelableTask { <nl> <nl> ~ IncrementalSweeperTask ( ) override = default ; <nl> <nl> + IncrementalSweeperTask ( const IncrementalSweeperTask & ) = delete ; <nl> + IncrementalSweeperTask & operator = ( const IncrementalSweeperTask & ) = delete ; <nl> + <nl> private : <nl> void RunInternal ( ) final { <nl> VMState < GC > state ( isolate_ ) ; <nl> class Sweeper : : IncrementalSweeperTask final : public CancelableTask { <nl> <nl> Isolate * const isolate_ ; <nl> Sweeper * const sweeper_ ; <nl> - DISALLOW_COPY_AND_ASSIGN ( IncrementalSweeperTask ) ; <nl> } ; <nl> <nl> void Sweeper : : TearDown ( ) { <nl> class Sweeper : : IterabilityTask final : public CancelableTask { <nl> <nl> ~ IterabilityTask ( ) override = default ; <nl> <nl> + IterabilityTask ( const IterabilityTask & ) = delete ; <nl> + IterabilityTask & operator = ( const IterabilityTask & ) = delete ; <nl> + <nl> private : <nl> void RunInternal ( ) final { <nl> TRACE_GC1 ( tracer_ , GCTracer : : Scope : : MC_BACKGROUND_SWEEPING , <nl> class Sweeper : : IterabilityTask final : public CancelableTask { <nl> Sweeper * const sweeper_ ; <nl> base : : Semaphore * const pending_iterability_task_ ; <nl> GCTracer * const tracer_ ; <nl> - <nl> - DISALLOW_COPY_AND_ASSIGN ( IterabilityTask ) ; <nl> } ; <nl> <nl> void Sweeper : : StartIterabilityTasks ( ) { <nl>
[ cleanup ] [ heap ] Remove uses of DISALLOW_COPY_AND_ASSIGN
v8/v8
de3e1205cab0a0034004f9679b4ff090b11fa94e
2020-12-02T01:07:49Z
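Every hunk above follows the same mechanical recipe: drop the DISALLOW_COPY_AND_ASSIGN(T) macro from the private section and spell the intent out with C++11 deleted copy operations in the public section, usually next to the constructors. A generic before/after sketch with an invented class name:

// Before: the macro (expanding to deleted, or in older code private, copy
// constructor and copy assignment) sits at the bottom of the class.
class TracerOld {
 public:
  TracerOld() = default;

 private:
  int state_ = 0;
  // DISALLOW_COPY_AND_ASSIGN(TracerOld);  // macro form, removed above
};

// After: standard deleted functions, visible right where the class is read.
class TracerNew {
 public:
  TracerNew() = default;
  TracerNew(const TracerNew&) = delete;
  TracerNew& operator=(const TracerNew&) = delete;

 private:
  int state_ = 0;
};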
mmm a / project / BuildDependencies / scripts / 0_package . target - win10 - arm . list <nl> ppp b / project / BuildDependencies / scripts / 0_package . target - win10 - arm . list <nl> openssl - 1 . 0 . 2k - v1 - win10 - ARM - v140 . 7z <nl> pcre - 8 . 40 - win10 - ARM - v140 . 7z <nl> pillow - 4 . 2 . 1 - v1 - win10 - ARM - v140 . 7z <nl> pycryptodome - 3 . 4 . 7 - v1 - win10 - ARM - v140 . 7z <nl> - python - 2 . 7 . 13 - v4 - win10 - ARM - v140 . 7z <nl> + python - 2 . 7 . 13 - v6 - win10 - ARM - v140 . 7z <nl> rapidjson - 1 . 1 . 0 - win32 . 7z <nl> shairplay - ce80e00 - win10 - ARM - v140 . 7z <nl> sqlite - 3170000 - win10 - ARM - v140 . 7z <nl> mmm a / project / BuildDependencies / scripts / 0_package . target - win10 - win32 . list <nl> ppp b / project / BuildDependencies / scripts / 0_package . target - win10 - win32 . list <nl> openssl - 1 . 0 . 2k - v1 - win10 - Win32 - v140 . 7z <nl> pcre - 8 . 40 - win10 - Win32 - v140 . 7z <nl> pillow - 4 . 2 . 1 - v1 - win10 - Win32 - v140 . 7z <nl> pycryptodome - 3 . 4 . 7 - v1 - win10 - Win32 - v140 . 7z <nl> - python - 2 . 7 . 13 - v4 - win10 - Win32 - v140 . 7z <nl> + python - 2 . 7 . 13 - v6 - win10 - Win32 - v140 . 7z <nl> rapidjson - 1 . 1 . 0 - win32 . 7z <nl> shairplay - ce80e00 - win10 - Win32 - v140 . 7z <nl> sqlite - 3170000 - win10 - Win32 - v140 . 7z <nl> mmm a / project / BuildDependencies / scripts / 0_package . target - win10 - x64 . list <nl> ppp b / project / BuildDependencies / scripts / 0_package . target - win10 - x64 . list <nl> openssl - 1 . 0 . 2k - v1 - win10 - x64 - v140 . 7z <nl> pcre - 8 . 40 - win10 - x64 - v140 . 7z <nl> pillow - 4 . 2 . 1 - v1 - win10 - x64 - v140 . 7z <nl> pycryptodome - 3 . 4 . 7 - v1 - win10 - x64 - v140 . 7z <nl> - python - 2 . 7 . 13 - v5 - win10 - x64 - v140 . 7z <nl> + python - 2 . 7 . 13 - v6 - win10 - x64 - v140 . 7z <nl> rapidjson - 1 . 1 . 0 - win32 . 7z <nl> shairplay - ce80e00 - win10 - x64 - v140 . 7z <nl> sqlite - 3170000 - win10 - x64 - v140 . 7z <nl>
[ win10 ] dependencies - update python package .
xbmc/xbmc
5ca64bd63de8d2115ded6fdfacf45840ad546a22
2018-03-27T12:24:17Z
new file mode 100644 <nl> index 00000000000 . . 9b2c2bdeed3 <nl> mmm / dev / null <nl> ppp b / js / apps / system / aardvark / test / karma / karma_coverage_local . conf . js <nl> <nl> + / / Karma configuration <nl> + / / Generated on Thu Jul 04 2013 11 : 39 : 34 GMT + 0200 ( CEST ) <nl> + <nl> + module . exports = function ( karma ) { <nl> + <nl> + karma . set ( { <nl> + <nl> + / / base path , that will be used to resolve files and exclude <nl> + basePath : ' . . / . . / ' , <nl> + <nl> + <nl> + / / frameworks to use <nl> + / / frameworks : [ ' jasmine ' , ' junit - reporter ' ] , <nl> + frameworks : [ ' jasmine ' ] , <nl> + <nl> + <nl> + / / list of files / patterns to load in the browser <nl> + files : [ <nl> + ' test / lib / jasmine - 1 . 3 . 1 / jasmine - html . js ' , <nl> + ' test / lib / jslint . js ' , <nl> + <nl> + / / Templates <nl> + { pattern : ' frontend / js / templates / * . ejs ' , served : true , included : false , watched : true } , <nl> + <nl> + ' frontend / js / lib / jquery - 2 . 1 . 0 . min . js ' , <nl> + ' frontend / js / lib / jquery - ui - 1 . 9 . 2 . custom . js ' , <nl> + ' frontend / js / lib / jquery . dataTables . js ' , <nl> + ' frontend / js / lib / jquery . autogrow . js ' , <nl> + ' frontend / js / lib / jquery . jeditable . js ' , <nl> + ' frontend / js / lib / jquery . jeditable . autogrow . js ' , <nl> + ' frontend / js / lib / jquery . snippet . js ' , <nl> + ' frontend / js / lib / jquery . slideto . min . js ' , <nl> + ' frontend / js / lib / jquery . wiggle . min . js ' , <nl> + ' frontend / js / lib / jquery . contextmenu . js ' , <nl> + ' frontend / js / lib / handlebars - 1 . 0 . rc . 1 . js ' , <nl> + ' frontend / js / lib / underscore . js ' , <nl> + ' frontend / js / lib / backbone . js ' , <nl> + ' frontend / js / lib / d3 . v3 . min . js ' , <nl> + ' frontend / js / lib / nv . d3 . js ' , <nl> + ' frontend / js / lib / d3 . fisheye . js ' , <nl> + ' frontend / js / lib / ColVis . js ' , <nl> + ' frontend / js / lib / bootstrap . js ' , <nl> + ' frontend / js / lib / bootstrap - pagination . js ' , <nl> + ' frontend / src / ace . js ' , <nl> + ' frontend / js / lib / jqconsole . min . js ' , <nl> + ' frontend / js / lib / splitter . js ' , <nl> + ' frontend / js / lib / swagger . js ' , <nl> + ' frontend / js / lib / swagger - ui . js ' , <nl> + ' frontend / js / lib / highlight . 7 . 3 . pack . js ' , <nl> + <nl> + / / Template Engine Mock <nl> + ' test / mocks / disableEJS . js ' , <nl> + <nl> + <nl> + / / arangodb <nl> + ' frontend / js / arango / arango . js ' , <nl> + ' frontend / js / shell / browser . js ' , <nl> + ' frontend / js / modules / org / arangodb / arango - collection - common . js ' , <nl> + ' frontend / js / modules / org / arangodb / arango - collection . js ' , <nl> + ' frontend / js / modules / org / arangodb / arango - database . js ' , <nl> + ' frontend / js / modules / org / arangodb / arango - query - cursor . js ' , <nl> + ' frontend / js / modules / org / arangodb / arango - statement - common . js ' , <nl> + ' frontend / js / modules / org / arangodb / arango - statement . js ' , <nl> + ' frontend / js / modules / org / arangodb / arangosh . js ' , <nl> + ' frontend / js / modules / org / arangodb / graph - common . js ' , <nl> + ' frontend / js / modules / org / arangodb / graph . js ' , <nl> + ' frontend / js / modules / org / arangodb / mimetypes . js ' , <nl> + ' frontend / js / modules / org / arangodb / simple - query - common . js ' , <nl> + ' frontend / js / modules / org / arangodb / simple - query . 
js ' , <nl> + ' frontend / js / modules / org / arangodb / aql / functions . js ' , <nl> + ' frontend / js / modules / org / arangodb / graph / traversal . js ' , <nl> + ' frontend / js / modules / org / arangodb - common . js ' , <nl> + ' frontend / js / modules / org / arangodb . js ' , <nl> + ' frontend / js / bootstrap / errors . js ' , <nl> + ' frontend / js / bootstrap / monkeypatches . js ' , <nl> + ' frontend / js / bootstrap / module - internal . js ' , <nl> + ' frontend / js / client / bootstrap / module - internal . js ' , <nl> + ' frontend / js / client / client . js ' , <nl> + <nl> + / / Mocks <nl> + ' test / specs / graphViewer / helper / eventHelper . js ' , <nl> + ' test / specs / graphViewer / helper / objectsHelper . js ' , <nl> + ' test / specs / graphViewer / helper / mocks . js ' , <nl> + ' test / specs / graphViewer / helper / commMock . js ' , <nl> + ' test / specs / graphViewer / helper / uiMatchers . js ' , <nl> + <nl> + / / GraphViewer <nl> + / / Core Modules <nl> + ' frontend / js / graphViewer / graphViewer . js ' , <nl> + ' frontend / js / graphViewer / graph / domObserverFactory . js ' , <nl> + ' frontend / js / graphViewer / graph / colourMapper . js ' , <nl> + ' frontend / js / graphViewer / graph / communityNode . js ' , <nl> + ' frontend / js / graphViewer / graph / webWorkerWrapper . js ' , <nl> + ' frontend / js / graphViewer / graph / nodeShaper . js ' , <nl> + ' frontend / js / graphViewer / graph / abstractAdapter . js ' , <nl> + ' frontend / js / graphViewer / graph / JSONAdapter . js ' , <nl> + ' frontend / js / graphViewer / graph / arangoAdapter . js ' , <nl> + ' frontend / js / graphViewer / graph / foxxAdapter . js ' , <nl> + ' frontend / js / graphViewer / graph / previewAdapter . js ' , <nl> + ' frontend / js / graphViewer / graph / edgeShaper . js ' , <nl> + ' frontend / js / graphViewer / graph / forceLayouter . js ' , <nl> + ' frontend / js / graphViewer / graph / eventDispatcher . js ' , <nl> + ' frontend / js / graphViewer / graph / eventLibrary . js ' , <nl> + ' frontend / js / graphViewer / graph / zoomManager . js ' , <nl> + ' frontend / js / graphViewer / graph / nodeReducer . js ' , <nl> + ' frontend / js / graphViewer / graph / modularityJoiner . js ' , <nl> + <nl> + / / UI Modules <nl> + ' frontend / js / graphViewer / ui / modalDialogHelper . js ' , <nl> + ' frontend / js / graphViewer / ui / contextMenuHelper . js ' , <nl> + ' frontend / js / graphViewer / ui / nodeShaperControls . js ' , <nl> + ' frontend / js / graphViewer / ui / edgeShaperControls . js ' , <nl> + ' frontend / js / graphViewer / ui / arangoAdapterControls . js ' , <nl> + ' frontend / js / graphViewer / ui / layouterControls . js ' , <nl> + ' frontend / js / graphViewer / ui / uiComponentsHelper . js ' , <nl> + ' frontend / js / graphViewer / ui / eventDispatcherControls . js ' , <nl> + ' frontend / js / graphViewer / ui / graphViewerUI . js ' , <nl> + ' frontend / js / graphViewer / ui / graphViewerWidget . js ' , <nl> + ' frontend / js / graphViewer / ui / graphViewerPreview . js ' , <nl> + <nl> + / / Models <nl> + ' frontend / js / models / currentDatabase . js ' , <nl> + ' frontend / js / models / arangoCollection . js ' , <nl> + ' frontend / js / models / arangoDatabase . js ' , <nl> + ' frontend / js / models / arangoDocument . js ' , <nl> + ' frontend / js / models / arangoLog . js ' , <nl> + ' frontend / js / models / arangoStatistics . js ' , <nl> + ' frontend / js / models / arangoStatisticsDescription . js ' , <nl> + ' frontend / js / models / foxx . 
js ' , <nl> + ' frontend / js / models / notification . js ' , <nl> + ' frontend / js / models / graph . js ' , <nl> + ' clusterFrontend / js / models / clusterServer . js ' , <nl> + ' clusterFrontend / js / models / clusterCoordinator . js ' , <nl> + ' clusterFrontend / js / models / clusterDatabase . js ' , <nl> + ' clusterFrontend / js / models / clusterCollection . js ' , <nl> + ' clusterFrontend / js / models / clusterShard . js ' , <nl> + <nl> + / / Collections <nl> + ' frontend / js / collections / arangoCollections . js ' , <nl> + ' frontend / js / collections / arangoDocuments . js ' , <nl> + ' frontend / js / collections / arangoDocument . js ' , <nl> + ' frontend / js / collections / arangoDatabase . js ' , <nl> + ' frontend / js / collections / arangoLogs . js ' , <nl> + ' frontend / js / collections / arangoUsers . js ' , <nl> + ' frontend / js / collections / arangoStatisticsCollection . js ' , <nl> + ' frontend / js / collections / arangoStatisticsDescriptionCollection . js ' , <nl> + ' frontend / js / collections / foxxCollection . js ' , <nl> + ' frontend / js / collections / graphCollection . js ' , <nl> + ' clusterFrontend / js / collections / clusterServers . js ' , <nl> + ' clusterFrontend / js / collections / clusterCoordinators . js ' , <nl> + ' clusterFrontend / js / collections / clusterDatabases . js ' , <nl> + ' clusterFrontend / js / collections / clusterCollections . js ' , <nl> + ' clusterFrontend / js / collections / clusterShards . js ' , <nl> + ' frontend / js / collections / notificationCollection . js ' , <nl> + <nl> + / / Views <nl> + ' frontend / js / views / navigationView . js ' , <nl> + ' frontend / js / views / notificationView . js ' , <nl> + ' frontend / js / views / apiView . js ' , <nl> + ' frontend / js / views / footerView . js ' , <nl> + ' frontend / js / views / queryView . js ' , <nl> + ' frontend / js / views / shellView . js ' , <nl> + ' frontend / js / views / dashboardView . js ' , <nl> + ' frontend / js / views / collectionsView . js ' , <nl> + ' frontend / js / views / collectionView . js ' , <nl> + ' frontend / js / views / collectionInfoView . js ' , <nl> + ' frontend / js / views / newCollectionView . js ' , <nl> + ' frontend / js / views / collectionsItemView . js ' , <nl> + ' frontend / js / views / documentsView . js ' , <nl> + ' frontend / js / views / documentView . js ' , <nl> + ' frontend / js / views / logsView . js ' , <nl> + ' frontend / js / views / applicationsView . js ' , <nl> + ' frontend / js / views / foxxActiveView . js ' , <nl> + ' frontend / js / views / foxxInstalledView . js ' , <nl> + ' frontend / js / views / foxxEditView . js ' , <nl> + ' frontend / js / views / foxxMountView . js ' , <nl> + ' frontend / js / views / appDocumentationView . js ' , <nl> + ' frontend / js / views / graphView . js ' , <nl> + ' frontend / js / views / graphManagementView . js ' , <nl> + ' frontend / js / views / addNewGraphView . js ' , <nl> + ' frontend / js / views / deleteGraphView . js ' , <nl> + ' frontend / js / views / dbSelectionView . js ' , <nl> + ' frontend / js / views / editListEntryView . js ' , <nl> + ' frontend / js / views / loginView . js ' , <nl> + ' frontend / js / views / clusterDashboardView . js ' , <nl> + ' frontend / js / views / clusterOverviewView . js ' , <nl> + ' frontend / js / views / clusterServerView . js ' , <nl> + ' frontend / js / views / clusterCoordinatorView . js ' , <nl> + ' frontend / js / views / clusterDatabaseView . js ' , <nl> + ' frontend / js / views / clusterCollectionView . 
js ' , <nl> + ' frontend / js / views / clusterShardsView . js ' , <nl> + ' frontend / js / views / statisticBarView . js ' , <nl> + ' frontend / js / views / userBarView . js ' , <nl> + <nl> + / / Views Planner <nl> + ' clusterFrontend / js / views / planScenarioSelectorView . js ' , <nl> + ' clusterFrontend / js / views / planSymmetricView . js ' , <nl> + ' clusterFrontend / js / views / planTestView . js ' , <nl> + <nl> + / / Views cluster <nl> + ' clusterFrontend / js / views / showClusterView . js ' , <nl> + <nl> + / / Router <nl> + ' frontend / js / routers / router . js ' , <nl> + ' clusterFrontend / js / routers / clusterRouter . js ' , <nl> + <nl> + / / Specs <nl> + / / GraphViewer <nl> + ' test / specs / graphViewer / specColourMapper / colourMapperSpec . js ' , <nl> + ' test / specs / graphViewer / specWindowObjects / domObserverFactorySpec . js ' , <nl> + ' test / specs / graphViewer / specCommunityNode / communityNodeSpec . js ' , <nl> + ' test / specs / graphViewer / specAdapter / interfaceSpec . js ' , <nl> + ' test / specs / graphViewer / specAdapter / abstractAdapterSpec . js ' , <nl> + ' test / specs / graphViewer / specAdapter / jsonAdapterSpec . js ' , <nl> + ' test / specs / graphViewer / specAdapter / arangoAdapterSpec . js ' , <nl> + ' test / specs / graphViewer / specAdapter / foxxAdapterSpec . js ' , <nl> + ' test / specs / graphViewer / specAdapter / previewAdapterSpec . js ' , <nl> + ' test / specs / graphViewer / specAdapter / arangoAdapterUISpec . js ' , <nl> + ' test / specs / graphViewer / specNodeShaper / nodeShaperSpec . js ' , <nl> + ' test / specs / graphViewer / specNodeShaper / nodeShaperUISpec . js ' , <nl> + ' test / specs / graphViewer / specEdgeShaper / edgeShaperSpec . js ' , <nl> + ' test / specs / graphViewer / specEdgeShaper / edgeShaperUISpec . js ' , <nl> + ' test / specs / graphViewer / specForceLayouter / forceLayouterSpec . js ' , <nl> + ' test / specs / graphViewer / specForceLayouter / forceLayouterUISpec . js ' , <nl> + ' test / specs / graphViewer / specEvents / eventLibrarySpec . js ' , <nl> + ' test / specs / graphViewer / specEvents / eventDispatcherSpec . js ' , <nl> + ' test / specs / graphViewer / specEvents / eventDispatcherUISpec . js ' , <nl> + ' test / specs / graphViewer / specZoomManager / zoomManagerSpec . js ' , <nl> + ' test / specs / graphViewer / specGraphViewer / graphViewerSpec . js ' , <nl> + ' test / specs / graphViewer / specGraphViewer / graphViewerUISpec . js ' , <nl> + ' test / specs / graphViewer / specGraphViewer / graphViewerWidgetSpec . js ' , <nl> + ' test / specs / graphViewer / specGraphViewer / graphViewerPreviewSpec . js ' , <nl> + ' test / specs / graphViewer / specNodeReducer / nodeReducerSpec . js ' , <nl> + / / ' test / specs / graphViewer / specNodeReducer / modularityJoinerSpec . js ' , <nl> + / / ' test / specs / graphViewer / specWindowObjects / workerWrapperSpec . js ' , <nl> + ' test / specs / graphViewer / specContextMenu / contextMenuSpec . js ' , <nl> + / / Arango <nl> + ' test / specs / arango / arangoSpec . js ' , <nl> + <nl> + / / Models <nl> + ' test / specs / models / currentDatabaseSpec . js ' , <nl> + ' test / specs / models / graphSpec . js ' , <nl> + <nl> + / / Collections <nl> + ' test / specs / collections / clusterServersSpec . js ' , <nl> + ' test / specs / collections / clusterDatabasesSpec . js ' , <nl> + ' test / specs / collections / clusterCollectionsSpec . js ' , <nl> + ' test / specs / collections / clusterShardsSpec . 
js ' , <nl> + <nl> + <nl> + / / Views <nl> + ' test / specs / views / editListEntryViewSpec . js ' , <nl> + ' test / specs / views / collectionViewSpec . js ' , <nl> + ' test / specs / views / collectionsViewSpec . js ' , <nl> + ' test / specs / views / foxxEditViewSpec . js ' , <nl> + ' test / specs / views / dbSelectionViewSpec . js ' , <nl> + ' test / specs / views / navigationViewSpec . js ' , <nl> + ' test / specs / views / graphViewSpec . js ' , <nl> + ' test / specs / views / graphManagementViewSpec . js ' , <nl> + ' test / specs / views / addNewGraphViewSpec . js ' , <nl> + / * <nl> + ' test / specs / views / clusterDashboardViewSpec . js ' , <nl> + ' test / specs / views / clusterOverviewViewSpec . js ' , <nl> + ' test / specs / views / clusterServerViewSpec . js ' , <nl> + ' test / specs / views / clusterCoordinatorViewSpec . js ' , <nl> + ' test / specs / views / clusterDatabaseViewSpec . js ' , <nl> + ' test / specs / views / clusterCollectionViewSpec . js ' , <nl> + ' test / specs / views / clusterShardsViewSpec . js ' , <nl> + * / <nl> + / / Router <nl> + ' test / specs / router / routerSpec . js ' , <nl> + <nl> + / / Planner <nl> + / / Router <nl> + / / ' test / specs / planner / router / routerSpec . js ' , <nl> + / / View <nl> + / / ' test / specs / planner / views / planSymmetricViewSpec . js ' , <nl> + / / ' test / specs / planner / views / planTestViewSpec . js ' , <nl> + / / ' test / specs / planner / views / planScenarioSelectorViewSpec . js ' , <nl> + / / ' test / specJSLint / jsLintSpec . js ' <nl> + ] , <nl> + <nl> + <nl> + / / list of files to exclude <nl> + exclude : [ <nl> + <nl> + ] , <nl> + <nl> + preprocessors : { <nl> + ' frontend / js / * * / * * . js ' : [ ' coverage ' ] <nl> + } , <nl> + <nl> + / / test results reporter to use <nl> + / / possible values : ' dots ' , ' progress ' , ' junit ' , ' growl ' , ' coverage ' <nl> + reporters : [ ' dots ' , ' coverage ' ] , <nl> + <nl> + coverageReporter : { <nl> + type : ' html ' , <nl> + dir : ' coverage / ' <nl> + } , <nl> + <nl> + / / web server port <nl> + port : 9876 , <nl> + <nl> + <nl> + / / cli runner port <nl> + runnerPort : 9100 , <nl> + <nl> + / / enable / disable colors in the output ( reporters and logs ) <nl> + colors : false , <nl> + <nl> + <nl> + / / level of logging <nl> + / / possible values : karma . LOG_DISABLE | | karma . LOG_ERROR | | karma . LOG_WARN | | karma . LOG_INFO | | karma . LOG_DEBUG <nl> + logLevel : karma . LOG_INFO , <nl> + <nl> + <nl> + / / enable / disable watching file and executing tests whenever any file changes <nl> + autoWatch : false , <nl> + <nl> + <nl> + / / Start these browsers , currently available : <nl> + / / - Chrome <nl> + / / - ChromeCanary <nl> + / / - Firefox <nl> + / / - Opera <nl> + / / - Safari ( only Mac ) <nl> + / / - PhantomJS <nl> + / / - IE ( only Windows ) <nl> + browsers : [ " PhantomJS " ] , <nl> + <nl> + / / If browser does not capture in given timeout [ ms ] , kill it <nl> + captureTimeout : 60000 , <nl> + <nl> + <nl> + / / Continuous Integration mode <nl> + / / if true , it capture browsers , run tests and exit <nl> + singleRun : true <nl> + } ) ; <nl> + } ; <nl>
conf for local coverage report
arangodb/arangodb
416e2bcd041fe56b2ec9c02cdc8ba7231277414b
2014-03-24T15:15:39Z
mmm a / tensorflow / lite / toco / graph_transformations / propagate_fixed_sizes . cc <nl> ppp b / tensorflow / lite / toco / graph_transformations / propagate_fixed_sizes . cc <nl> void ProcessSparseToDenseOperator ( Model * model , SparseToDenseOperator * op ) { <nl> } else { <nl> const std : : vector < int64 > & output_shape_data = <nl> output_shape_array . GetBuffer < ArrayDataType : : kInt64 > ( ) . data ; <nl> - std : : copy ( <nl> + / / explicitly cast elements to int in order to avoid MSVC warnings about <nl> + / / narrowing conversion . <nl> + std : : transform ( <nl> output_shape_data . begin ( ) , output_shape_data . end ( ) , <nl> - std : : back_inserter ( * output_array . mutable_shape ( ) - > mutable_dims ( ) ) ) ; <nl> + std : : back_inserter ( * output_array . mutable_shape ( ) - > mutable_dims ( ) ) , <nl> + [ ] ( const int64 dim ) { return static_cast < int > ( dim ) ; } ) ; <nl> } <nl> } <nl> <nl> mmm a / tensorflow / lite / toco / graph_transformations / resolve_constant_tile . cc <nl> ppp b / tensorflow / lite / toco / graph_transformations / resolve_constant_tile . cc <nl> std : : pair < int , int > TileOneDimension ( const Shape & in_dimensions , <nl> CopyMultipleTimes ( out_data , total_tiled_stride_size , <nl> multipliers [ dimension ] - 1 , <nl> out_data + total_tiled_stride_size ) ; <nl> - return std : : make_pair ( total_stride_size , <nl> - total_tiled_stride_size * multipliers [ dimension ] ) ; <nl> + return std : : make_pair ( <nl> + total_stride_size , <nl> + static_cast < int > ( total_tiled_stride_size * multipliers [ dimension ] ) ) ; <nl> } <nl> <nl> template < ArrayDataType Type > <nl> mmm a / tensorflow / lite / toco / graph_transformations / unroll_batch_matmul . cc <nl> ppp b / tensorflow / lite / toco / graph_transformations / unroll_batch_matmul . cc <nl> : : tensorflow : : Status UnrollBatchMatMul : : Run ( Model * model , std : : size_t op_index , <nl> <nl> / / Reshape the rank - 3 Tensor into the correct output shape . <nl> const auto & result_batch_shape = bcast . output_batch_shape ( ) . dim_sizes ( ) ; <nl> - std : : vector < int > result_shape ( result_batch_shape . begin ( ) , <nl> - result_batch_shape . end ( ) ) ; <nl> + std : : vector < int > result_shape ; <nl> + / / Explicitly cast 64 - bit sizes to int in order to avoid MSVC warnings . <nl> + std : : transform ( result_batch_shape . begin ( ) , result_batch_shape . end ( ) , <nl> + std : : back_inserter ( result_shape ) , <nl> + [ ] ( const int64 dim ) { return static_cast < int > ( dim ) ; } ) ; <nl> result_shape . push_back ( input_array_a . shape ( ) . dims ( dims_a - 2 ) ) ; <nl> result_shape . push_back ( input_array_b . shape ( ) . dims ( dims_b - 1 ) ) ; <nl> <nl> mmm a / tensorflow / lite / toco / import_tensorflow . cc <nl> ppp b / tensorflow / lite / toco / import_tensorflow . cc <nl> string CreateConstArray ( Model * model , string const & name , <nl> string array_name = toco : : AvailableArrayName ( * model , name ) ; <nl> auto & array = model - > GetOrCreateArray ( array_name ) ; <nl> array . data_type = T ; <nl> - array . mutable_shape ( ) - > mutable_dims ( ) - > emplace_back ( data . size ( ) ) ; <nl> + array . mutable_shape ( ) - > mutable_dims ( ) - > emplace_back ( <nl> + static_cast < int > ( data . size ( ) ) ) ; <nl> array . GetMutableBuffer < T > ( ) . data = data ; <nl> return array_name ; <nl> } <nl> mmm a / tensorflow / lite / tools / optimize / operator_property . cc <nl> ppp b / tensorflow / lite / tools / optimize / operator_property . 
cc <nl> OperatorProperty GetOperatorProperty ( const ModelT * model , int subgraph_index , <nl> / / LogSoftmax requires output with 16 / 256 as scale and 127 as zero point . <nl> TensorProperty tensor_property ; <nl> tensor_property . restriction = true ; <nl> - tensor_property . restricted_value = { 16 . 0 / 256 . 0 , 127 } ; <nl> + tensor_property . restricted_value = { 16 . 0f / 256 . 0f , 127 } ; <nl> property . outputs = { { 0 , tensor_property } } ; <nl> property . version = 2 ; <nl> break ; <nl> OperatorProperty GetOperatorProperty ( const ModelT * model , int subgraph_index , <nl> / / Logistic requires output with 1 / 256 as scale and - 128 as zero point . <nl> TensorProperty tensor_property ; <nl> tensor_property . restriction = true ; <nl> - tensor_property . restricted_value = { 1 / 256 . 0 , - 128 } ; <nl> + tensor_property . restricted_value = { 1 / 256 . 0f , - 128 } ; <nl> property . outputs = { { 0 , tensor_property } } ; <nl> property . version = 2 ; <nl> break ; <nl> OperatorProperty GetOperatorProperty ( const ModelT * model , int subgraph_index , <nl> / / L2 Norm requires output with 1 / 128 as scale and 0 as zero point . <nl> TensorProperty tensor_property ; <nl> tensor_property . restriction = true ; <nl> - tensor_property . restricted_value = { 1 / 128 . 0 , 0 } ; <nl> + tensor_property . restricted_value = { 1 / 128 . 0f , 0 } ; <nl> property . outputs = { { 0 , tensor_property } } ; <nl> property . version = 2 ; <nl> break ; <nl> OperatorProperty GetOperatorProperty ( const ModelT * model , int subgraph_index , <nl> / / Softmax requires output with 1 / 256 as scale and - 128 as zero point . <nl> TensorProperty tensor_property ; <nl> tensor_property . restriction = true ; <nl> - tensor_property . restricted_value = { 1 / 256 . 0 , - 128 } ; <nl> + tensor_property . restricted_value = { 1 / 256 . 0f , - 128 } ; <nl> property . outputs = { { 0 , tensor_property } } ; <nl> property . version = 2 ; <nl> break ; <nl> OperatorProperty GetOperatorProperty ( const ModelT * model , int subgraph_index , <nl> / / Tanh requires output with 1 / 128 as scale and 0 as zero point . <nl> TensorProperty tensor_property ; <nl> tensor_property . restriction = true ; <nl> - tensor_property . restricted_value = { 1 / 128 . 0 , 0 } ; <nl> + tensor_property . restricted_value = { 1 / 128 . 0f , 0 } ; <nl> property . outputs = { { 0 , tensor_property } } ; <nl> property . version = 2 ; <nl> break ; <nl> mmm a / tensorflow / lite / tools / optimize / operator_property . h <nl> ppp b / tensorflow / lite / tools / optimize / operator_property . h <nl> struct TensorProperty { <nl> / / Constraints . <nl> bool restriction = false ; <nl> / / scale / zero_point hardcoded . <nl> - std : : pair < float , int > restricted_value = { 0 . 0 , 0 } ; <nl> + std : : pair < float , int > restricted_value = { 0 . 0f , 0 } ; <nl> <nl> / / Use derived scale . <nl> bool use_derived_scale = false ; <nl> mmm a / tensorflow / lite / tools / optimize / sparsity / format_converter . cc <nl> ppp b / tensorflow / lite / tools / optimize / sparsity / format_converter . cc <nl> void FormatConverter < T > : : Populate ( const T * src_data , std : : vector < int > indices , <nl> template < typename T > <nl> TfLiteStatus FormatConverter < T > : : SparseToDense ( const T * src_data ) { <nl> data_ . resize ( dense_size_ ) ; <nl> - std : : fill ( data_ . begin ( ) , data_ . end ( ) , 0 ) ; <nl> + std : : fill ( data_ . begin ( ) , data_ . end ( ) , T ( 0 ) ) ; <nl> <nl> int total_rank = traversal_order_ . 
size ( ) ; <nl> int src_data_ptr = 0 ; <nl>
Be more pedantic about type conversions to avoid warning on Windows .
tensorflow/tensorflow
24fe4c1a0d2fbf614854da0770ddb01229d05bba
2020-02-28T00:30:56Z
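The tensorflow commit above swaps std::copy for std::transform with an explicit static_cast so that narrowing int64 values into int no longer trips MSVC's conversion warnings. A minimal sketch of the same pattern, with a hypothetical helper name and standard types standing in for TensorFlow's int64 alias:

#include <algorithm>
#include <cstdint>
#include <iterator>
#include <vector>

// Copying 64-bit dims straight into a vector<int> draws MSVC's possible-loss-
// of-data warning (C4244); the lambda makes the narrowing conversion explicit.
std::vector<int> ToIntDims(const std::vector<std::int64_t>& dims64) {
  std::vector<int> dims;
  dims.reserve(dims64.size());
  std::transform(dims64.begin(), dims64.end(), std::back_inserter(dims),
                 [](std::int64_t dim) { return static_cast<int>(dim); });
  return dims;
}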
mmm a / trunk / src / core / srs_core_amf0 . cpp <nl> ppp b / trunk / src / core / srs_core_amf0 . cpp <nl> int srs_amf0_read_utf8 ( SrsStream * stream , std : : string & value ) <nl> char ch = * ( str . data ( ) + i ) ; <nl> if ( ( ch & 0x80 ) ! = 0 ) { <nl> ret = ERROR_RTMP_AMF0_DECODE ; <nl> - srs_error ( " only support utf8 - 1 , 0x00 - 0x7F , actual is % # x . ret = % d " , ( int ) ch , ret ) ; <nl> - return ret ; <nl> + srs_error ( " ignored . only support utf8 - 1 , 0x00 - 0x7F , actual is % # x . ret = % d " , ( int ) ch , ret ) ; <nl> + ret = ERROR_SUCCESS ; <nl> } <nl> } <nl> <nl> mmm a / trunk / src / core / srs_core_client . cpp <nl> ppp b / trunk / src / core / srs_core_client . cpp <nl> int SrsClient : : streaming_publish ( ) <nl> <nl> SrsAutoFree ( SrsMessage , msg , false ) ; <nl> <nl> + / / process onMetaData <nl> + if ( msg - > header . is_amf0_data ( ) | | msg - > header . is_amf3_data ( ) ) { <nl> + if ( ( ret = msg - > decode_packet ( ) ) ! = ERROR_SUCCESS ) { <nl> + srs_error ( " decode onMetaData message failed . ret = % d " , ret ) ; <nl> + return ret ; <nl> + } <nl> + <nl> + SrsPacket * pkt = msg - > get_packet ( ) ; <nl> + if ( dynamic_cast < SrsOnMetaDataPacket * > ( pkt ) ) { <nl> + SrsOnMetaDataPacket * metadata = dynamic_cast < SrsOnMetaDataPacket * > ( pkt ) ; <nl> + } <nl> + <nl> + srs_trace ( " ignore AMF0 / AMF3 data message . " ) ; <nl> + continue ; <nl> + } <nl> + <nl> / / process UnPublish event . <nl> if ( msg - > header . is_amf0_command ( ) | | msg - > header . is_amf3_command ( ) ) { <nl> if ( ( ret = msg - > decode_packet ( ) ) ! = ERROR_SUCCESS ) { <nl> int SrsClient : : streaming_publish ( ) <nl> } <nl> <nl> srs_trace ( " ignore AMF0 / AMF3 command message . " ) ; <nl> + continue ; <nl> } <nl> } <nl> <nl> mmm a / trunk / src / core / srs_core_protocol . cpp <nl> ppp b / trunk / src / core / srs_core_protocol . cpp <nl> messages . <nl> # define RTMP_AMF0_COMMAND_UNPUBLISH " FCUnpublish " <nl> # define RTMP_AMF0_COMMAND_PUBLISH " publish " <nl> # define RTMP_AMF0_DATA_SAMPLE_ACCESS " | RtmpSampleAccess " <nl> + # define RTMP_AMF0_DATA_SET_DATAFRAME " @ setDataFrame " <nl> + # define RTMP_AMF0_DATA_ON_METADATA " onMetaData " <nl> <nl> / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> bool SrsMessageHeader : : is_amf0_command ( ) <nl> return message_type = = RTMP_MSG_AMF0CommandMessage ; <nl> } <nl> <nl> + bool SrsMessageHeader : : is_amf0_data ( ) <nl> + { <nl> + return message_type = = RTMP_MSG_AMF0DataMessage ; <nl> + } <nl> + <nl> bool SrsMessageHeader : : is_amf3_command ( ) <nl> { <nl> return message_type = = RTMP_MSG_AMF3CommandMessage ; <nl> } <nl> <nl> + bool SrsMessageHeader : : is_amf3_data ( ) <nl> + { <nl> + return message_type = = RTMP_MSG_AMF3DataMessage ; <nl> + } <nl> + <nl> bool SrsMessageHeader : : is_window_ackledgement_size ( ) <nl> { <nl> return message_type = = RTMP_MSG_WindowAcknowledgementSize ; <nl> int SrsMessage : : decode_packet ( ) <nl> srs_verbose ( " decode stream initialized success " ) ; <nl> <nl> / / decode specified packet type <nl> - if ( header . is_amf0_command ( ) | | header . is_amf3_command ( ) ) { <nl> + if ( header . is_amf0_command ( ) | | header . is_amf3_command ( ) | | header . is_amf0_data ( ) | | header . 
is_amf3_data ( ) ) { <nl> srs_verbose ( " start to decode AMF0 / AMF3 command message . " ) ; <nl> <nl> / / skip 1bytes to decode the amf3 command . <nl> int SrsMessage : : decode_packet ( ) <nl> srs_info ( " decode the AMF0 / AMF3 command ( unpublish message ) . " ) ; <nl> packet = new SrsFMLEStartPacket ( ) ; <nl> return packet - > decode ( stream ) ; <nl> + } else if ( command = = RTMP_AMF0_DATA_SET_DATAFRAME | | command = = RTMP_AMF0_DATA_ON_METADATA ) { <nl> + srs_info ( " decode the AMF0 / AMF3 data ( onMetaData message ) . " ) ; <nl> + packet = new SrsOnMetaDataPacket ( ) ; <nl> + return packet - > decode ( stream ) ; <nl> } <nl> <nl> / / default packet to drop message . <nl> int SrsConnectAppResPacket : : encode_packet ( SrsStream * stream ) <nl> } <nl> srs_verbose ( " encode info success . " ) ; <nl> <nl> - <nl> srs_info ( " encode connect app response packet success . " ) ; <nl> <nl> return ret ; <nl> int SrsSampleAccessPacket : : encode_packet ( SrsStream * stream ) <nl> return ret ; <nl> } <nl> <nl> + SrsOnMetaDataPacket : : SrsOnMetaDataPacket ( ) <nl> + { <nl> + name = RTMP_AMF0_DATA_ON_METADATA ; <nl> + metadata = new SrsAmf0Object ( ) ; <nl> + } <nl> + <nl> + SrsOnMetaDataPacket : : ~ SrsOnMetaDataPacket ( ) <nl> + { <nl> + srs_freep ( metadata ) ; <nl> + } <nl> + <nl> + int SrsOnMetaDataPacket : : decode ( SrsStream * stream ) <nl> + { <nl> + int ret = ERROR_SUCCESS ; <nl> + <nl> + if ( ( ret = srs_amf0_read_string ( stream , name ) ) ! = ERROR_SUCCESS ) { <nl> + srs_error ( " decode metadata name failed . ret = % d " , ret ) ; <nl> + return ret ; <nl> + } <nl> + <nl> + / / ignore the @ setDataFrame <nl> + if ( name = = RTMP_AMF0_DATA_SET_DATAFRAME ) { <nl> + if ( ( ret = srs_amf0_read_string ( stream , name ) ) ! = ERROR_SUCCESS ) { <nl> + srs_error ( " decode metadata name failed . ret = % d " , ret ) ; <nl> + return ret ; <nl> + } <nl> + } <nl> + <nl> + srs_verbose ( " decode metadata name success . name = % s " , name . c_str ( ) ) ; <nl> + <nl> + if ( ( ret = srs_amf0_read_object ( stream , metadata ) ) ! = ERROR_SUCCESS ) { <nl> + srs_error ( " decode metadata metadata failed . ret = % d " , ret ) ; <nl> + return ret ; <nl> + } <nl> + <nl> + srs_info ( " decode metadata success " ) ; <nl> + <nl> + return ret ; <nl> + } <nl> + <nl> + int SrsOnMetaDataPacket : : get_perfer_cid ( ) <nl> + { <nl> + return RTMP_CID_OverConnection2 ; <nl> + } <nl> + <nl> + int SrsOnMetaDataPacket : : get_message_type ( ) <nl> + { <nl> + return RTMP_MSG_AMF0DataMessage ; <nl> + } <nl> + <nl> + int SrsOnMetaDataPacket : : get_size ( ) <nl> + { <nl> + return srs_amf0_get_string_size ( name ) + srs_amf0_get_object_size ( metadata ) ; <nl> + } <nl> + <nl> + int SrsOnMetaDataPacket : : encode_packet ( SrsStream * stream ) <nl> + { <nl> + int ret = ERROR_SUCCESS ; <nl> + <nl> + if ( ( ret = srs_amf0_write_string ( stream , name ) ) ! = ERROR_SUCCESS ) { <nl> + srs_error ( " encode name failed . ret = % d " , ret ) ; <nl> + return ret ; <nl> + } <nl> + srs_verbose ( " encode name success . " ) ; <nl> + <nl> + if ( ( ret = srs_amf0_write_object ( stream , metadata ) ) ! = ERROR_SUCCESS ) { <nl> + srs_error ( " encode metadata failed . ret = % d " , ret ) ; <nl> + return ret ; <nl> + } <nl> + srs_verbose ( " encode metadata success . " ) ; <nl> + <nl> + srs_info ( " encode onMetaData packet success . 
" ) ; <nl> + return ret ; <nl> + } <nl> + <nl> SrsSetWindowAckSizePacket : : SrsSetWindowAckSizePacket ( ) <nl> { <nl> ackowledgement_window_size = 0 ; <nl> mmm a / trunk / src / core / srs_core_protocol . hpp <nl> ppp b / trunk / src / core / srs_core_protocol . hpp <nl> struct SrsMessageHeader <nl> virtual ~ SrsMessageHeader ( ) ; <nl> <nl> bool is_amf0_command ( ) ; <nl> + bool is_amf0_data ( ) ; <nl> bool is_amf3_command ( ) ; <nl> + bool is_amf3_data ( ) ; <nl> bool is_window_ackledgement_size ( ) ; <nl> bool is_set_chunk_size ( ) ; <nl> } ; <nl> class SrsSampleAccessPacket : public SrsPacket <nl> virtual int encode_packet ( SrsStream * stream ) ; <nl> } ; <nl> <nl> + / * * <nl> + * the stream metadata . <nl> + * FMLE : @ setDataFrame <nl> + * others : onMetaData <nl> + * / <nl> + class SrsOnMetaDataPacket : public SrsPacket <nl> + { <nl> + private : <nl> + typedef SrsPacket super ; <nl> + protected : <nl> + virtual const char * get_class_name ( ) <nl> + { <nl> + return CLASS_NAME_STRING ( SrsOnMetaDataPacket ) ; <nl> + } <nl> + public : <nl> + std : : string name ; <nl> + SrsAmf0Object * metadata ; <nl> + public : <nl> + SrsOnMetaDataPacket ( ) ; <nl> + virtual ~ SrsOnMetaDataPacket ( ) ; <nl> + public : <nl> + virtual int decode ( SrsStream * stream ) ; <nl> + public : <nl> + virtual int get_perfer_cid ( ) ; <nl> + public : <nl> + virtual int get_message_type ( ) ; <nl> + protected : <nl> + virtual int get_size ( ) ; <nl> + virtual int encode_packet ( SrsStream * stream ) ; <nl> + } ; <nl> + <nl> / * * <nl> * 5 . 5 . Window Acknowledgement Size ( 5 ) <nl> * The client or the server sends this message to inform the peer which <nl>
support decode onMetaData .
ossrs/srs
a24d0ecf0cfa0d22bf707e24812e1ded312b2e65
2013-10-22T09:26:05Z
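The srs commit above lets decode_packet handle AMF0/AMF3 data messages and strips FMLE's optional "@setDataFrame" wrapper before reading the onMetaData object. A minimal sketch of that unwrapping logic, with a toy reader standing in for srs_amf0_read_string / srs_amf0_read_object over an SrsStream:

#include <deque>
#include <string>

// Toy stand-in for SRS's AMF0 stream reader; the real code reads from an
// SrsStream and returns error codes instead of bool.
struct Amf0Reader {
  std::deque<std::string> strings;  // queued string values for the sketch
  bool read_string(std::string* out) {
    if (strings.empty()) return false;
    *out = strings.front();
    strings.pop_front();
    return true;
  }
  bool read_object() { return true; }  // metadata object parsing elided
};

// FMLE publishes metadata as "@setDataFrame" followed by "onMetaData"; other
// encoders send "onMetaData" directly, so the wrapper is skipped when present.
bool decode_metadata(Amf0Reader& in, std::string* name) {
  if (!in.read_string(name)) return false;
  if (*name == "@setDataFrame" && !in.read_string(name)) return false;
  return in.read_object();
}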
mmm a / src / qml / bulk - operations / BulkOperationsDialog . qml <nl> ppp b / src / qml / bulk - operations / BulkOperationsDialog . qml <nl> Dialog { <nl> text : root . operationName = = " rdb_import " ? qsTr ( " Matched keys : " ) : qsTr ( " Affected keys : " ) <nl> } <nl> <nl> - Rectangle { <nl> - id : listContainer <nl> + FastTextView { <nl> + id : affectedKeysListView <nl> color : " # eee " <nl> <nl> border . color : " # ccc " <nl> border . width : 1 <nl> <nl> Layout . fillWidth : true <nl> - Layout . fillHeight : true <nl> - <nl> - ScrollView { <nl> - anchors . fill : parent <nl> - anchors . margins : 10 <nl> - <nl> - ScrollBar . vertical . policy : ScrollBar . AlwaysOn <nl> - <nl> - ListView { <nl> - id : affectedKeysListView <nl> - width : listContainer . width * 0 . 9 <nl> - <nl> - delegate : Text { text : ( index + 1 ) + " . " + modelData } <nl> - } <nl> - } <nl> + Layout . fillHeight : true <nl> <nl> Connections { <nl> target : bulkOperations <nl> new file mode 100644 <nl> index 00000000 . . ca57a62b <nl> mmm / dev / null <nl> ppp b / src / qml / common / FastTextView . qml <nl> <nl> + import QtQuick 2 . 0 <nl> + import QtQuick . Controls 2 . 13 <nl> + <nl> + Rectangle { <nl> + id : root <nl> + <nl> + property alias model : listView . model <nl> + property alias delegate : listView . delegate <nl> + <nl> + ScrollView { <nl> + anchors . fill : parent <nl> + anchors . margins : 10 <nl> + <nl> + ScrollBar . vertical . policy : ScrollBar . AlwaysOn <nl> + <nl> + ListView { <nl> + id : listView <nl> + width : root . width * 0 . 9 <nl> + <nl> + delegate : Text { text : ( index + 1 ) + " . " + modelData } <nl> + } <nl> + } <nl> + <nl> + MouseArea { <nl> + anchors . fill : parent <nl> + acceptedButtons : Qt . RightButton <nl> + <nl> + onClicked : { <nl> + menu . x = mouseX <nl> + menu . y = mouseY <nl> + menu . open ( ) <nl> + } <nl> + } <nl> + <nl> + Menu { <nl> + id : menu <nl> + z : 255 <nl> + <nl> + MenuItem { <nl> + text : " Copy " <nl> + icon . source : " qrc : / images / copy . svg " <nl> + icon . color : " transparent " <nl> + <nl> + onTriggered : { <nl> + var allStrings = " " ; <nl> + for ( var id in affectedKeysListView . model ) { <nl> + allStrings + = affectedKeysListView . model [ id ] + " \ n " <nl> + } <nl> + qmlUtils . copyToClipboard ( allStrings ) <nl> + allStrings = " " <nl> + } <nl> + } <nl> + } <nl> + } <nl> mmm a / src / qml / qml . qrc <nl> ppp b / src / qml / qml . qrc <nl> <nl> < file > common / BetterSpinBox . qml < / file > <nl> < file > common / BetterComboBox . qml < / file > <nl> < file > value - editor / editors / formatters / ValueFormatters . qml < / file > <nl> + < file > common / FastTextView . qml < / file > <nl> < / qresource > <nl> < / RCC > <nl>
Add FastTextView
uglide/RedisDesktopManager
eb44602ad82b80545434379efb12f679b1713632
2020-04-17T12:21:34Z
mmm a / tests / test_other . py <nl> ppp b / tests / test_other . py <nl> def test_emcc ( self ) : <nl> if opt_level > = 2 and ' - g ' in params : <nl> assert re . search ( ' HEAP8 \ [ \ $ ? \ w + ? \ + ? \ ( + \ $ ? \ w + ? ' , generated ) or re . search ( ' HEAP8 \ [ HEAP32 \ [ ' , generated ) , ' eliminator should create compound expressions , and fewer one - time vars ' # also in - O1 , but easier to test in - O2 <nl> assert ( ' _puts ( ' in generated ) = = ( opt_level > = 1 ) , ' with opt > = 1 , llvm opts are run and they should optimize printf to puts ' <nl> - if opt_level = = 0 or ' - g ' in params : assert ' function _main ( ) { ' in generated , ' Should be unminified , including whitespace ' <nl> + if opt_level = = 0 or ' - g ' in params : assert ' function _main ( ) { ' in generated or ' function _main ( ) { ' in generated , ' Should be unminified ' <nl> elif opt_level > = 2 : assert ( ' function _main ( ) { ' in generated or ' " use asm " ; var a = ' in generated ) , ' Should be whitespace - minified ' <nl> <nl> # emcc - s RELOOP = 1 src . cpp = = > should pass - s to emscripten . py . - - typed - arrays is a convenient alias for - s USE_TYPED_ARRAYS <nl>
update test_emcc
emscripten-core/emscripten
daae8bdf7894bb4d4e9461b5b841c14019c6559a
2013-10-03T17:53:57Z
mmm a / atom / renderer / api / atom_api_web_frame . cc <nl> ppp b / atom / renderer / api / atom_api_web_frame . cc <nl> <nl> <nl> # include " atom / renderer / api / atom_api_web_frame . h " <nl> <nl> - / / This defines are required by SchemeRegistry . h . <nl> - # define ALWAYS_INLINE inline <nl> - # define OS ( WTF_FEATURE ) ( defined WTF_OS_ # # WTF_FEATURE & & WTF_OS_ # # WTF_FEATURE ) / / NOLINT <nl> - # define USE ( WTF_FEATURE ) ( defined WTF_USE_ # # WTF_FEATURE & & WTF_USE_ # # WTF_FEATURE ) / / NOLINT <nl> - # define ENABLE ( WTF_FEATURE ) ( defined ENABLE_ # # WTF_FEATURE & & ENABLE_ # # WTF_FEATURE ) / / NOLINT <nl> - <nl> # include " atom / common / native_mate_converters / gfx_converter . h " <nl> # include " atom / common / native_mate_converters / string16_converter . h " <nl> # include " atom / renderer / api / atom_api_spell_check_client . h " <nl> <nl> # include " native_mate / object_template_builder . h " <nl> # include " third_party / WebKit / public / web / WebDocument . h " <nl> # include " third_party / WebKit / public / web / WebLocalFrame . h " <nl> + # include " third_party / WebKit / public / web / WebSecurityPolicy . h " <nl> # include " third_party / WebKit / public / web / WebView . h " <nl> - # include " third_party / WebKit / Source / platform / weborigin / SchemeRegistry . h " <nl> <nl> # include " atom / common / node_includes . h " <nl> <nl> - namespace mate { <nl> - <nl> - template < > <nl> - struct Converter < WTF : : String > { <nl> - static bool FromV8 ( v8 : : Isolate * isolate , <nl> - v8 : : Local < v8 : : Value > val , <nl> - WTF : : String * out ) { <nl> - if ( ! val - > IsString ( ) ) <nl> - return false ; <nl> - <nl> - v8 : : String : : Value s ( val ) ; <nl> - * out = WTF : : String ( reinterpret_cast < const base : : char16 * > ( * s ) , s . length ( ) ) ; <nl> - return true ; <nl> - } <nl> - } ; <nl> - <nl> - } / / namespace mate <nl> - <nl> namespace atom { <nl> <nl> namespace api { <nl> void WebFrame : : SetSpellCheckProvider ( mate : : Arguments * args , <nl> web_frame_ - > view ( ) - > setSpellCheckClient ( spell_check_client_ . get ( ) ) ; <nl> } <nl> <nl> + void WebFrame : : RegisterURLSchemeAsSecure ( const std : : string & scheme ) { <nl> + / / Register scheme to secure list ( https , wss , data ) . <nl> + blink : : WebSecurityPolicy : : registerURLSchemeAsSecure ( <nl> + blink : : WebString : : fromUTF8 ( scheme ) ) ; <nl> + } <nl> + <nl> + void WebFrame : : RegisterURLSchemeAsBypassingCsp ( const std : : string & scheme ) { <nl> + / / Register scheme to bypass pages ' s Content Security Policy . <nl> + blink : : WebSecurityPolicy : : registerURLSchemeAsBypassingContentSecurityPolicy ( <nl> + blink : : WebString : : fromUTF8 ( scheme ) ) ; <nl> + } <nl> + <nl> mate : : ObjectTemplateBuilder WebFrame : : GetObjectTemplateBuilder ( <nl> v8 : : Isolate * isolate ) { <nl> return mate : : ObjectTemplateBuilder ( isolate ) <nl> mate : : ObjectTemplateBuilder WebFrame : : GetObjectTemplateBuilder ( <nl> . SetMethod ( " attachGuest " , & WebFrame : : AttachGuest ) <nl> . SetMethod ( " setSpellCheckProvider " , & WebFrame : : SetSpellCheckProvider ) <nl> . SetMethod ( " registerUrlSchemeAsSecure " , <nl> - & blink : : SchemeRegistry : : registerURLSchemeAsSecure ) ; <nl> + & WebFrame : : RegisterURLSchemeAsSecure ) <nl> + . SetMethod ( " registerUrlSchemeAsBypassingCsp " , <nl> + & WebFrame : : RegisterURLSchemeAsBypassingCsp ) ; <nl> } <nl> <nl> / / static <nl> mmm a / atom / renderer / api / atom_api_web_frame . 
h <nl> ppp b / atom / renderer / api / atom_api_web_frame . h <nl> class WebFrame : public mate : : Wrappable { <nl> bool auto_spell_correct_turned_on , <nl> v8 : : Local < v8 : : Object > provider ) ; <nl> <nl> + void RegisterURLSchemeAsSecure ( const std : : string & scheme ) ; <nl> + void RegisterURLSchemeAsBypassingCsp ( const std : : string & scheme ) ; <nl> + <nl> / / mate : : Wrappable : <nl> virtual mate : : ObjectTemplateBuilder GetObjectTemplateBuilder ( <nl> v8 : : Isolate * isolate ) ; <nl> mmm a / docs / api / web - frame . md <nl> ppp b / docs / api / web - frame . md <nl> require ( ' web - frame ' ) . setSpellCheckProvider ( " en - US " , true , { <nl> <nl> * ` scheme ` String <nl> <nl> - Sets the ` scheme ` as secure scheme . <nl> + Registers the ` scheme ` as secure scheme . <nl> <nl> Secure schemes do not trigger mixed content warnings . For example , ` https ` and <nl> ` data ` are secure schemes because they cannot be corrupted by active network <nl> attackers . <nl> <nl> + # # webFrame . registerUrlSchemeAsBypassingCsp ( scheme ) <nl> + <nl> + * ` scheme ` String <nl> + <nl> + Resources will be loaded from this ` scheme ` regardless of <nl> + page ' s Content Security Policy . <nl> + <nl> [ spellchecker ] : https : / / github . com / atom / node - spellchecker <nl>
webFrame : api to make scheme bypass CSP
electron/electron
92ea533aee5fd2f16334796e8d8738e2b3568756
2015-07-30T17:06:02Z
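The electron commit above exposes Blink's scheme registration to the renderer so a custom protocol can skip mixed-content checks and the page's CSP. A minimal sketch of the underlying calls, assuming the WebSecurityPolicy/WebString headers of the Blink tree the diff targets and a hypothetical helper intended to run once, early in the renderer process:

#include <string>
#include "third_party/WebKit/public/platform/WebString.h"
#include "third_party/WebKit/public/web/WebSecurityPolicy.h"

// Secure schemes avoid mixed-content warnings; CSP-bypassing schemes load even
// when the page's Content-Security-Policy would otherwise block them.
void RegisterAppScheme(const std::string& scheme) {
  const blink::WebString web_scheme = blink::WebString::fromUTF8(scheme);
  blink::WebSecurityPolicy::registerURLSchemeAsSecure(web_scheme);
  blink::WebSecurityPolicy::registerURLSchemeAsBypassingContentSecurityPolicy(
      web_scheme);
}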
mmm a / src / mongo / db / repl / repl_coordinator_impl . cpp <nl> ppp b / src / mongo / db / repl / repl_coordinator_impl . cpp <nl> namespace { <nl> } <nl> <nl> StatusWith < OpTime > lastOpTimeStatus = _externalState - > loadLastOpTime ( txn ) ; <nl> - OpTime lastOpTime ( 0 , 0 ) ; <nl> - if ( ! lastOpTimeStatus . isOK ( ) ) { <nl> - warning ( ) < < " Failed to load timestamp of most recently applied operation ; " < < <nl> - lastOpTimeStatus . getStatus ( ) ; <nl> - } <nl> - else { <nl> - lastOpTime = lastOpTimeStatus . getValue ( ) ; <nl> - } <nl> <nl> / / Use a callback here , because _finishLoadLocalConfig calls isself ( ) which requires <nl> / / that the server ' s networking layer be up and running and accepting connections , which <nl> namespace { <nl> this , <nl> stdx : : placeholders : : _1 , <nl> localConfig , <nl> - lastOpTime ) ) ; <nl> + lastOpTimeStatus ) ) ; <nl> return false ; <nl> } <nl> <nl> void ReplicationCoordinatorImpl : : _finishLoadLocalConfig ( <nl> const ReplicationExecutor : : CallbackData & cbData , <nl> const ReplicaSetConfig & localConfig , <nl> - OpTime lastOpTime ) { <nl> + const StatusWith < OpTime > & lastOpTimeStatus ) { <nl> if ( ! cbData . status . isOK ( ) ) { <nl> LOG ( 1 ) < < " Loading local replica set configuration failed due to " < < cbData . status ; <nl> return ; <nl> namespace { <nl> myIndex = StatusWith < int > ( - 1 ) ; <nl> } <nl> <nl> + / / Do not check optime , if this node is an arbiter . <nl> + bool isArbiter = myIndex . getValue ( ) ! = - 1 & & <nl> + localConfig . getMemberAt ( myIndex . getValue ( ) ) . isArbiter ( ) ; <nl> + OpTime lastOpTime ( 0 , 0 ) ; <nl> + if ( ! isArbiter ) { <nl> + if ( ! lastOpTimeStatus . isOK ( ) ) { <nl> + warning ( ) < < " Failed to load timestamp of most recently applied operation ; " < < <nl> + lastOpTimeStatus . getStatus ( ) ; <nl> + } <nl> + else { <nl> + lastOpTime = lastOpTimeStatus . getValue ( ) ; <nl> + } <nl> + } <nl> + <nl> boost : : unique_lock < boost : : mutex > lk ( _mutex ) ; <nl> invariant ( _rsConfigState = = kConfigStartingUp ) ; <nl> const PostMemberStateUpdateAction action = <nl> mmm a / src / mongo / db / repl / repl_coordinator_impl . h <nl> ppp b / src / mongo / db / repl / repl_coordinator_impl . h <nl> namespace repl { <nl> * / <nl> void _finishLoadLocalConfig ( const ReplicationExecutor : : CallbackData & cbData , <nl> const ReplicaSetConfig & localConfig , <nl> - OpTime lastOpTime ) ; <nl> + const StatusWith < OpTime > & lastOpTimeStatus ) ; <nl> <nl> / * * <nl> * Callback that finishes the work of processReplSetInitiate ( ) inside the replication <nl>
SERVER - 16220 prevent arbiters from warning about most recently applied optime on startup
mongodb/mongo
581f911b61dd3a41e5048d6129015dbfbaa158f6
2014-12-11T17:50:24Z
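The mongodb commit above defers interpreting the loaded optime until the node's config index is known, so arbiters neither warn about nor record a last applied optime. A minimal sketch of that guard, with simplified stand-ins for StatusWith<OpTime>:

#include <iostream>

// Simplified stand-ins for mongo::StatusWith<OpTime>; the real type carries a
// Status explaining why the load failed.
struct OpTime { unsigned secs; unsigned i; };
struct StatusWithOpTime { bool ok; OpTime value; };

OpTime resolveLastOpTime(const StatusWithOpTime& loaded, bool isArbiter) {
  OpTime lastOpTime = {0, 0};
  if (isArbiter) return lastOpTime;  // arbiters hold no data; nothing to load
  if (!loaded.ok) {
    std::cerr << "Failed to load timestamp of most recently applied operation\n";
    return lastOpTime;
  }
  return loaded.value;
}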
mmm a / examples / toy / Ch2 / mlir / MLIRGen . cpp <nl> ppp b / examples / toy / Ch2 / mlir / MLIRGen . cpp <nl> class MLIRGenImpl { <nl> <nl> / / Build the MLIR operation from the name and the two operands . The return <nl> / / type is always a generic array for binary operators . <nl> - mlir : : OperationState result ( & context , location , op_name ) ; <nl> + mlir : : OperationState result ( location , op_name ) ; <nl> result . types . push_back ( getType ( VarType { } ) ) ; <nl> result . operands . push_back ( L ) ; <nl> result . operands . push_back ( R ) ; <nl> class MLIRGenImpl { <nl> bool mlirGen ( ReturnExprAST & ret ) { <nl> auto location = loc ( ret . loc ( ) ) ; <nl> / / ` return ` takes an optional expression , we need to account for it here . <nl> - mlir : : OperationState result ( & context , location , " toy . return " ) ; <nl> + mlir : : OperationState result ( location , " toy . return " ) ; <nl> if ( ret . getExpr ( ) . hasValue ( ) ) { <nl> auto * expr = mlirGen ( * ret . getExpr ( ) . getValue ( ) ) ; <nl> if ( ! expr ) <nl> class MLIRGenImpl { <nl> . cast < mlir : : DenseElementsAttr > ( ) ) ; <nl> <nl> / / Build the MLIR op ` toy . constant ` , only boilerplate below . <nl> - mlir : : OperationState result ( & context , location , " toy . constant " ) ; <nl> + mlir : : OperationState result ( location , " toy . constant " ) ; <nl> result . types . push_back ( type ) ; <nl> result . attributes . push_back ( dataAttribute ) ; <nl> return builder - > createOperation ( result ) - > getResult ( 0 ) ; <nl> class MLIRGenImpl { <nl> } <nl> / / builtin have their custom operation , this is a straightforward emission . <nl> if ( callee = = " transpose " ) { <nl> - mlir : : OperationState result ( & context , location , " toy . transpose " ) ; <nl> + mlir : : OperationState result ( location , " toy . transpose " ) ; <nl> result . types . push_back ( getType ( VarType { } ) ) ; <nl> result . operands = std : : move ( operands ) ; <nl> return builder - > createOperation ( result ) - > getResult ( 0 ) ; <nl> class MLIRGenImpl { <nl> <nl> / / Calls to user - defined functions are mapped to a custom call that takes <nl> / / the callee name as an attribute . <nl> - mlir : : OperationState result ( & context , location , " toy . generic_call " ) ; <nl> + mlir : : OperationState result ( location , " toy . generic_call " ) ; <nl> result . types . push_back ( getType ( VarType { } ) ) ; <nl> result . operands = std : : move ( operands ) ; <nl> auto calleeAttr = builder - > getStringAttr ( call . getCallee ( ) ) ; <nl> class MLIRGenImpl { <nl> if ( ! arg ) <nl> return false ; <nl> auto location = loc ( call . loc ( ) ) ; <nl> - mlir : : OperationState result ( & context , location , " toy . print " ) ; <nl> + mlir : : OperationState result ( location , " toy . print " ) ; <nl> result . operands . push_back ( arg ) ; <nl> builder - > createOperation ( result ) ; <nl> return true ; <nl> class MLIRGenImpl { <nl> / / Emit a constant for a single number ( FIXME : semantic ? broadcast ? ) <nl> mlir : : Value * mlirGen ( NumberExprAST & num ) { <nl> auto location = loc ( num . loc ( ) ) ; <nl> - mlir : : OperationState result ( & context , location , " toy . constant " ) ; <nl> + mlir : : OperationState result ( location , " toy . constant " ) ; <nl> mlir : : Type elementType = mlir : : FloatType : : getF64 ( & context ) ; <nl> result . types . push_back ( builder - > getMemRefType ( { 1 } , elementType ) ) ; <nl> auto attr = mlir : : FloatAttr : : getChecked ( elementType , num . 
getValue ( ) , <nl> class MLIRGenImpl { <nl> / / with specific shape , we emit a " reshape " operation . It will get <nl> / / optimized out later as needed . <nl> if ( ! vardecl . getType ( ) . shape . empty ( ) ) { <nl> - mlir : : OperationState result ( & context , location , " toy . reshape " ) ; <nl> + mlir : : OperationState result ( location , " toy . reshape " ) ; <nl> result . types . push_back ( getType ( vardecl . getType ( ) ) ) ; <nl> result . operands . push_back ( value ) ; <nl> value = builder - > createOperation ( result ) - > getResult ( 0 ) ; <nl> mmm a / include / mlir / IR / Builders . h <nl> ppp b / include / mlir / IR / Builders . h <nl> class OpBuilder : public Builder { <nl> / / / Create an operation of specific op type at the current insertion point . <nl> template < typename OpTy , typename . . . Args > <nl> OpTy create ( Location location , Args . . . args ) { <nl> - OperationState state ( getContext ( ) , location , OpTy : : getOperationName ( ) ) ; <nl> + OperationState state ( location , OpTy : : getOperationName ( ) ) ; <nl> OpTy : : build ( this , & state , args . . . ) ; <nl> auto * op = createOperation ( state ) ; <nl> auto result = dyn_cast < OpTy > ( op ) ; <nl> mmm a / include / mlir / IR / OpDefinition . h <nl> ppp b / include / mlir / IR / OpDefinition . h <nl> void ensureRegionTerminator ( <nl> template < typename OpTy > <nl> void ensureRegionTerminator ( Region & region , Builder & builder , Location loc ) { <nl> ensureRegionTerminator ( region , loc , [ & ] { <nl> - OperationState state ( loc - > getContext ( ) , loc , OpTy : : getOperationName ( ) ) ; <nl> + OperationState state ( loc , OpTy : : getOperationName ( ) ) ; <nl> OpTy : : build ( & builder , & state ) ; <nl> return Operation : : create ( state ) ; <nl> } ) ; <nl> mmm a / include / mlir / IR / OperationSupport . h <nl> ppp b / include / mlir / IR / OperationSupport . h <nl> struct OperationState { <nl> bool resizableOperandList = false ; <nl> <nl> public : <nl> - OperationState ( MLIRContext * context , Location location , StringRef name ) ; <nl> + OperationState ( Location location , StringRef name ) ; <nl> <nl> - OperationState ( MLIRContext * context , Location location , OperationName name ) ; <nl> + OperationState ( Location location , OperationName name ) ; <nl> <nl> - OperationState ( MLIRContext * context , Location location , StringRef name , <nl> - ArrayRef < Value * > operands , ArrayRef < Type > types , <nl> - ArrayRef < NamedAttribute > attributes , <nl> + OperationState ( Location location , StringRef name , ArrayRef < Value * > operands , <nl> + ArrayRef < Type > types , ArrayRef < NamedAttribute > attributes , <nl> ArrayRef < Block * > successors = { } , <nl> MutableArrayRef < std : : unique_ptr < Region > > regions = { } , <nl> bool resizableOperandList = false ) ; <nl> struct OperationState { <nl> <nl> / / / Add an attribute with the specified name . <nl> void addAttribute ( StringRef name , Attribute attr ) { <nl> - addAttribute ( Identifier : : get ( name , context ) , attr ) ; <nl> + addAttribute ( Identifier : : get ( name , getContext ( ) ) , attr ) ; <nl> } <nl> <nl> / / / Add an attribute with the specified name . <nl> struct OperationState { <nl> void setOperandListToResizable ( bool isResizable = true ) { <nl> resizableOperandList = isResizable ; <nl> } <nl> + <nl> + / / / Get the context held by this operation state . 
<nl> + MLIRContext * getContext ( ) { return location - > getContext ( ) ; } <nl> } ; <nl> <nl> namespace detail { <nl> mmm a / include / mlir / IR / PatternMatch . h <nl> ppp b / include / mlir / IR / PatternMatch . h <nl> class PatternRewriter : public OpBuilder { <nl> / / / without verifying to see if it is valid . <nl> template < typename OpTy , typename . . . Args > <nl> OpTy create ( Location location , Args . . . args ) { <nl> - OperationState state ( getContext ( ) , location , OpTy : : getOperationName ( ) ) ; <nl> + OperationState state ( location , OpTy : : getOperationName ( ) ) ; <nl> OpTy : : build ( this , & state , args . . . ) ; <nl> auto * op = createOperation ( state ) ; <nl> auto result = dyn_cast < OpTy > ( op ) ; <nl> class PatternRewriter : public OpBuilder { <nl> / / / and return null . <nl> template < typename OpTy , typename . . . Args > <nl> OpTy createChecked ( Location location , Args . . . args ) { <nl> - OperationState state ( getContext ( ) , location , OpTy : : getOperationName ( ) ) ; <nl> + OperationState state ( location , OpTy : : getOperationName ( ) ) ; <nl> OpTy : : build ( this , & state , args . . . ) ; <nl> auto * op = createOperation ( state ) ; <nl> <nl> mmm a / lib / EDSC / Builders . cpp <nl> ppp b / lib / EDSC / Builders . cpp <nl> OperationHandle OperationHandle : : create ( StringRef name , <nl> ArrayRef < ValueHandle > operands , <nl> ArrayRef < Type > resultTypes , <nl> ArrayRef < NamedAttribute > attributes ) { <nl> - OperationState state ( ScopedContext : : getContext ( ) , <nl> - ScopedContext : : getLocation ( ) , name ) ; <nl> + OperationState state ( ScopedContext : : getLocation ( ) , name ) ; <nl> SmallVector < Value * , 4 > ops ( operands . begin ( ) , operands . end ( ) ) ; <nl> state . addOperands ( ops ) ; <nl> state . addTypes ( resultTypes ) ; <nl> mmm a / lib / IR / OperationSupport . cpp <nl> ppp b / lib / IR / OperationSupport . 
cpp <nl> using namespace mlir ; <nl> / / OperationState <nl> / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> <nl> - OperationState : : OperationState ( MLIRContext * context , Location location , <nl> - StringRef name ) <nl> - : context ( context ) , location ( location ) , name ( name , context ) { } <nl> + OperationState : : OperationState ( Location location , StringRef name ) <nl> + : context ( location - > getContext ( ) ) , location ( location ) , <nl> + name ( name , location - > getContext ( ) ) { } <nl> <nl> - OperationState : : OperationState ( MLIRContext * context , Location location , <nl> - OperationName name ) <nl> - : context ( context ) , location ( location ) , name ( name ) { } <nl> + OperationState : : OperationState ( Location location , OperationName name ) <nl> + : context ( location - > getContext ( ) ) , location ( location ) , name ( name ) { } <nl> <nl> - OperationState : : OperationState ( MLIRContext * context , Location location , <nl> - StringRef name , ArrayRef < Value * > operands , <nl> - ArrayRef < Type > types , <nl> + OperationState : : OperationState ( Location location , StringRef name , <nl> + ArrayRef < Value * > operands , ArrayRef < Type > types , <nl> ArrayRef < NamedAttribute > attributes , <nl> ArrayRef < Block * > successors , <nl> MutableArrayRef < std : : unique_ptr < Region > > regions , <nl> bool resizableOperandList ) <nl> - : context ( context ) , location ( location ) , name ( name , context ) , <nl> + : context ( location - > getContext ( ) ) , location ( location ) , <nl> + name ( name , location - > getContext ( ) ) , <nl> operands ( operands . begin ( ) , operands . end ( ) ) , <nl> types ( types . begin ( ) , types . end ( ) ) , <nl> attributes ( attributes . begin ( ) , attributes . end ( ) ) , <nl> mmm a / lib / Parser / Parser . cpp <nl> ppp b / lib / Parser / Parser . cpp <nl> Operation * OperationParser : : parseGenericOperation ( ) { <nl> <nl> consumeToken ( Token : : string ) ; <nl> <nl> - OperationState result ( builder . getContext ( ) , srcLocation , name ) ; <nl> + OperationState result ( srcLocation , name ) ; <nl> <nl> / / Generic operations have a resizable operation list . <nl> result . setOperandListToResizable ( ) ; <nl> Operation * OperationParser : : parseCustomOperation ( ) { <nl> auto srcLocation = getEncodedSourceLocation ( opLoc ) ; <nl> <nl> / / Have the op implementation take a crack and parsing this . <nl> - OperationState opState ( builder . getContext ( ) , srcLocation , opDefinition - > name ) ; <nl> + OperationState opState ( srcLocation , opDefinition - > name ) ; <nl> CleanupOpStateRegions guard { opState } ; <nl> if ( opAsmParser . parseOperation ( opDefinition , & opState ) ) <nl> return nullptr ; <nl> mmm a / lib / SPIRV / Serialization / ConvertFromBinary . cpp <nl> ppp b / lib / SPIRV / Serialization / ConvertFromBinary . cpp <nl> Block * createOneBlockFunction ( Builder builder , Module * module ) { <nl> auto * block = new Block ( ) ; <nl> fn - > push_back ( block ) ; <nl> <nl> - OperationState state ( builder . getContext ( ) , builder . getUnknownLoc ( ) , <nl> - ReturnOp : : getOperationName ( ) ) ; <nl> + OperationState state ( builder . getUnknownLoc ( ) , ReturnOp : : getOperationName ( ) ) ; <nl> ReturnOp : : build ( & builder , & state ) ; <nl> block - > push_back ( Operation : : create ( state ) ) ; <nl> <nl> mmm a / lib / SPIRV / Serialization / Deserializer . cpp <nl> ppp b / lib / SPIRV / Serialization / Deserializer . 
cpp <nl> LogicalResult Deserializer : : processMemoryModel ( ArrayRef < uint32_t > operands ) { <nl> <nl> spirv : : ModuleOp Deserializer : : createModuleOp ( ) { <nl> Builder builder ( context ) ; <nl> - OperationState state ( context , unknownLoc , <nl> - spirv : : ModuleOp : : getOperationName ( ) ) ; <nl> + OperationState state ( unknownLoc , spirv : : ModuleOp : : getOperationName ( ) ) ; <nl> / / TODO ( antiagainst ) : use target environment to select the version <nl> state . addAttribute ( " major_version " , builder . getI32IntegerAttr ( 1 ) ) ; <nl> state . addAttribute ( " minor_version " , builder . getI32IntegerAttr ( 0 ) ) ; <nl> mmm a / lib / Transforms / MaterializeVectors . cpp <nl> ppp b / lib / Transforms / MaterializeVectors . cpp <nl> static Operation * instantiate ( OpBuilder b , Operation * opInst , <nl> <nl> auto attrs = materializeAttributes ( opInst , hwVectorType ) ; <nl> <nl> - OperationState state ( b . getContext ( ) , opInst - > getLoc ( ) , <nl> - opInst - > getName ( ) . getStringRef ( ) , operands , <nl> - { hwVectorType } , attrs ) ; <nl> + OperationState state ( opInst - > getLoc ( ) , opInst - > getName ( ) . getStringRef ( ) , <nl> + operands , { hwVectorType } , attrs ) ; <nl> return b . createOperation ( state ) ; <nl> } <nl> <nl> mmm a / lib / Transforms / Utils / Utils . cpp <nl> ppp b / lib / Transforms / Utils / Utils . cpp <nl> bool mlir : : replaceAllMemRefUsesWith ( Value * oldMemRef , Value * newMemRef , <nl> unsigned memRefOperandPos = getMemRefOperandPos ( ) ; <nl> <nl> / / Construct the new operation using this memref . <nl> - OperationState state ( opInst - > getContext ( ) , opInst - > getLoc ( ) , <nl> - opInst - > getName ( ) ) ; <nl> + OperationState state ( opInst - > getLoc ( ) , opInst - > getName ( ) ) ; <nl> state . setOperandListToResizable ( opInst - > hasResizableOperandsList ( ) ) ; <nl> state . operands . reserve ( opInst - > getNumOperands ( ) + extraIndices . size ( ) ) ; <nl> / / Insert the non - memref operands . <nl> mmm a / lib / Transforms / Vectorize . cpp <nl> ppp b / lib / Transforms / Vectorize . cpp <nl> static Value * vectorizeConstant ( Operation * op , ConstantOp constant , Type type ) { <nl> auto attr = DenseElementsAttr : : get ( vectorType , constant . getValue ( ) ) ; <nl> auto * constantOpInst = constant . getOperation ( ) ; <nl> <nl> - OperationState state ( b . getContext ( ) , loc , <nl> - constantOpInst - > getName ( ) . getStringRef ( ) , { } , <nl> + OperationState state ( loc , constantOpInst - > getName ( ) . getStringRef ( ) , { } , <nl> { vectorType } , { b . getNamedAttr ( " value " , attr ) } ) ; <nl> <nl> return b . createOperation ( state ) - > getResult ( 0 ) ; <nl> static Operation * vectorizeOneOperation ( Operation * opInst , <nl> / / TODO ( ntv ) : Is it worth considering an Operation . clone operation which <nl> / / changes the type so we can promote an Operation with less boilerplate ? <nl> OpBuilder b ( opInst ) ; <nl> - OperationState newOp ( b . getContext ( ) , opInst - > getLoc ( ) , <nl> - opInst - > getName ( ) . getStringRef ( ) , vectorOperands , <nl> - vectorTypes , opInst - > getAttrs ( ) , / * successors = * / { } , <nl> + OperationState newOp ( opInst - > getLoc ( ) , opInst - > getName ( ) . getStringRef ( ) , <nl> + vectorOperands , vectorTypes , opInst - > getAttrs ( ) , <nl> + / * successors = * / { } , <nl> / * regions = * / { } , opInst - > hasResizableOperandsList ( ) ) ; <nl> return b . createOperation ( newOp ) ; <nl> } <nl>
NFC : Remove the ' context ' parameter from OperationState .
tensorflow/tensorflow
e0b025d5587a1d844e264805a1bf386a0e367581
2019-06-22T20:05:10Z
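After the mlir refactor above, an OperationState is built from a Location alone, since the Location carries its MLIRContext and every call site drops one argument. A minimal sketch of the post-change pattern, assuming the toy-dialect op name from the tutorial and builder/operand/type values supplied by the caller:

#include "mlir/IR/Builders.h"
#include "mlir/IR/OperationSupport.h"

// The state derives its context from `loc`, so no MLIRContext* is passed.
mlir::Operation *createTranspose(mlir::OpBuilder &builder, mlir::Location loc,
                                 mlir::Value *operand, mlir::Type resultType) {
  mlir::OperationState state(loc, "toy.transpose");
  state.types.push_back(resultType);
  state.operands.push_back(operand);
  return builder.createOperation(state);
}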
mmm a / CMakeLists . txt <nl> ppp b / CMakeLists . txt <nl> option ( SWIFT_STDLIB_ENABLE_RESILIENCE <nl> <nl> option ( SWIFT_RUNTIME_ENABLE_COW_EXISTENTIALS <nl> " Build the runtime with a copy - on - write implementation for opaque existentials " <nl> - FALSE ) <nl> + TRUE ) <nl> <nl> option ( SWIFT_STDLIB_USE_NONATOMIC_RC <nl> " Build the standard libraries and overlays with nonatomic reference count operations enabled " <nl> mmm a / lib / IRGen / GenExistential . cpp <nl> ppp b / lib / IRGen / GenExistential . cpp <nl> static llvm : : Constant * getDeallocateBoxedOpaqueExistentialBufferFunction ( <nl> llvm : : Value * pointerAlignMask = llvm : : ConstantInt : : get ( <nl> IGF . IGM . SizeTy , IGF . IGM . getPointerAlignment ( ) . getValue ( ) - 1 ) ; <nl> alignmentMask = Builder . CreateOr ( alignmentMask , pointerAlignMask ) ; <nl> - emitDeallocateHeapObject ( IGF , boxReference , size , alignmentMask ) ; <nl> + IGF . emitDeallocRawCall ( <nl> + Builder . CreateBitCast ( boxReference , IGF . IGM . Int8PtrTy ) , size , <nl> + alignmentMask ) ; <nl> / / We are done . Return . <nl> Builder . CreateRetVoid ( ) ; <nl> } , true / * noinline * / ) ; <nl> mmm a / test / IRGen / alloc . sil <nl> ppp b / test / IRGen / alloc . sil <nl> struct Huge { <nl> } <nl> <nl> / / CHECK : @ _swift_slowAlloc = external global i8 * ( [ [ SIZE_T : i ( 32 | 64 ) ] ] , <nl> - / / CHECK : define linkonce_odr hidden i8 * @ swift_rt_swift_slowAlloc ( [ [ SIZE_T : i ( 32 | 64 ) ] ] , <nl> <nl> / / CHECK : define linkonce_odr hidden void @ _T04main4HugeVwde ( <nl> / / CHECK : [ [ T0 : % . * ] ] = bitcast [ [ BUFFER : . [ 0 - 9 ] + x i8 . ] ] * { { % . * } } to i8 * * <nl> struct Huge { <nl> / / CHECK - NEXT : call void @ swift_rt_swift_slowDealloc ( i8 * [ [ T1 ] ] , [ [ SIZE_T ] ] 4097 , [ [ SIZE_T ] ] 7 ) <nl> / / CHECK - NEXT : ret void <nl> <nl> - / / CHECK : define linkonce_odr hidden [ [ OPAQUE : % swift . opaque ] ] * @ _T04main4HugeVwal ( <nl> - / / CHECK : [ [ T0 : % . * ] ] = call noalias i8 * @ swift_rt_swift_slowAlloc ( [ [ SIZE_T ] ] 4097 , [ [ SIZE_T ] ] 7 ) <nl> - / / CHECK - NEXT : [ [ T1 : % . * ] ] = bitcast [ [ BUFFER ] ] * { { % . * } } to i8 * * <nl> - / / CHECK - NEXT : store i8 * [ [ T0 ] ] , i8 * * [ [ T1 ] ] <nl> + <nl> + / / CHECK : define linkonce_odr hidden [ [ OPAQUE : % swift . opaque ] ] * @ _T04main4HugeVwal ( { { . * } } * [ [ BUFFER : % . * ] ] , % swift . type <nl> + / / CHECK : [ [ BOXPAIR : % . * ] ] = call { % swift . refcounted * , % swift . opaque * } @ swift_allocBox ( { { . * } } @ _T04main4HugeVMf <nl> + / / CHECK : [ [ REF : % . * ] ] = extractvalue { % swift . refcounted * , % swift . opaque * } % 0 , 0 <nl> + / / CHECK : [ [ BUFFER_ADDR : % . * ] ] = bitcast { { . * } } * [ [ BUFFER ] ] to % swift . refcounted * * <nl> + / / CHECK : store % swift . refcounted * [ [ REF ] ] , % swift . refcounted * * [ [ BUFFER_ADDR ] ] <nl> mmm a / test / IRGen / existentials_objc . sil <nl> ppp b / test / IRGen / existentials_objc . sil <nl> bb0 ( % 0 : $ * Any , % 1 : $ T ) : <nl> / / CHECK : [ [ T0 : % . * ] ] = getelementptr inbounds [ [ ANY ] ] , [ [ ANY ] ] * % 0 , i32 0 , i32 1 <nl> / / CHECK - NEXT : store [ [ TYPE ] ] * % T , [ [ TYPE ] ] * * [ [ T0 ] ] , align 8 <nl> / / CHECK - NEXT : [ [ T0 : % . * ] ] = getelementptr inbounds [ [ ANY ] ] , [ [ ANY ] ] * % 0 , i32 0 , i32 0 <nl> + / / CHECK - NEXT : [ [ T0 : % . * ] ] = getelementptr inbounds [ [ ANY ] ] , [ [ ANY ] ] * % 0 , i32 0 , i32 0 <nl> / / CHECK - NEXT : [ [ T1 : % . 
* ] ] = bitcast [ 24 x i8 ] * [ [ T0 ] ] to [ [ GIZMO ] ] * * <nl> / / CHECK - NEXT : store [ [ GIZMO ] ] * % 1 , [ [ GIZMO ] ] * * [ [ T1 ] ] , align 8 <nl> / / CHECK - NEXT : ret void <nl> bb0 ( % 0 : $ * Any , % 1 : $ * Any ) : <nl> / / CHECK - NEXT : [ [ TYPE : % . * ] ] = load % swift . type * , % swift . type * * [ [ T0 ] ] , align 8 <nl> / / CHECK - NEXT : [ [ T0 : % . * ] ] = getelementptr inbounds [ [ ANY ] ] , [ [ ANY ] ] * [ [ DEST : % 0 ] ] , i32 0 , i32 1 <nl> / / CHECK - NEXT : store % swift . type * [ [ TYPE ] ] , % swift . type * * [ [ T0 ] ] , align 8 <nl> - / / CHECK - NEXT : [ [ SRC_BUF : % . * ] ] = getelementptr inbounds [ [ ANY ] ] , [ [ ANY ] ] * [ [ SRC ] ] , i32 0 , i32 0 <nl> - / / CHECK - NEXT : [ [ DEST_BUF : % . * ] ] = getelementptr inbounds [ [ ANY ] ] , [ [ ANY ] ] * [ [ DEST ] ] , i32 0 , i32 0 <nl> - / / CHECK - NEXT : [ [ T0 : % . * ] ] = bitcast % swift . type * [ [ TYPE ] ] to i8 * * * <nl> - / / CHECK - NEXT : [ [ T1 : % . * ] ] = getelementptr inbounds i8 * * , i8 * * * [ [ T0 ] ] , i64 - 1 <nl> - / / CHECK - NEXT : [ [ VWTABLE : % . * ] ] = load i8 * * , i8 * * * [ [ T1 ] ] , align 8 <nl> - / / CHECK - NEXT : [ [ T0 : % . * ] ] = getelementptr inbounds i8 * , i8 * * [ [ VWTABLE ] ] , i32 12 <nl> - / / CHECK - NEXT : [ [ T1 : % . * ] ] = load i8 * , i8 * * [ [ T0 ] ] , align 8 <nl> - / / CHECK - NEXT : [ [ INIT : % . * ] ] = bitcast i8 * [ [ T1 ] ] to % swift . opaque * ( [ 24 x i8 ] * , [ 24 x i8 ] * , % swift . type * ) * <nl> - / / CHECK - NEXT : call % swift . opaque * [ [ INIT ] ] ( [ 24 x i8 ] * noalias [ [ DEST_BUF ] ] , [ 24 x i8 ] * [ [ SRC_BUF ] ] , % swift . type * [ [ TYPE ] ] ) <nl> + / / CHECK - NEXT : [ [ FROM_BUFFER_ADDR : % . * ] ] = getelementptr inbounds % Any , % Any * % 1 , i32 0 , i32 0 <nl> + / / CHECK - NEXT : [ [ TO_BUFFER_ADDR : % . * ] ] = getelementptr inbounds % Any , % Any * % 0 , i32 0 , i32 0 <nl> + / / CHECK - NEXT : [ [ CAST : % . * ] ] = bitcast % swift . type * [ [ TYPE ] ] to i8 * * * <nl> + / / CHECK - NEXT : [ [ VWT_ADDR : % . * ] ] = getelementptr inbounds i8 * * , i8 * * * [ [ CAST ] ] , i64 - 1 <nl> + / / CHECK - NEXT : [ [ VWT : % . * ] ] = load i8 * * , i8 * * * [ [ VWT_ADDR ] ] <nl> + / / CHECK - NEXT : [ [ VW_ADDR : % . * ] ] = getelementptr inbounds i8 * , i8 * * [ [ VWT ] ] , i32 12 <nl> + / / CHECK - NEXT : [ [ VW : % . * ] ] = load i8 * , i8 * * [ [ VW_ADDR ] ] <nl> + / / CHECK - NEXT : [ [ INITWITHTAKEBUFFER : % . * ] ] = bitcast i8 * [ [ VW ] ] <nl> + / / CHECK - NEXT : call % swift . opaque * [ [ INITWITHTAKEBUFFER ] ] ( { { . * } } [ [ TO_BUFFER_ADDR ] ] , { { . * } } [ [ FROM_BUFFER_ADDR ] ] , % swift . type * [ [ TYPE ] ] ) <nl> / / CHECK - NEXT : ret void <nl> <nl> / / rdar : / / problem / 19035529 <nl> mmm a / test / IRGen / existentials_opaque_boxed . sil <nl> ppp b / test / IRGen / existentials_opaque_boxed . sil <nl> entry : <nl> / / CHECK : [ [ ALIGNEDSTART : % . * ] ] = and { { ( i64 | i32 ) } } [ [ HEADERSIZEPLUSALIGN ] ] , [ [ NOTALIGNMASK ] ] <nl> / / CHECK : [ [ HEAPSIZE : % . * ] ] = add { { ( i64 | i32 ) } } [ [ ALIGNEDSTART ] ] , [ [ SIZE ] ] <nl> / / CHECK : [ [ ALIGNMASK_ATLEASTPOINTER : % . * ] ] = or { { ( i64 | i32 ) } } [ [ ALIGNMASK ] ] , { { ( 7 | 3 ) } } <nl> - / / CHECK : call void @ swift_rt_swift_deallocObject ( % swift . refcounted * % 10 , { { ( i64 | i32 ) } } [ [ HEAPSIZE ] ] , { { ( i64 | i32 ) } } [ [ ALIGNMASK_ATLEASTPOINTER ] ] ) <nl> + / / CHECK : [ [ PTR : % . * ] ] = bitcast % swift . 
refcounted * [ [ REFERENCE ] ] to i8 * <nl> + / / CHECK : call void @ swift_rt_swift_slowDealloc ( i8 * [ [ PTR ] ] , { { ( i64 | i32 ) } } [ [ HEAPSIZE ] ] , { { ( i64 | i32 ) } } [ [ ALIGNMASK_ATLEASTPOINTER ] ] ) <nl> / / CHECK : ret void <nl> <nl> / / CHECK - LABEL : define { { . * } } @ test_open_existential_addr_immutable ( % T25existentials_opaque_boxed11ExistentialP * <nl> mmm a / test / IRGen / fixed_size_buffer_peepholes . sil <nl> ppp b / test / IRGen / fixed_size_buffer_peepholes . sil <nl> import Builtin <nl> <nl> protocol P { } <nl> <nl> - / / CHECK - LABEL : define { { ( protected ) ? } } swiftcc void @ join_init_existential_copy_addr ( % T27fixed_size_buffer_peepholes1PP * noalias nocapture sret , % swift . opaque * noalias nocapture , % swift . type * % T , i8 * * % T . P ) <nl> - / / CHECK : [ [ BUFFER : % . * ] ] = getelementptr inbounds % T27fixed_size_buffer_peepholes1PP , % T27fixed_size_buffer_peepholes1PP * % 0 , i32 0 , i32 0 <nl> - / / CHECK : call % swift . opaque * % initializeBufferWithTake ( [ [ BUFFER_TYPE : \ [ . * x i8 \ ] ] ] * [ [ BUFFER ] ] , % swift . opaque * % 1 <nl> - sil @ join_init_existential_copy_addr : $ @ convention ( thin ) < T : P > ( @ in T ) - > @ out P { <nl> - entry ( % p : $ * P , % x : $ * T ) : <nl> - % y = init_existential_addr % p : $ * P , $ T <nl> - copy_addr [ take ] % x to [ initialization ] % y : $ * T <nl> - return undef : $ ( ) <nl> - } <nl> - <nl> / / CHECK - LABEL : define { { ( protected ) ? } } swiftcc void @ dont_crash ( <nl> / / CHECK : [ [ TYPE_ADDR : % . * ] ] = getelementptr inbounds % T27fixed_size_buffer_peepholes1PP , % T27fixed_size_buffer_peepholes1PP * % 0 , i32 0 , i32 1 <nl> / / CHECK : [ [ TYPE : % . * ] ] = load % swift . type * , % swift . type * * [ [ TYPE_ADDR ] ] <nl> + / / CHECK : call { { . * } } @ __swift_project_boxed_opaque_existential_1 <nl> / / CHECK : [ [ PTR : % . * ] ] = bitcast % swift . type * [ [ TYPE ] ] to i8 * * * <nl> / / CHECK : [ [ VWT_ADDR : % . * ] ] = getelementptr inbounds i8 * * , i8 * * * [ [ PTR ] ] , { { ( i64 | i32 ) } } - 1 <nl> / / CHECK : [ [ VWT : % . * ] ] = load i8 * * , i8 * * * [ [ VWT_ADDR ] ] <nl> - / / CHECK : [ [ WITNESS_ADDR : % . * ] ] = getelementptr inbounds i8 * , i8 * * [ [ VWT ] ] , { { ( i64 | i32 ) } } 2 <nl> - / / CHECK : [ [ WITNESS : % . * ] ] = load i8 * , i8 * * [ [ WITNESS_ADDR ] ] <nl> - / / CHECK : [ [ PROJECTBUFFER : % . * ] ] = bitcast i8 * [ [ WITNESS ] ] <nl> - / / CHECK : call % swift . opaque * [ [ PROJECTBUFFER ] ] ( <nl> - / / CHECK : [ [ PTR : % . * ] ] = bitcast % swift . type * [ [ TYPE ] ] to i8 * * * <nl> - / / CHECK : [ [ VWT_ADDR : % . * ] ] = getelementptr inbounds i8 * * , i8 * * * [ [ PTR ] ] , { { ( i64 | i32 ) } } - 1 <nl> - / / CHECK : [ [ VWT : % . * ] ] = load i8 * * , i8 * * * [ [ VWT_ADDR ] ] <nl> - / / CHECK : [ [ WITNESS_ADDR : % . * ] ] = getelementptr inbounds i8 * , i8 * * [ [ VWT ] ] , { { ( i64 | i32 ) } } 5 <nl> + / / CHECK : [ [ WITNESS_ADDR : % . * ] ] = getelementptr inbounds i8 * , i8 * * [ [ VWT ] ] , { { ( i64 | i32 ) } } 6 <nl> / / CHECK : [ [ WITNESS : % . * ] ] = load i8 * , i8 * * [ [ WITNESS_ADDR ] ] <nl> - / / CHECK : [ [ INITBUFFERWITHCOPY : % . * ] ] = bitcast i8 * [ [ WITNESS ] ] to % swift . opaque * ( [ { { . * } } x i8 ] * , % swift . opaque * , % swift . type * ) * <nl> - / / CHECK : call % swift . opaque * [ [ INITBUFFERWITHCOPY ] ] ( <nl> + / / CHECK : [ [ INITWITHCOPY : % . * ] ] = bitcast i8 * [ [ WITNESS ] ] to % swift . opaque * ( % swift . opaque * , % swift . opaque * , % swift . 
type * ) * <nl> + / / CHECK : call % swift . opaque * [ [ INITWITHCOPY ] ] ( <nl> sil @ dont_crash : $ @ convention ( thin ) ( @ in P ) - > ( ) { <nl> entry ( % p : $ * P ) : <nl> % 0 = alloc_stack $ P <nl> mmm a / test / IRGen / generic_metatypes . swift <nl> ppp b / test / IRGen / generic_metatypes . swift <nl> func protocolTypeof ( _ x : Bas ) - > Bas . Type { <nl> / / CHECK : [ [ METADATA_ADDR : % . * ] ] = getelementptr inbounds % T17generic_metatypes3BasP , % T17generic_metatypes3BasP * [ [ X : % . * ] ] , i32 0 , i32 1 <nl> / / CHECK : [ [ METADATA : % . * ] ] = load % swift . type * , % swift . type * * [ [ METADATA_ADDR ] ] <nl> / / CHECK : [ [ BUFFER : % . * ] ] = getelementptr inbounds % T17generic_metatypes3BasP , % T17generic_metatypes3BasP * [ [ X ] ] , i32 0 , i32 0 <nl> - / / CHECK : [ [ METADATA_I8 : % . * ] ] = bitcast % swift . type * [ [ METADATA ] ] to i8 * * * <nl> - / / CHECK - 32 : [ [ VW_ADDR : % . * ] ] = getelementptr inbounds i8 * * , i8 * * * [ [ METADATA_I8 ] ] , i32 - 1 <nl> - / / CHECK - 64 : [ [ VW_ADDR : % . * ] ] = getelementptr inbounds i8 * * , i8 * * * [ [ METADATA_I8 ] ] , i64 - 1 <nl> - / / CHECK : [ [ VW : % . * ] ] = load i8 * * , i8 * * * [ [ VW_ADDR ] ] <nl> - / / CHECK : [ [ PROJECT_ADDR : % . * ] ] = getelementptr inbounds i8 * , i8 * * [ [ VW ] ] , i32 2 <nl> - / / CHECK - 32 : [ [ PROJECT_PTR : % . * ] ] = load i8 * , i8 * * [ [ PROJECT_ADDR ] ] , align 4 <nl> - / / CHECK - 64 : [ [ PROJECT_PTR : % . * ] ] = load i8 * , i8 * * [ [ PROJECT_ADDR ] ] , align 8 <nl> - / / CHECK - 32 : [ [ PROJECT : % . * ] ] = bitcast i8 * [ [ PROJECT_PTR ] ] to % swift . opaque * ( [ 12 x i8 ] * , % swift . type * ) * <nl> - / / CHECK - 64 : [ [ PROJECT : % . * ] ] = bitcast i8 * [ [ PROJECT_PTR ] ] to % swift . opaque * ( [ 24 x i8 ] * , % swift . type * ) * <nl> - / / CHECK - 32 : [ [ PROJECTION : % . * ] ] = call % swift . opaque * [ [ PROJECT ] ] ( [ 12 x i8 ] * [ [ BUFFER ] ] , % swift . type * [ [ METADATA ] ] ) <nl> - / / CHECK - 64 : [ [ PROJECTION : % . * ] ] = call % swift . opaque * [ [ PROJECT ] ] ( [ 24 x i8 ] * [ [ BUFFER ] ] , % swift . type * [ [ METADATA ] ] ) <nl> - / / CHECK : [ [ METATYPE : % . * ] ] = call % swift . type * @ swift_getDynamicType ( % swift . opaque * [ [ PROJECTION ] ] , % swift . type * [ [ METADATA ] ] , i1 true ) <nl> - / / CHECK : [ [ T0 : % . * ] ] = getelementptr inbounds % T17generic_metatypes3BasP , % T17generic_metatypes3BasP * [ [ X ] ] , i32 0 , i32 2 <nl> - / / CHECK - 32 : [ [ WTABLE : % . * ] ] = load i8 * * , i8 * * * [ [ T0 ] ] , align 4 <nl> - / / CHECK - 64 : [ [ WTABLE : % . * ] ] = load i8 * * , i8 * * * [ [ T0 ] ] , align 8 <nl> + / / CHECK : [ [ VALUE_ADDR : % . * ] ] = call % swift . opaque * @ __swift_project_boxed_opaque_existential_1 ( { { . * } } [ [ BUFFER ] ] , % swift . type * [ [ METADATA ] ] ) <nl> + / / CHECK : [ [ METATYPE : % . * ] ] = call % swift . type * @ swift_getDynamicType ( % swift . opaque * [ [ VALUE_ADDR ] ] , % swift . type * [ [ METADATA ] ] , i1 true ) <nl> + / / CHECK : [ [ WTABLE_ADDR : % . * ] ] = getelementptr inbounds % T17generic_metatypes3BasP , % T17generic_metatypes3BasP * % 0 , i32 0 , i32 2 <nl> + / / CHECK : [ [ WTABLE : % . * ] ] = load i8 * * , i8 * * * [ [ WTABLE_ADDR ] ] <nl> + / / CHECK : call void @ __swift_destroy_boxed_opaque_existential_1 ( % T17generic_metatypes3BasP * % 0 ) <nl> / / CHECK : [ [ T0 : % . * ] ] = insertvalue { % swift . type * , i8 * * } undef , % swift . type * [ [ METATYPE ] ] , 0 <nl> / / CHECK : [ [ T1 : % . 
* ] ] = insertvalue { % swift . type * , i8 * * } [ [ T0 ] ] , i8 * * [ [ WTABLE ] ] , 1 <nl> / / CHECK : ret { % swift . type * , i8 * * } [ [ T1 ] ] <nl> mmm a / test / IRGen / generic_metatypes_arm . swift <nl> ppp b / test / IRGen / generic_metatypes_arm . swift <nl> func protocolTypeof ( _ x : Bas ) - > Bas . Type { <nl> / / CHECK : [ [ METADATA_ADDR : % . * ] ] = getelementptr inbounds % T17generic_metatypes3BasP , % T17generic_metatypes3BasP * [ [ X : % . * ] ] , i32 0 , i32 1 <nl> / / CHECK : [ [ METADATA : % . * ] ] = load % swift . type * , % swift . type * * [ [ METADATA_ADDR ] ] <nl> / / CHECK : [ [ BUFFER : % . * ] ] = getelementptr inbounds % T17generic_metatypes3BasP , % T17generic_metatypes3BasP * [ [ X ] ] , i32 0 , i32 0 <nl> - / / CHECK : [ [ METADATA_I8 : % . * ] ] = bitcast % swift . type * [ [ METADATA ] ] to i8 * * * <nl> - / / CHECK - 32 : [ [ VW_ADDR : % . * ] ] = getelementptr inbounds i8 * * , i8 * * * [ [ METADATA_I8 ] ] , i32 - 1 <nl> - / / CHECK - 64 : [ [ VW_ADDR : % . * ] ] = getelementptr inbounds i8 * * , i8 * * * [ [ METADATA_I8 ] ] , i64 - 1 <nl> - / / CHECK : [ [ VW : % . * ] ] = load i8 * * , i8 * * * [ [ VW_ADDR ] ] <nl> - / / CHECK : [ [ PROJECT_ADDR : % . * ] ] = getelementptr inbounds i8 * , i8 * * [ [ VW ] ] , i32 2 <nl> - / / CHECK - 32 : [ [ PROJECT_PTR : % . * ] ] = load i8 * , i8 * * [ [ PROJECT_ADDR ] ] , align 4 <nl> - / / CHECK - 64 : [ [ PROJECT_PTR : % . * ] ] = load i8 * , i8 * * [ [ PROJECT_ADDR ] ] , align 8 <nl> - / / CHECK - 32 : [ [ PROJECT : % . * ] ] = bitcast i8 * [ [ PROJECT_PTR ] ] to % swift . opaque * ( [ 12 x i8 ] * , % swift . type * ) * <nl> - / / CHECK - 64 : [ [ PROJECT : % . * ] ] = bitcast i8 * [ [ PROJECT_PTR ] ] to % swift . opaque * ( [ 24 x i8 ] * , % swift . type * ) * <nl> - / / CHECK - 32 : [ [ PROJECTION : % . * ] ] = call % swift . opaque * [ [ PROJECT ] ] ( [ 12 x i8 ] * [ [ BUFFER ] ] , % swift . type * [ [ METADATA ] ] ) <nl> - / / CHECK - 64 : [ [ PROJECTION : % . * ] ] = call % swift . opaque * [ [ PROJECT ] ] ( [ 24 x i8 ] * [ [ BUFFER ] ] , % swift . type * [ [ METADATA ] ] ) <nl> - / / CHECK : [ [ METATYPE : % . * ] ] = call % swift . type * @ swift_getDynamicType ( % swift . opaque * [ [ PROJECTION ] ] , % swift . type * [ [ METADATA ] ] , i1 true ) <nl> - / / CHECK : [ [ T0 : % . * ] ] = getelementptr inbounds % T17generic_metatypes3BasP , % T17generic_metatypes3BasP * [ [ X ] ] , i32 0 , i32 2 <nl> - / / CHECK - 32 : [ [ WTABLE : % . * ] ] = load i8 * * , i8 * * * [ [ T0 ] ] , align 4 <nl> - / / CHECK - 64 : [ [ WTABLE : % . * ] ] = load i8 * * , i8 * * * [ [ T0 ] ] , align 8 <nl> + / / CHECK : [ [ VALUE_ADDR : % . * ] ] = call % swift . opaque * @ __swift_project_boxed_opaque_existential_1 ( { { . * } } [ [ BUFFER ] ] , % swift . type * [ [ METADATA ] ] ) <nl> + / / CHECK : [ [ METATYPE : % . * ] ] = call % swift . type * @ swift_getDynamicType ( % swift . opaque * [ [ VALUE_ADDR ] ] , % swift . type * [ [ METADATA ] ] , i1 true ) <nl> + / / CHECK : [ [ WTABLE_ADDR : % . * ] ] = getelementptr inbounds % T17generic_metatypes3BasP , % T17generic_metatypes3BasP * % 0 , i32 0 , i32 2 <nl> + / / CHECK : [ [ WTABLE : % . * ] ] = load i8 * * , i8 * * * [ [ WTABLE_ADDR ] ] <nl> + / / CHECK : call void @ __swift_destroy_boxed_opaque_existential_1 ( % T17generic_metatypes3BasP * % 0 ) <nl> / / CHECK : [ [ T0 : % . * ] ] = insertvalue { % swift . type * , i8 * * } undef , % swift . type * [ [ METATYPE ] ] , 0 <nl> / / CHECK : [ [ T1 : % . * ] ] = insertvalue { % swift . 
type * , i8 * * } [ [ T0 ] ] , i8 * * [ [ WTABLE ] ] , 1 <nl> / / CHECK : ret { % swift . type * , i8 * * } [ [ T1 ] ] <nl> mmm a / test / IRGen / global_resilience . sil <nl> ppp b / test / IRGen / global_resilience . sil <nl> bb0 : <nl> return % tuple : $ ( ) <nl> } <nl> <nl> + / / CHECK - LABEL : define { { . * } } @ testOtherGlobal <nl> sil @ testOtherGlobal : $ @ convention ( thin ) ( ) - > ( ) { <nl> bb0 : <nl> / / CHECK : [ [ METADATA : % . * ] ] = call % swift . type * @ _T016resilient_struct4SizeVMa ( ) <nl> + / / CHECK : call % swift . opaque * @ __swift_allocate_value_buffer ( % swift . type * % 0 , % swift . opaque * bitcast ( [ { { . * } } x i8 ] * @ otherGlobal to % swift . opaque * ) ) <nl> + alloc_global @ otherGlobal <nl> <nl> - / / CHECK : [ [ METADATA_ADDR : % . * ] ] = bitcast % swift . type * [ [ METADATA ] ] to i8 * * * <nl> - / / CHECK : [ [ VWT_ADDR : % . * ] ] = getelementptr inbounds i8 * * , i8 * * * [ [ METADATA_ADDR ] ] , [ [ INT ] ] - 1 <nl> - / / CHECK : [ [ VWT : % . * ] ] = load i8 * * , i8 * * * [ [ VWT_ADDR ] ] <nl> - / / CHECK : [ [ WITNESS_PTR : % . * ] ] = getelementptr inbounds i8 * , i8 * * [ [ VWT ] ] , i32 11 <nl> - / / CHECK : [ [ WITNESS : % . * ] ] = load i8 * , i8 * * [ [ WITNESS_PTR ] ] <nl> - / / CHECK : [ [ allocateBuffer : % . * ] ] = bitcast i8 * [ [ WITNESS ] ] to % swift . opaque * ( [ [ BUFFER ] ] * , % swift . type * ) * <nl> - / / CHECK : [ [ VALUE : % . * ] ] = call % swift . opaque * [ [ allocateBuffer ] ] ( [ [ BUFFER ] ] * @ otherGlobal , % swift . type * [ [ METADATA ] ] ) <nl> - alloc_global @ otherGlobal <nl> - <nl> - / / CHECK : [ [ WITNESS_PTR : % . * ] ] = getelementptr inbounds i8 * , i8 * * [ [ VWT ] ] , i32 2 <nl> - / / CHECK : [ [ WITNESS : % . * ] ] = load i8 * , i8 * * [ [ WITNESS_PTR ] ] <nl> - / / CHECK : [ [ projectBuffer : % . * ] ] = bitcast i8 * [ [ WITNESS ] ] to % swift . opaque * ( [ [ BUFFER ] ] * , % swift . type * ) * <nl> - / / CHECK : [ [ VALUE : % . * ] ] = call % swift . opaque * [ [ projectBuffer ] ] ( [ [ BUFFER ] ] * @ otherGlobal , % swift . type * [ [ METADATA ] ] ) <nl> + / / CHECK : call % swift . opaque * @ __swift_project_value_buffer ( % swift . type * % 0 , % swift . opaque * bitcast ( [ { { . * } } x i8 ] * @ otherGlobal to % swift . opaque * ) ) <nl> % addr = global_addr @ otherGlobal : $ * Size <nl> <nl> % tuple = tuple ( ) <nl> bb0 : <nl> / / CHECK : ret void <nl> return % tuple : $ ( ) <nl> } <nl> + <nl> + / / CHECK - LABEL : define linkonce_odr hidden % swift . opaque * @ __swift_allocate_value_buffer ( % swift . type * , % swift . opaque * ) <nl> + / / CHECK : entry : <nl> + / / CHECK : [ [ CAST : % . * ] ] = bitcast % swift . type * % 0 to i8 * * * <nl> + / / CHECK : [ [ VWT_ADDR : % . * ] ] = getelementptr inbounds i8 * * , i8 * * * [ [ CAST ] ] , { { . * } } - 1 <nl> + / / CHECK : [ [ VWT : % . * ] ] = load i8 * * , i8 * * * [ [ VWT_ADDR ] ] <nl> + / / CHECK : [ [ FLAGS_ADDR : % . * ] ] = getelementptr inbounds i8 * , i8 * * % . valueWitnesses , i32 18 <nl> + / / CHECK : [ [ FLAGSWITNESS : % . * ] ] = load i8 * , i8 * * [ [ FLAGS_ADDR ] ] <nl> + / / CHECK : [ [ FLAGS : % . * ] ] = ptrtoint i8 * [ [ FLAGSWITNESS ] ] to i { { . * } } <nl> + / / CHECK : [ [ ISNOTINLINE : % . * ] ] = and { { . * } } [ [ FLAGS ] ] , 131072 <nl> + / / CHECK : [ [ ISINLINE : % . * ] ] = icmp eq { { . * } } [ [ ISNOTINLINE ] ] , 0 <nl> + / / CHECK : br i1 [ [ ISINLINE ] ] , label % done , label % outline . allocateValueInBuffer <nl> + / / <nl> + / / CHECK : outline . 
allocateValueInBuffer : <nl> + / / CHECK : [ [ CAST : % . * ] ] = bitcast % swift . type * % 0 to i8 * * * <nl> + / / CHECK : [ [ VWT_ADDR : % . * ] ] = getelementptr inbounds i8 * * , i8 * * * [ [ CAST ] ] , { { . * } } - 1 <nl> + / / CHECK : [ [ VWT : % . * ] ] = load i8 * * , i8 * * * [ [ VWT_ADDR ] ] <nl> + / / CHECK : [ [ SIZE_ADDR : % . * ] ] = getelementptr inbounds i8 * , i8 * * [ [ VWT ] ] , i32 17 <nl> + / / CHECK : [ [ SIZEWITNESS : % . * ] ] = load i8 * , i8 * * [ [ SIZE_ADDR ] ] <nl> + / / CHECK : [ [ SIZE : % . * ] ] = ptrtoint i8 * [ [ SIZEWITNESS ] ] <nl> + / / CHECK : [ [ ALIGN : % . * ] ] = and { { . * } } [ [ FLAGS ] ] , 65535 <nl> + / / CHECK : [ [ PTR : % . * ] ] = call noalias i8 * @ swift_rt_swift_slowAlloc ( { { . * } } [ [ SIZE ] ] , { { . * } } [ [ ALIGN ] ] ) <nl> + / / CHECK : [ [ ADDR : % . * ] ] = bitcast % swift . opaque * % 1 to i8 * * <nl> + / / CHECK : store i8 * [ [ PTR ] ] , i8 * * [ [ ADDR ] ] <nl> + / / CHECK : [ [ OUTLINEADDR : % . * ] ] = bitcast i8 * [ [ PTR ] ] to % swift . opaque * <nl> + / / CHECK : br label % done <nl> + / / <nl> + / / CHECK : done : <nl> + / / CHECK : [ [ PHI : % . * ] ] = phi % swift . opaque * [ % 1 , % entry ] , [ [ [ OUTLINEADDR ] ] , % outline . allocateValueInBuffer ] <nl> + / / CHECK : ret % swift . opaque * [ [ PHI ] ] <nl> + <nl> + <nl> + / / CHECK - LABEL : define linkonce_odr hidden % swift . opaque * @ __swift_project_value_buffer ( % swift . type * , % swift . opaque * ) <nl> + / / CHECK : [ [ CAST : % . * ] ] = bitcast % swift . type * % 0 to i8 * * * <nl> + / / CHECK : [ [ VWT_ADDR : % . * ] ] = getelementptr inbounds i8 * * , i8 * * * [ [ CAST ] ] , { { . * } } - 1 <nl> + / / CHECK : [ [ VWT : % . * ] ] = load i8 * * , i8 * * * [ [ VWT_ADDR ] ] <nl> + / / CHECK : [ [ FLAGS_ADDR : % . * ] ] = getelementptr inbounds i8 * , i8 * * % . valueWitnesses , i32 18 <nl> + / / CHECK : [ [ FLAGSWITNESS : % . * ] ] = load i8 * , i8 * * [ [ FLAGS_ADDR ] ] <nl> + / / CHECK : [ [ FLAGS : % . * ] ] = ptrtoint i8 * [ [ FLAGSWITNESS ] ] to i { { . * } } <nl> + / / CHECK : [ [ ISNOTINLINE : % . * ] ] = and { { . * } } [ [ FLAGS ] ] , 131072 <nl> + / / CHECK : [ [ ISINLINE : % . * ] ] = icmp eq { { . * } } [ [ ISNOTINLINE ] ] , 0 <nl> + / / CHECK : br i1 [ [ ISINLINE ] ] , label % done , label % outline . projectValueInBuffer <nl> + / / <nl> + / / CHECK : outline . projectValueInBuffer : <nl> + / / CHECK : [ [ CAST : % . * ] ] = bitcast % swift . opaque * % 1 to % swift . opaque * * <nl> + / / CHECK : [ [ PTR_TO_BUFFER : % . * ] ] = load % swift . opaque * , % swift . opaque * * [ [ CAST ] ] <nl> + / / CHECK : br label % done <nl> + / / <nl> + / / CHECK : done : <nl> + / / CHECK : [ [ PHI : % . * ] ] = phi % swift . opaque * [ % 1 , % entry ] , [ [ [ PTR_TO_BUFFER ] ] , % outline . projectValueInBuffer ] <nl> + / / CHECK : ret % swift . opaque * [ [ PHI ] ] <nl> mmm a / test / IRGen / witness_table_multifile . swift <nl> ppp b / test / IRGen / witness_table_multifile . swift <nl> <nl> / / CHECK - LABEL : define hidden swiftcc void @ _T023witness_table_multifile3baryyF <nl> func bar ( ) { <nl> / / CHECK : call swiftcc void @ _T023witness_table_multifile2goAA1P_pyF <nl> - / / CHECK : [ [ BUFFER : % [ 0 - 9 ] + ] ] = call % swift . 
opaque * % projectBuffer <nl> - / / CHECK - NEXT : [ [ WITNESS_TABLE_ADDR : % [ 0 - 9 ] + ] ] = getelementptr inbounds [ [ P_WITNESS_TABLE ] ] , [ [ P_WITNESS_TABLE ] ] * % 0 , i32 0 , i32 2 <nl> - / / CHECK - NEXT : [ [ WITNESS_TABLE : % [ A - Za - z0 - 9_ - ] + ] ] = load i8 * * , i8 * * * [ [ WITNESS_TABLE_ADDR ] ] <nl> + / / CHECK : [ [ WITNESS_TABLE_ADDR : % [ 0 - 9 ] + ] ] = getelementptr inbounds [ [ P_WITNESS_TABLE ] ] , [ [ P_WITNESS_TABLE ] ] * % 0 , i32 0 , i32 2 <nl> + / / CHECK : [ [ WITNESS_TABLE : % [ A - Za - z0 - 9_ - ] + ] ] = load i8 * * , i8 * * * [ [ WITNESS_TABLE_ADDR ] ] <nl> + / / CHECK : [ [ BUFFER : % [ 0 - 9 ] + ] ] = call % swift . opaque * @ __swift_project_boxed_opaque_existential_1 <nl> / / CHECK - NEXT : getelementptr inbounds i8 * , i8 * * [ [ WITNESS_TABLE ] ] , i32 3 <nl> go ( ) . foo ( ) <nl> } <nl> func bar ( ) { <nl> / / when they ' re only used as types . <nl> func useAProtocol ( ) - > ProtocolOnlyUsedAsAType ? { <nl> return nil <nl> - } <nl> \ No newline at end of file <nl> + } <nl> mmm a / test / SILGen / address_only_types . swift <nl> ppp b / test / SILGen / address_only_types . swift <nl> func address_only_materialize ( ) - > Int { <nl> / / CHECK : [ [ TEMP_PROJ : % [ 0 - 9 ] + ] ] = open_existential_addr immutable_access [ [ TEMP ] ] : $ * Unloadable to $ * [ [ OPENED : @ opened ( . * ) Unloadable ] ] <nl> / / CHECK : [ [ FOO_METHOD : % [ 0 - 9 ] + ] ] = witness_method $ [ [ OPENED ] ] , # Unloadable . foo ! 1 <nl> / / CHECK : [ [ RET : % [ 0 - 9 ] + ] ] = apply [ [ FOO_METHOD ] ] < [ [ OPENED ] ] > ( [ [ TEMP_PROJ ] ] ) <nl> - / / CHECK : destroy_addr [ [ TEMP_PROJ ] ] <nl> + / / CHECK : destroy_addr [ [ TEMP ] ] <nl> / / CHECK : dealloc_stack [ [ TEMP ] ] <nl> / / CHECK : return [ [ RET ] ] <nl> } <nl> mmm a / test / SILGen / existential_erasure . swift <nl> ppp b / test / SILGen / existential_erasure . swift <nl> func throwingFunc ( ) throws - > Bool { return true } <nl> func PQtoP ( ) { <nl> / / CHECK : [ [ PQ_PAYLOAD : % . * ] ] = open_existential_addr immutable_access [ [ PQ : % . * ] ] : $ * P & Q to $ * [ [ OPENED_TYPE : @ opened ( . * ) P & Q ] ] <nl> / / CHECK : [ [ P_PAYLOAD : % . * ] ] = init_existential_addr [ [ P : % . * ] ] : $ * P , $ [ [ OPENED_TYPE ] ] <nl> - / / CHECK : copy_addr [ take ] [ [ PQ_PAYLOAD ] ] to [ initialization ] [ [ P_PAYLOAD ] ] <nl> - / / CHECK : deinit_existential_addr [ [ PQ ] ] <nl> + / / CHECK : copy_addr [ [ PQ_PAYLOAD ] ] to [ initialization ] [ [ P_PAYLOAD ] ] <nl> + / / CHECK : destroy_addr [ [ PQ ] ] <nl> / / CHECK - NOT : destroy_addr [ [ P ] ] <nl> / / CHECK - NOT : destroy_addr [ [ P_PAYLOAD ] ] <nl> - / / CHECK - NOT : destroy_addr [ [ PQ ] ] <nl> + / / CHECK - NOT : deinit_existential_addr [ [ PQ ] ] <nl> / / CHECK - NOT : destroy_addr [ [ PQ_PAYLOAD ] ] <nl> useP ( makePQ ( ) ) <nl> } <nl> mmm a / test / SILGen / function_conversion . swift <nl> ppp b / test / SILGen / function_conversion . swift <nl> func convExistentialTrivial ( _ t2 : @ escaping ( Q ) - > Trivial , t3 : @ escaping ( Q ? ) - <nl> / / CHECK : alloc_stack $ Q <nl> / / CHECK - NEXT : open_existential_addr immutable_access % 1 : $ * P <nl> / / CHECK - NEXT : init_existential_addr % 3 : $ * Q <nl> - / / CHECK - NEXT : copy_addr [ take ] { { . * } } to [ initialization ] { { . * } } <nl> + / / CHECK - NEXT : copy_addr { { . * } } to [ initialization ] { { . 
* } } <nl> / / CHECK - NEXT : apply <nl> / / CHECK - NEXT : init_existential_addr <nl> / / CHECK - NEXT : store <nl> - / / CHECK : deinit_existential_addr <nl> + / / CHECK : destroy_addr <nl> / / CHECK : return <nl> <nl> / / = = = = Existential metatypes <nl> mmm a / test / SILGen / functions . swift <nl> ppp b / test / SILGen / functions . swift <nl> func calls ( _ i : Int , j : Int , k : Int ) { <nl> / / CHECK : [ [ PMETHOD : % [ 0 - 9 ] + ] ] = witness_method $ [ [ OPENED ] ] , # SomeProtocol . method ! 1 <nl> / / CHECK : [ [ I : % [ 0 - 9 ] + ] ] = load [ trivial ] [ [ IADDR ] ] <nl> / / CHECK : apply [ [ PMETHOD ] ] < [ [ OPENED ] ] > ( [ [ I ] ] , [ [ PVALUE ] ] ) <nl> - / / CHECK : destroy_addr [ [ PVALUE ] ] <nl> - / / CHECK : deinit_existential_addr [ [ TEMP ] ] <nl> + / / CHECK : destroy_addr [ [ TEMP ] ] <nl> / / CHECK : dealloc_stack [ [ TEMP ] ] <nl> p . method ( i ) <nl> <nl> mmm a / test / SILGen / objc_bridging_any . swift <nl> ppp b / test / SILGen / objc_bridging_any . swift <nl> func passingToId < T : CP , U > ( receiver : NSIdLover , <nl> / / CHECK : [ [ COPY : % . * ] ] = alloc_stack $ P <nl> / / CHECK : copy_addr [ [ EXISTENTIAL ] ] to [ initialization ] [ [ COPY ] ] <nl> / / CHECK : [ [ OPENED_COPY : % . * ] ] = open_existential_addr immutable_access [ [ COPY ] ] : $ * P to $ * [ [ OPENED_TYPE : @ opened . * P ] ] , <nl> + / / CHECK : [ [ TMP : % . * ] ] = alloc_stack $ [ [ OPENED_TYPE ] ] <nl> + / / CHECK : copy_addr [ [ OPENED_COPY ] ] to [ initialization ] [ [ TMP ] ] <nl> / / CHECK : / / function_ref _bridgeAnythingToObjectiveC <nl> / / CHECK : [ [ BRIDGE_ANYTHING : % . * ] ] = function_ref <nl> - / / CHECK : [ [ ANYOBJECT : % . * ] ] = apply [ [ BRIDGE_ANYTHING ] ] < [ [ OPENED_TYPE ] ] > ( [ [ OPENED_COPY ] ] ) <nl> - / / CHECK : deinit_existential_addr [ [ COPY ] ] <nl> + / / CHECK : [ [ ANYOBJECT : % . * ] ] = apply [ [ BRIDGE_ANYTHING ] ] < [ [ OPENED_TYPE ] ] > ( [ [ TMP ] ] ) <nl> + / / CHECK : dealloc_stack [ [ TMP ] ] <nl> + / / CHECK : destroy_addr [ [ COPY ] ] <nl> / / CHECK : dealloc_stack [ [ COPY ] ] <nl> / / CHECK : apply [ [ METHOD ] ] ( [ [ ANYOBJECT ] ] , [ [ BORROWED_SELF ] ] ) <nl> / / CHECK : destroy_value [ [ ANYOBJECT ] ] <nl> func passingToId < T : CP , U > ( receiver : NSIdLover , <nl> / / CHECK : [ [ COPY : % . * ] ] = alloc_stack $ Any <nl> / / CHECK : copy_addr [ [ ANY ] ] to [ initialization ] [ [ COPY ] ] <nl> / / CHECK : [ [ OPENED_COPY : % . * ] ] = open_existential_addr immutable_access [ [ COPY ] ] : $ * Any to $ * [ [ OPENED_TYPE : @ opened . * Any ] ] , <nl> + / / CHECK : [ [ TMP : % . * ] ] = alloc_stack $ [ [ OPENED_TYPE ] ] <nl> + / / CHECK : copy_addr [ [ OPENED_COPY ] ] to [ initialization ] [ [ TMP ] ] <nl> / / CHECK : / / function_ref _bridgeAnythingToObjectiveC <nl> / / CHECK : [ [ BRIDGE_ANYTHING : % . * ] ] = function_ref <nl> - / / CHECK : [ [ ANYOBJECT : % . * ] ] = apply [ [ BRIDGE_ANYTHING ] ] < [ [ OPENED_TYPE ] ] > ( [ [ OPENED_COPY ] ] ) <nl> - / / CHECK : deinit_existential_addr [ [ COPY ] ] <nl> + / / CHECK : [ [ ANYOBJECT : % . * ] ] = apply [ [ BRIDGE_ANYTHING ] ] < [ [ OPENED_TYPE ] ] > ( [ [ TMP ] ] ) <nl> + / / CHECK : destroy_addr [ [ COPY ] ] <nl> / / CHECK : dealloc_stack [ [ COPY ] ] <nl> / / CHECK : apply [ [ METHOD ] ] ( [ [ ANYOBJECT ] ] , [ [ BORROWED_SELF ] ] ) <nl> / / CHECK : destroy_value [ [ ANYOBJECT ] ] <nl> func passingToNullableId < T : CP , U > ( receiver : NSIdLover , <nl> / / CHECK - NEXT : [ [ COPY : % . 
* ] ] = alloc_stack $ P <nl> / / CHECK - NEXT : copy_addr [ [ EXISTENTIAL ] ] to [ initialization ] [ [ COPY ] ] <nl> / / CHECK - NEXT : [ [ OPENED_COPY : % . * ] ] = open_existential_addr immutable_access [ [ COPY ] ] : $ * P to $ * [ [ OPENED_TYPE : @ opened . * P ] ] , <nl> + / / CHECK : [ [ TMP : % . * ] ] = alloc_stack $ [ [ OPENED_TYPE ] ] <nl> + / / CHECK : copy_addr [ [ OPENED_COPY ] ] to [ initialization ] [ [ TMP ] ] <nl> / / CHECK - NEXT : / / function_ref _bridgeAnythingToObjectiveC <nl> / / CHECK - NEXT : [ [ BRIDGE_ANYTHING : % . * ] ] = function_ref <nl> - / / CHECK - NEXT : [ [ ANYOBJECT : % . * ] ] = apply [ [ BRIDGE_ANYTHING ] ] < [ [ OPENED_TYPE ] ] > ( [ [ OPENED_COPY ] ] ) <nl> + / / CHECK - NEXT : [ [ ANYOBJECT : % . * ] ] = apply [ [ BRIDGE_ANYTHING ] ] < [ [ OPENED_TYPE ] ] > ( [ [ TMP ] ] ) <nl> / / CHECK - NEXT : [ [ OPT_ANYOBJECT : % . * ] ] = enum { { . * } } [ [ ANYOBJECT ] ] <nl> - / / CHECK - NEXT : deinit_existential_addr [ [ COPY ] ] <nl> - / / CHECK - NEXT : dealloc_stack [ [ COPY ] ] <nl> + / / CHECK - NEXT : dealloc_stack [ [ TMP ] ] <nl> + / / CHECK - NEXT : destroy_addr [ [ COPY ] ] <nl> + / / CHECK - NEXT : dealloc_stack [ [ COPY ] ] <nl> / / CHECK - NEXT : apply [ [ METHOD ] ] ( [ [ OPT_ANYOBJECT ] ] , [ [ BORROWED_SELF ] ] ) <nl> / / CHECK - NEXT : destroy_value [ [ OPT_ANYOBJECT ] ] <nl> / / CHECK - NEXT : end_borrow [ [ BORROWED_SELF ] ] from [ [ SELF ] ] <nl> func passingToNullableId < T : CP , U > ( receiver : NSIdLover , <nl> / / CHECK - NEXT : [ [ COPY : % . * ] ] = alloc_stack $ Any <nl> / / CHECK - NEXT : copy_addr [ [ ANY ] ] to [ initialization ] [ [ COPY ] ] <nl> / / CHECK - NEXT : [ [ OPENED_COPY : % . * ] ] = open_existential_addr immutable_access [ [ COPY ] ] : $ * Any to $ * [ [ OPENED_TYPE : @ opened . * Any ] ] , <nl> + / / CHECK : [ [ TMP : % . * ] ] = alloc_stack $ [ [ OPENED_TYPE ] ] <nl> + / / CHECK : copy_addr [ [ OPENED_COPY ] ] to [ initialization ] [ [ TMP ] ] <nl> / / CHECK - NEXT : / / function_ref _bridgeAnythingToObjectiveC <nl> / / CHECK - NEXT : [ [ BRIDGE_ANYTHING : % . * ] ] = function_ref <nl> - / / CHECK - NEXT : [ [ ANYOBJECT : % . * ] ] = apply [ [ BRIDGE_ANYTHING ] ] < [ [ OPENED_TYPE ] ] > ( [ [ OPENED_COPY ] ] ) <nl> + / / CHECK - NEXT : [ [ ANYOBJECT : % . * ] ] = apply [ [ BRIDGE_ANYTHING ] ] < [ [ OPENED_TYPE ] ] > ( [ [ TMP ] ] ) <nl> / / CHECK - NEXT : [ [ OPT_ANYOBJECT : % . * ] ] = enum { { . * } } [ [ ANYOBJECT ] ] <nl> - / / CHECK - NEXT : deinit_existential_addr [ [ COPY ] ] <nl> + / / CHECK - NEXT : dealloc_stack [ [ TMP ] ] <nl> + / / CHECK - NEXT : destroy_addr [ [ COPY ] ] <nl> / / CHECK - NEXT : dealloc_stack [ [ COPY ] ] <nl> / / CHECK - NEXT : apply [ [ METHOD ] ] ( [ [ OPT_ANYOBJECT ] ] , [ [ BORROWED_SELF ] ] ) <nl> / / CHECK - NEXT : destroy_value [ [ OPT_ANYOBJECT ] ] <nl> class SwiftIdLover : NSObject , Anyable { <nl> / / CHECK : end_borrow [ [ BORROWED_SELF_COPY ] ] from [ [ SELF_COPY ] ] <nl> / / CHECK : destroy_value [ [ SELF_COPY ] ] <nl> / / CHECK : [ [ OPEN_RESULT : % . * ] ] = open_existential_addr immutable_access [ [ NATIVE_RESULT ] ] <nl> + / / CHECK : [ [ TMP : % . * ] ] = alloc_stack <nl> + / / CHECK : copy_addr [ [ OPEN_RESULT ] ] to [ initialization ] [ [ TMP ] ] <nl> / / CHECK : [ [ BRIDGE_ANYTHING : % . * ] ] = function_ref @ _T0s27_bridgeAnythingToObjectiveC { { . * } } F <nl> - / / CHECK : [ [ OBJC_RESULT : % . * ] ] = apply [ [ BRIDGE_ANYTHING ] ] < { { . * } } > ( [ [ OPEN_RESULT ] ] ) <nl> + / / CHECK : [ [ OBJC_RESULT : % . 
* ] ] = apply [ [ BRIDGE_ANYTHING ] ] < { { . * } } > ( [ [ TMP ] ] ) <nl> / / CHECK : return [ [ OBJC_RESULT ] ] <nl> / / CHECK : } / / end sil function ' _T017objc_bridging_any12SwiftIdLoverC18methodReturningAnyypyFTo ' <nl> <nl> class SwiftIdLover : NSObject , Anyable { <nl> / / CHECK - LABEL : sil shared [ transparent ] [ reabstraction_thunk ] @ _T0s9AnyObject_pIyBy_ypIxi_TR <nl> / / CHECK : bb0 ( [ [ ANY : % . * ] ] : $ * Any , [ [ BLOCK : % . * ] ] : $ @ convention ( block ) ( AnyObject ) - > ( ) ) : <nl> / / CHECK - NEXT : [ [ OPENED_ANY : % . * ] ] = open_existential_addr immutable_access [ [ ANY ] ] : $ * Any to $ * [ [ OPENED_TYPE : @ opened . * Any ] ] , <nl> + / / CHECK : [ [ TMP : % . * ] ] = alloc_stack <nl> + / / CHECK : copy_addr [ [ OPENED_ANY ] ] to [ initialization ] [ [ TMP ] ] <nl> / / CHECK - NEXT : / / function_ref _bridgeAnythingToObjectiveC <nl> / / CHECK - NEXT : [ [ BRIDGE_ANYTHING : % . * ] ] = function_ref <nl> - / / CHECK - NEXT : [ [ BRIDGED : % . * ] ] = apply [ [ BRIDGE_ANYTHING ] ] < [ [ OPENED_TYPE ] ] > ( [ [ OPENED_ANY ] ] ) <nl> + / / CHECK - NEXT : [ [ BRIDGED : % . * ] ] = apply [ [ BRIDGE_ANYTHING ] ] < [ [ OPENED_TYPE ] ] > ( [ [ TMP ] ] ) <nl> / / CHECK - NEXT : apply [ [ BLOCK ] ] ( [ [ BRIDGED ] ] ) <nl> / / CHECK - NEXT : [ [ VOID : % . * ] ] = tuple ( ) <nl> / / CHECK - NEXT : destroy_value [ [ BLOCK ] ] <nl> / / CHECK - NEXT : destroy_value [ [ BRIDGED ] ] <nl> - / / CHECK - NEXT : deinit_existential_addr [ [ ANY ] ] <nl> + / / CHECK - NEXT : dealloc_stack [ [ TMP ] ] <nl> + / / CHECK - NEXT : destroy_addr [ [ ANY ] ] <nl> / / CHECK - NEXT : return [ [ VOID ] ] <nl> <nl> func methodTakingBlockTakingAny ( _ : ( Any ) - > ( ) ) { } <nl> class SwiftIdLover : NSObject , Anyable { <nl> / / CHECK - NEXT : [ [ RESULT : % . * ] ] = alloc_stack $ Any <nl> / / CHECK - NEXT : apply [ [ FUNCTION ] ] ( [ [ RESULT ] ] ) <nl> / / CHECK - NEXT : [ [ OPENED : % . * ] ] = open_existential_addr immutable_access [ [ RESULT ] ] : $ * Any to $ * [ [ OPENED_TYPE : @ opened . * Any ] ] , <nl> + / / CHECK : [ [ TMP : % . * ] ] = alloc_stack $ [ [ OPENED_TYPE ] ] <nl> + / / CHECK : copy_addr [ [ OPENED ] ] to [ initialization ] [ [ TMP ] ] <nl> / / CHECK - NEXT : / / function_ref _bridgeAnythingToObjectiveC <nl> / / CHECK - NEXT : [ [ BRIDGE_ANYTHING : % . * ] ] = function_ref <nl> - / / CHECK - NEXT : [ [ BRIDGED : % . * ] ] = apply [ [ BRIDGE_ANYTHING ] ] < [ [ OPENED_TYPE ] ] > ( [ [ OPENED ] ] ) <nl> - / / CHECK - NEXT : deinit_existential_addr [ [ RESULT ] ] <nl> + / / CHECK - NEXT : [ [ BRIDGED : % . * ] ] = apply [ [ BRIDGE_ANYTHING ] ] < [ [ OPENED_TYPE ] ] > ( [ [ TMP ] ] ) <nl> + / / CHECK - NEXT : dealloc_stack [ [ TMP ] ] <nl> + / / CHECK - NEXT : destroy_addr [ [ RESULT ] ] <nl> / / CHECK - NEXT : dealloc_stack [ [ RESULT ] ] <nl> / / CHECK - NEXT : return [ [ BRIDGED ] ] <nl> <nl> mmm a / test / SILGen / opaque_values_silgen . swift <nl> ppp b / test / SILGen / opaque_values_silgen . swift <nl> enum AddressOnlyEnum { <nl> / / CHECK - LABEL : sil shared [ transparent ] [ reabstraction_thunk ] @ _T020opaque_values_silgen1P_pAA13TrivialStructVIxid_AA2P2_pAaE_pIxir_TR : $ @ convention ( thin ) ( @ in P2 , @ owned @ callee_owned ( @ in P ) - > TrivialStruct ) - > @ out P2 { <nl> / / CHECK : bb0 ( [ [ ARG0 : % . * ] ] : $ P2 , [ [ ARG1 : % . * ] ] : $ @ callee_owned ( @ in P ) - > TrivialStruct ) : <nl> / / CHECK : [ [ OPENED_ARG : % . * ] ] = open_existential_opaque [ [ ARG0 ] ] : $ P2 to $ @ opened ( { { . * } } ) P2 <nl> - / / CHECK : [ [ INIT_P : % . 
* ] ] = init_existential_opaque [ [ OPENED_ARG ] ] : $ @ opened ( { { . * } } ) P2 , $ @ opened ( { { . * } } ) P2 , $ P <nl> + / / CHECK : [ [ COPIED_VAL : % . * ] ] = copy_value [ [ OPENED_ARG ] ] <nl> + / / CHECK : [ [ INIT_P : % . * ] ] = init_existential_opaque [ [ COPIED_VAL ] ] : $ @ opened ( { { . * } } ) P2 , $ @ opened ( { { . * } } ) P2 , $ P <nl> / / CHECK : [ [ BORROWED_ARG : % . * ] ] = begin_borrow [ [ INIT_P ] ] <nl> / / CHECK : [ [ APPLY_P : % . * ] ] = apply [ [ ARG1 ] ] ( [ [ BORROWED_ARG ] ] ) : $ @ callee_owned ( @ in P ) - > TrivialStruct <nl> / / CHECK : [ [ RETVAL : % . * ] ] = init_existential_opaque [ [ APPLY_P ] ] : $ TrivialStruct , $ TrivialStruct , $ P2 <nl> / / CHECK : end_borrow [ [ BORROWED_ARG ] ] from [ [ INIT_P ] ] : $ P , $ P <nl> - / / CHECK : destroy_value [ [ OPENED_ARG ] ] <nl> - / / CHECK : deinit_existential_opaque [ [ ARG0 ] ] <nl> + / / CHECK : destroy_value [ [ COPIED_VAL ] ] <nl> + / / CHECK : destroy_value [ [ ARG0 ] ] <nl> / / CHECK : return [ [ RETVAL ] ] : $ P2 <nl> / / CHECK - LABEL : } / / end sil function ' _T020opaque_values_silgen1P_pAA13TrivialStructVIxid_AA2P2_pAaE_pIxir_TR ' <nl> <nl> func s190___return_foo_var ( ) - > Foo { <nl> / / CHECK : [ [ OPEN_VAR : % . * ] ] = open_existential_opaque [ [ LOAD_GLOBAL ] ] : $ Foo <nl> / / CHECK : [ [ WITNESS : % . * ] ] = witness_method $ @ opened <nl> / / CHECK : apply [ [ WITNESS ] ] <nl> - / / CHECK : destroy_value [ [ OPEN_VAR ] ] <nl> - / / CHECK : deinit_existential_opaque [ [ LOAD_GLOBAL ] ] : $ Foo <nl> + / / CHECK : destroy_value [ [ LOAD_GLOBAL ] ] <nl> / / CHECK : return % { { . * } } : $ ( ) <nl> / / CHECK - LABEL : } / / end sil function ' _T020opaque_values_silgen21s200______use_foo_varyyF ' <nl> func s200______use_foo_var ( ) { <nl> mmm a / test / SILGen / protocol_extensions . swift <nl> ppp b / test / SILGen / protocol_extensions . swift <nl> func test_open_existential_semantics_opaque ( _ guaranteed : P1 , <nl> / / CHECK : [ [ METHOD : % . * ] ] = function_ref <nl> / / - - Can consume the value from our own copy <nl> / / CHECK : apply [ [ METHOD ] ] < { { . * } } > ( [ [ VALUE ] ] ) <nl> - / / CHECK : deinit_existential_addr [ [ IMMEDIATE ] ] <nl> + / / CHECK : destroy_addr [ [ IMMEDIATE ] ] <nl> / / CHECK : dealloc_stack [ [ IMMEDIATE ] ] <nl> immediate . f1 ( ) <nl> <nl> func test_open_existential_semantics_opaque ( _ guaranteed : P1 , <nl> / / CHECK : [ [ METHOD : % . * ] ] = function_ref <nl> / / - - Can consume the value from our own copy <nl> / / CHECK : apply [ [ METHOD ] ] < { { . * } } > ( [ [ VALUE ] ] ) <nl> - / / CHECK : deinit_existential_addr [ [ PLUS_ONE ] ] <nl> + / / CHECK : destroy_addr [ [ PLUS_ONE ] ] <nl> / / CHECK : dealloc_stack [ [ PLUS_ONE ] ] <nl> plusOneP1 ( ) . f1 ( ) <nl> } <nl> mmm a / test / SILOptimizer / mandatory_inlining . sil <nl> ppp b / test / SILOptimizer / mandatory_inlining . 
sil <nl> bb0 ( % 0 : $ * L ) : <nl> % 5 = open_existential_addr immutable_access % 3 : $ * P2 to $ * @ opened ( " 5C6E227C - 235E - 11E6 - AA98 - B8E856428C60 " ) P2 <nl> % 6 = function_ref @ P2_s : $ @ convention ( method ) < τ_0_0 where τ_0_0 : P2 > ( @ in_guaranteed τ_0_0 ) - > Int32 <nl> % 7 = apply % 6 < @ opened ( " 5C6E227C - 235E - 11E6 - AA98 - B8E856428C60 " ) P2 > ( % 5 ) : $ @ convention ( method ) < τ_0_0 where τ_0_0 : P2 > ( @ in_guaranteed τ_0_0 ) - > Int32 <nl> - destroy_addr % 5 : $ * @ opened ( " 5C6E227C - 235E - 11E6 - AA98 - B8E856428C60 " ) P2 <nl> - deinit_existential_addr % 3 : $ * P2 <nl> + destroy_addr % 3 : $ * P2 <nl> dealloc_stack % 3 : $ * P2 <nl> return % 7 : $ Int32 <nl> } <nl> mmm a / test / SILOptimizer / sil_combine . sil <nl> ppp b / test / SILOptimizer / sil_combine . sil <nl> bb0 ( % 0 : $ VV ) : <nl> % 13 = init_existential_addr % 8 : $ * PM , $ @ opened ( " 090C3DB0 - 1C76 - 11E6 - 81C4 - B8E856428C60 " ) PM <nl> % 14 = function_ref @ plus : $ @ convention ( method ) < τ_0_0 where τ_0_0 : PM > ( @ in_guaranteed τ_0_0 ) - > @ out τ_0_0 <nl> % 15 = apply % 14 < @ opened ( " 090C3DB0 - 1C76 - 11E6 - 81C4 - B8E856428C60 " ) PM > ( % 13 , % 12 ) : $ @ convention ( method ) < τ_0_0 where τ_0_0 : PM > ( @ in_guaranteed τ_0_0 ) - > @ out τ_0_0 <nl> - destroy_addr % 12 : $ * @ opened ( " 090C3DB0 - 1C76 - 11E6 - 81C4 - B8E856428C60 " ) PM <nl> - deinit_existential_addr % 10 : $ * PM <nl> + destroy_addr % 10 : $ * PM <nl> dealloc_stack % 10 : $ * PM <nl> % 20 = function_ref @ minus : $ @ convention ( method ) < τ_0_0 where τ_0_0 : PM > ( @ in_guaranteed τ_0_0 ) - > ( ) <nl> % 21 = apply % 20 < @ opened ( " 090C3DB0 - 1C76 - 11E6 - 81C4 - B8E856428C60 " ) PM > ( % 13 ) : $ @ convention ( method ) < τ_0_0 where τ_0_0 : PM > ( @ in_guaranteed τ_0_0 ) - > ( ) <nl> mmm a / test / sil - llvm - gen / alloc . sil <nl> ppp b / test / sil - llvm - gen / alloc . sil <nl> struct Huge { <nl> } <nl> <nl> / / CHECK : @ _swift_slowAlloc = external global i8 * ( [ [ SIZE_T : i ( 32 | 64 ) ] ] , <nl> - / / CHECK : define linkonce_odr hidden i8 * @ swift_rt_swift_slowAlloc ( [ [ SIZE_T : i ( 32 | 64 ) ] ] , <nl> <nl> / / CHECK : define linkonce_odr hidden void @ _T04main4HugeVwde ( <nl> / / CHECK : [ [ T0 : % . * ] ] = bitcast [ [ BUFFER : . [ 0 - 9 ] + x i8 . ] ] * { { % . * } } to i8 * * <nl> struct Huge { <nl> / / CHECK - NEXT : call void @ swift_rt_swift_slowDealloc ( i8 * [ [ T1 ] ] , [ [ SIZE_T ] ] 4097 , [ [ SIZE_T ] ] 7 ) <nl> / / CHECK - NEXT : ret void <nl> <nl> - / / CHECK : define linkonce_odr hidden [ [ OPAQUE : % swift . opaque ] ] * @ _T04main4HugeVwal ( <nl> - / / CHECK : [ [ T0 : % . * ] ] = call noalias i8 * @ swift_rt_swift_slowAlloc ( [ [ SIZE_T ] ] 4097 , [ [ SIZE_T ] ] 7 ) <nl> - / / CHECK - NEXT : [ [ T1 : % . * ] ] = bitcast [ [ BUFFER ] ] * { { % . * } } to i8 * * <nl> - / / CHECK - NEXT : store i8 * [ [ T0 ] ] , i8 * * [ [ T1 ] ] <nl> + <nl> + / / CHECK : define linkonce_odr hidden [ [ OPAQUE : % swift . opaque ] ] * @ _T04main4HugeVwal ( { { . * } } * [ [ BUFFER : % . * ] ] , % swift . type <nl> + / / CHECK : [ [ BOXPAIR : % . * ] ] = call { % swift . refcounted * , % swift . opaque * } @ swift_allocBox ( { { . * } } @ _T04main4HugeVMf <nl> + / / CHECK : [ [ REF : % . * ] ] = extractvalue { % swift . refcounted * , % swift . opaque * } % 0 , 0 <nl> + / / CHECK : [ [ BUFFER_ADDR : % . * ] ] = bitcast { { . * } } * [ [ BUFFER ] ] to % swift . refcounted * * <nl> + / / CHECK : store % swift . refcounted * [ [ REF ] ] , % swift . 
refcounted * * [ [ BUFFER_ADDR ] ] <nl> + <nl> + <nl> + / / CHECK : define linkonce_odr hidden i8 * @ swift_rt_swift_slowAlloc ( [ [ SIZE_T : i ( 32 | 64 ) ] ] , <nl> mmm a / utils / build - script - impl <nl> ppp b / utils / build - script - impl <nl> KNOWN_SETTINGS = ( <nl> coverage - db " " " If set , coverage database to use when prioritizing testing " <nl> build - toolchain - only " " " If set , only build the necessary tools to build an external toolchain " <nl> skip - local - host - install " " " If we are cross - compiling multiple targets , skip an install pass locally if the hosts match " <nl> - swift - runtime - enable - cow - existentials " 0 " " Enable the copy - on - write existential implementation " <nl> + swift - runtime - enable - cow - existentials " 1 " " Enable the copy - on - write existential implementation " <nl> ) <nl> <nl> # Centralized access point for traced command invocation . <nl>
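Note on the build-script-impl hunk above: the commit flips the default of the swift-runtime-enable-cow-existentials setting from "0" to "1", so the copy-on-write existential implementation is now enabled unless a build explicitly opts out. The invocation below is a hedged sketch and is not part of the commit; it assumes the usual convention that arguments after "--" are forwarded from utils/build-script to build-script-impl, where each KNOWN_SETTINGS entry is accepted as a --<setting>=<value> option.

    # Hypothetical opt-out (assumption: standard build-script-impl option forwarding);
    # this would restore the previous default by disabling the COW existential runtime.
    utils/build-script --release -- --swift-runtime-enable-cow-existentials=0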
Merge pull request from aschwaighofer / wip_enable_cow_existentials
apple/swift
47fbdfdbfcfd9fee06d7670e7a0842a4d216fa66
2017-03-29T16:59:29Z