Columns:
  diff  — string, lengths 41 to 2.03M
  msg   — string, lengths 1 to 1.5k
  repo  — string, lengths 5 to 40
  sha   — string, lengths 40 to 40
  time  — string, lengths 20 to 20
mmm a / src / buffer_cache / blob . cc <nl> ppp b / src / buffer_cache / blob . cc <nl> void blob_t : : write_from_string ( const std : : string & val , transaction_t * txn , int64 <nl> expose_region ( txn , rwi_write , offset , val . size ( ) , & dest , & acq ) ; <nl> <nl> buffer_group_t src ; <nl> - src . add_buffer ( val . size ( ) , val . c_str ( ) ) ; <nl> + src . add_buffer ( val . size ( ) , val . data ( ) ) ; <nl> buffer_group_copy_data ( & dest , const_view ( & src ) ) ; <nl> } <nl> <nl>
Use .data not .c_str, thank you.
rethinkdb/rethinkdb
38726c273ff44b6c6cc8522fb4436cc160e661a7
2012-10-16T08:46:19Z
mmm a / dbms / src / Common / DiskSpaceMonitor . cpp <nl> ppp b / dbms / src / Common / DiskSpaceMonitor . cpp <nl> bool Disk : : tryReserve ( UInt64 bytes ) const <nl> std : : lock_guard lock ( mutex ) ; <nl> if ( bytes = = 0 ) <nl> { <nl> - LOG_DEBUG ( & Logger : : get ( " DiskSpaceMonitor " ) , " Reserve 0 bytes on disk " < < name ) ; <nl> + LOG_DEBUG ( & Logger : : get ( " DiskSpaceMonitor " ) , " Reserving 0 bytes on disk " < < name ) ; <nl> + + reservation_count ; <nl> return true ; <nl> } <nl> + <nl> available_space - = std : : min ( available_space , reserved_bytes ) ; <nl> - LOG_DEBUG ( & Logger : : get ( " DiskSpaceMonitor " ) , " Unreserved " < < available_space < < " , to reserve " < < bytes < < " on disk " < < name ) ; <nl> + LOG_DEBUG ( & Logger : : get ( " DiskSpaceMonitor " ) , " Reserving " < < bytes < < " bytes on disk " < < name < < " having available " < < available_space < < " bytes . " ) ; <nl> if ( available_space > = bytes ) <nl> { <nl> + + reservation_count ; <nl> Reservation : : ~ Reservation ( ) <nl> if ( disk_ptr - > reserved_bytes < size ) <nl> { <nl> disk_ptr - > reserved_bytes = 0 ; <nl> - LOG_ERROR ( & Logger : : get ( " DiskSpaceMonitor " ) , " Unbalanced reservations size ; it ' s a bug " ) ; <nl> + LOG_ERROR ( & Logger : : get ( " DiskSpaceMonitor " ) , " Unbalanced reservations size . It ' s a bug . " ) ; <nl> } <nl> else <nl> { <nl> Reservation : : ~ Reservation ( ) <nl> } <nl> <nl> if ( disk_ptr - > reservation_count = = 0 ) <nl> - LOG_ERROR ( & Logger : : get ( " DiskSpaceMonitor " ) , " Unbalanced reservation count ; it ' s a bug " ) ; <nl> + LOG_ERROR ( & Logger : : get ( " DiskSpaceMonitor " ) , " Unbalanced reservation count . It ' s a bug . " ) ; <nl> else <nl> - - disk_ptr - > reservation_count ; <nl> } <nl> Volume : : Volume ( <nl> } <nl> <nl> if ( disks . empty ( ) ) <nl> - throw Exception ( " Volume must contain at least one disk " , ErrorCodes : : EXCESSIVE_ELEMENT_IN_CONFIG ) ; <nl> + throw Exception ( " Volume must contain at least one disk . " , ErrorCodes : : EXCESSIVE_ELEMENT_IN_CONFIG ) ; <nl> <nl> auto has_max_bytes = config . has ( config_prefix + " . max_data_part_size_bytes " ) ; <nl> auto has_max_ratio = config . has ( config_prefix + " . max_data_part_size_ratio " ) ; <nl> if ( has_max_bytes & & has_max_ratio ) <nl> - { <nl> - throw Exception ( " Only one of ' max_data_part_size_bytes ' and ' max_data_part_size_ratio ' should be specified " , <nl> + throw Exception ( " Only one of ' max_data_part_size_bytes ' and ' max_data_part_size_ratio ' should be specified . " , <nl> ErrorCodes : : EXCESSIVE_ELEMENT_IN_CONFIG ) ; <nl> - } <nl> <nl> if ( has_max_bytes ) <nl> { <nl> Volume : : Volume ( <nl> { <nl> auto ratio = config . getDouble ( config_prefix + " . max_data_part_size_ratio " ) ; <nl> if ( ratio < 0 ) <nl> - throw Exception ( " ' max_data_part_size_ratio ' have to be not less then 0 " , <nl> + throw Exception ( " ' max_data_part_size_ratio ' have to be not less then 0 . 
" , <nl> ErrorCodes : : EXCESSIVE_ELEMENT_IN_CONFIG ) ; <nl> UInt64 sum_size = 0 ; <nl> std : : vector < UInt64 > sizes ; <nl> UInt64 Volume : : getMaxUnreservedFreeSpace ( ) const <nl> return res ; <nl> } <nl> <nl> - StoragePolicy : : StoragePolicy ( String name_ , const Poco : : Util : : AbstractConfiguration & config , const std : : string & config_prefix , <nl> - const DiskSelector & disks ) : name ( std : : move ( name_ ) ) <nl> + StoragePolicy : : StoragePolicy ( <nl> + String name_ , <nl> + const Poco : : Util : : AbstractConfiguration & config , <nl> + const std : : string & config_prefix , <nl> + const DiskSelector & disks ) <nl> + : name ( std : : move ( name_ ) ) <nl> { <nl> String volumes_prefix = config_prefix + " . volumes " ; <nl> if ( ! config . has ( volumes_prefix ) ) <nl> StoragePolicy : : StoragePolicy ( String name_ , const Poco : : Util : : AbstractConfigurati <nl> } <nl> <nl> if ( volumes . empty ( ) ) <nl> - throw Exception ( " StoragePolicy must contain at least one Volume " , ErrorCodes : : EXCESSIVE_ELEMENT_IN_CONFIG ) ; <nl> + throw Exception ( " StoragePolicy must contain at least one Volume . " , ErrorCodes : : EXCESSIVE_ELEMENT_IN_CONFIG ) ; <nl> <nl> / / / Check that disks are unique in Policy <nl> std : : set < String > disk_names ; <nl> StoragePolicy : : StoragePolicy ( String name_ , const Poco : : Util : : AbstractConfigurati <nl> for ( const auto & disk : volume - > disks ) <nl> { <nl> if ( disk_names . find ( disk - > getName ( ) ) ! = disk_names . end ( ) ) <nl> - throw Exception ( " StoragePolicy disks must not be repeated : " + disk - > getName ( ) , ErrorCodes : : EXCESSIVE_ELEMENT_IN_CONFIG ) ; <nl> + throw Exception ( " Duplicate disk ' " + disk - > getName ( ) + " ' in storage policy ' " + name + " ' . " , ErrorCodes : : EXCESSIVE_ELEMENT_IN_CONFIG ) ; <nl> <nl> disk_names . insert ( disk - > getName ( ) ) ; <nl> } <nl> StoragePolicy : : StoragePolicy ( String name_ , const Poco : : Util : : AbstractConfigurati <nl> <nl> move_factor = config . getDouble ( config_prefix + " . move_factor " , 0 . 1 ) ; <nl> if ( move_factor > 1 ) <nl> - throw Exception ( " Disk move factor have to be in [ 0 . , 1 . ] interval , but set to " + toString ( move_factor ) , <nl> + throw Exception ( " Disk move factor have to be in [ 0 . , 1 . ] interval , but set to " + toString ( move_factor ) + " . " , <nl> ErrorCodes : : LOGICAL_ERROR ) ; <nl> <nl> } <nl> <nl> <nl> StoragePolicy : : StoragePolicy ( String name_ , Volumes volumes_ , double move_factor_ ) <nl> - : volumes ( std : : move ( volumes_ ) ) , <nl> - name ( std : : move ( name_ ) ) , <nl> - move_factor ( move_factor_ ) <nl> + : volumes ( std : : move ( volumes_ ) ) <nl> + , name ( std : : move ( name_ ) ) <nl> + , move_factor ( move_factor_ ) <nl> { <nl> if ( volumes . empty ( ) ) <nl> - throw Exception ( " StoragePolicy must contain at least one Volume " , ErrorCodes : : UNKNOWN_POLICY ) ; <nl> + throw Exception ( " StoragePolicy must contain at least one Volume . " , ErrorCodes : : UNKNOWN_POLICY ) ; <nl> <nl> if ( move_factor > 1 ) <nl> throw Exception ( " Disk move factor have to be in [ 0 . , 1 . ] interval , but set to " + toString ( move_factor ) , <nl> StoragePolicy : : StoragePolicy ( String name_ , Volumes volumes_ , double move_factor_ <nl> for ( size_t i = 0 ; i < volumes . size ( ) ; + + i ) <nl> { <nl> if ( volumes_names . find ( volumes [ i ] - > getName ( ) ) ! = volumes_names . 
end ( ) ) <nl> - throw Exception ( " Volumes names must be unique ( " + volumes [ i ] - > getName ( ) + " duplicated ) " , ErrorCodes : : UNKNOWN_POLICY ) ; <nl> + throw Exception ( " Volumes names must be unique ( " + volumes [ i ] - > getName ( ) + " duplicated ) . " , ErrorCodes : : UNKNOWN_POLICY ) ; <nl> volumes_names [ volumes [ i ] - > getName ( ) ] = i ; <nl> } <nl> } <nl> DiskPtr StoragePolicy : : getAnyDisk ( ) const <nl> / / / StoragePolicy must contain at least one Volume <nl> / / / Volume must contain at least one Disk <nl> if ( volumes . empty ( ) ) <nl> - { <nl> - LOG_ERROR ( & Logger : : get ( " StoragePolicy " ) , " No volumes at StoragePolicy " < < name ) ; <nl> - throw Exception ( " StoragePolicy has no Volumes . it ' s a bug " , ErrorCodes : : NOT_ENOUGH_SPACE ) ; <nl> - } <nl> + throw Exception ( " StoragePolicy has no Volumes . It ' s a bug . " , ErrorCodes : : NOT_ENOUGH_SPACE ) ; <nl> + <nl> if ( volumes [ 0 ] - > disks . empty ( ) ) <nl> - { <nl> - LOG_ERROR ( & Logger : : get ( " StoragePolicy " ) , " No Disks at volume 0 at StoragePolicy " < < name ) ; <nl> - throw Exception ( " StoragePolicy Volume 1 has no Disks . it ' s a bug " , ErrorCodes : : NOT_ENOUGH_SPACE ) ; <nl> - } <nl> + throw Exception ( " StoragePolicy Volume 1 has no disks . It ' s a bug . " , ErrorCodes : : NOT_ENOUGH_SPACE ) ; <nl> + <nl> return volumes [ 0 ] - > disks [ 0 ] ; <nl> } <nl> <nl> size_t StoragePolicy : : getVolumePriorityByDisk ( const DiskPtr & disk_ptr ) const <nl> for ( size_t i = 0 ; i < volumes . size ( ) ; + + i ) <nl> { <nl> const auto & volume = volumes [ i ] ; <nl> - for ( auto & & disk : volume - > disks ) <nl> - { <nl> + for ( const auto & disk : volume - > disks ) <nl> if ( disk - > getName ( ) = = disk_ptr - > getName ( ) ) <nl> return i ; <nl> - } <nl> } <nl> - throw Exception ( " No disk " + disk_ptr - > getName ( ) + " in Policy " + name , ErrorCodes : : UNKNOWN_DISK ) ; <nl> + throw Exception ( " No disk " + disk_ptr - > getName ( ) + " in policy " + name , ErrorCodes : : UNKNOWN_DISK ) ; <nl> } <nl> <nl> <nl> StoragePolicySelector : : StoragePolicySelector ( <nl> constexpr auto default_storage_policy_name = " default " ; <nl> constexpr auto default_volume_name = " default " ; <nl> constexpr auto default_disk_name = " default " ; <nl> + <nl> if ( policies . find ( default_storage_policy_name ) = = policies . end ( ) ) <nl> { <nl> auto default_volume = std : : make_shared < Volume > ( <nl> StoragePolicySelector : : StoragePolicySelector ( <nl> } <nl> } <nl> <nl> - const StoragePolicyPtr & StoragePolicySelector : : operator [ ] ( const String & name ) const <nl> + const StoragePolicyPtr & StoragePolicySelector : : operator [ ] ( const String & name ) const <nl> { <nl> auto it = policies . find ( name ) ; <nl> if ( it = = policies . end ( ) ) <nl> mmm a / dbms / src / Common / DiskSpaceMonitor . h <nl> ppp b / dbms / src / Common / DiskSpaceMonitor . 
h <nl> class Disk : public Space <nl> <nl> bool tryReserve ( UInt64 bytes ) const ; <nl> <nl> - const String & getName ( ) const override <nl> - { <nl> - return name ; <nl> - } <nl> + const String & getName ( ) const override { return name ; } <nl> <nl> - const String & getPath ( ) const <nl> - { <nl> - return path ; <nl> - } <nl> + const String & getPath ( ) const { return path ; } <nl> <nl> - UInt64 getKeepingFreeSpace ( ) const <nl> - { <nl> - return keep_free_space_bytes ; <nl> - } <nl> + UInt64 getKeepingFreeSpace ( ) const { return keep_free_space_bytes ; } <nl> <nl> - Stat getSpaceInformation ( ) const <nl> - { <nl> - return Stat ( * this ) ; <nl> - } <nl> + Stat getSpaceInformation ( ) const { return Stat ( * this ) ; } <nl> <nl> - UInt64 getTotalSpace ( ) const <nl> - { <nl> - return getSpaceInformation ( ) . getTotalSpace ( ) ; <nl> - } <nl> + UInt64 getTotalSpace ( ) const { return getSpaceInformation ( ) . getTotalSpace ( ) ; } <nl> <nl> - UInt64 getAvailableSpace ( ) const <nl> - { <nl> - return getSpaceInformation ( ) . getAvailableSpace ( ) ; <nl> - } <nl> + UInt64 getAvailableSpace ( ) const { return getSpaceInformation ( ) . getAvailableSpace ( ) ; } <nl> <nl> UInt64 getUnreservedSpace ( ) const ; <nl> <nl> mmm a / dbms / src / Storages / MergeTree / DataPartsExchange . cpp <nl> ppp b / dbms / src / Storages / MergeTree / DataPartsExchange . cpp <nl> namespace DataPartsExchange <nl> namespace <nl> { <nl> <nl> + static constexpr auto REPLICATION_PROTOCOL_VERSION_WITHOUT_PARTS_SIZE = " 0 " ; <nl> + static constexpr auto REPLICATION_PROTOCOL_VERSION_WITH_PARTS_SIZE = " 1 " ; <nl> + <nl> std : : string getEndpointId ( const std : : string & node_id ) <nl> { <nl> return " DataPartsExchange : " + node_id ; <nl> void Service : : processQuery ( const Poco : : Net : : HTMLForm & params , ReadBuffer & / * bo <nl> if ( blocker . isCancelled ( ) ) <nl> throw Exception ( " Transferring part to replica was cancelled " , ErrorCodes : : ABORTED ) ; <nl> <nl> - / / / " 0 " for backward compatibility <nl> - String protocol_version = params . get ( " protocol_version " , " 0 " ) ; <nl> - <nl> - String part_name ; <nl> + String protocol_version = params . get ( " protocol_version " , REPLICATION_PROTOCOL_VERSION_WITHOUT_PARTS_SIZE ) ; <nl> + String part_name = params . get ( " part " ) ; <nl> <nl> - if ( protocol_version = = " 0 " ) <nl> - part_name = params . get ( " part " ) ; <nl> - else if ( protocol_version = = " 1 " ) <nl> - part_name = params . get ( " part_name " ) ; <nl> - else <nl> + if ( protocol_version ! = REPLICATION_PROTOCOL_VERSION_WITH_PARTS_SIZE & & protocol_version ! = REPLICATION_PROTOCOL_VERSION_WITHOUT_PARTS_SIZE ) <nl> throw Exception ( " Unsupported fetch protocol version " , ErrorCodes : : UNKNOWN_PROTOCOL ) ; <nl> <nl> const auto data_settings = data . getSettings ( ) ; <nl> void Service : : processQuery ( const Poco : : Net : : HTMLForm & params , ReadBuffer & / * bo <nl> <nl> MergeTreeData : : DataPart : : Checksums data_checksums ; <nl> <nl> - if ( protocol_version = = " 1 " ) <nl> - { <nl> - / / / Get size of all files <nl> - UInt64 all_part_files_size = 0 ; <nl> - for ( const auto & it : checksums . files ) <nl> - { <nl> - String file_name = it . first ; <nl> - String path = part - > getFullPath ( ) + file_name ; <nl> - all_part_files_size + = Poco : : File ( path ) . getSize ( ) ; <nl> - } <nl> - writeBinary ( all_part_files_size , out ) ; <nl> - } <nl> + if ( protocol_version = = REPLICATION_PROTOCOL_VERSION_WITH_PARTS_SIZE ) <nl> + writeBinary ( checksums . 
getTotalSizeOnDisk ( ) , out ) ; <nl> <nl> writeBinary ( checksums . files . size ( ) , out ) ; <nl> <nl> MergeTreeData : : MutableDataPartPtr Fetcher : : fetchPart ( <nl> { <nl> { " endpoint " , getEndpointId ( replica_path ) } , <nl> { " part_name " , part_name } , <nl> - { " protocol_version " , " 1 " } , <nl> + { " protocol_version " , REPLICATION_PROTOCOL_VERSION_WITH_PARTS_SIZE } , <nl> { " compress " , " false " } <nl> } ) ; <nl> <nl> mmm a / dbms / src / Storages / MergeTree / MergeTreeDataMergerMutator . cpp <nl> ppp b / dbms / src / Storages / MergeTree / MergeTreeDataMergerMutator . cpp <nl> <nl> # include < Storages / MergeTree / TTLMergeSelector . h > <nl> # include < Storages / MergeTree / MergeList . h > <nl> # include < Storages / MergeTree / StorageFromMergeTreeDataPart . h > <nl> - # include < Storages / MergeTree / BackgroundProcessingPool . h > <nl> # include < DataStreams / TTLBlockInputStream . h > <nl> # include < DataStreams / DistinctSortedBlockInputStream . h > <nl> # include < DataStreams / ExpressionBlockInputStream . h > <nl> void FutureMergedMutatedPart : : assign ( MergeTreeData : : DataPartsVector parts_ ) <nl> name = part_info . getPartName ( ) ; <nl> } <nl> <nl> - MergeTreeDataMergerMutator : : MergeTreeDataMergerMutator ( MergeTreeData & data_ , const BackgroundProcessingPool & pool_ ) <nl> - : data ( data_ ) , pool ( pool_ ) , log ( & Logger : : get ( data . getLogName ( ) + " ( MergerMutator ) " ) ) <nl> + MergeTreeDataMergerMutator : : MergeTreeDataMergerMutator ( MergeTreeData & data_ , size_t background_pool_size_ ) <nl> + : data ( data_ ) , background_pool_size ( background_pool_size_ ) , log ( & Logger : : get ( data . getLogName ( ) + " ( MergerMutator ) " ) ) <nl> { <nl> } <nl> <nl> <nl> UInt64 MergeTreeDataMergerMutator : : getMaxSourcePartsSizeForMerge ( ) <nl> { <nl> - size_t total_threads_in_pool = pool . getNumberOfThreads ( ) ; <nl> size_t busy_threads_in_pool = CurrentMetrics : : values [ CurrentMetrics : : BackgroundPoolTask ] . load ( std : : memory_order_relaxed ) ; <nl> <nl> - return getMaxSourcePartsSizeForMerge ( total_threads_in_pool , busy_threads_in_pool = = 0 ? 0 : busy_threads_in_pool - 1 ) ; / / / 1 is current thread <nl> + return getMaxSourcePartsSizeForMerge ( background_pool_size , busy_threads_in_pool = = 0 ? 0 : busy_threads_in_pool - 1 ) ; / / / 1 is current thread <nl> } <nl> <nl> <nl> UInt64 MergeTreeDataMergerMutator : : getMaxSourcePartsSizeForMerge ( size_t pool_siz <nl> UInt64 MergeTreeDataMergerMutator : : getMaxSourcePartSizeForMutation ( ) <nl> { <nl> const auto data_settings = data . getSettings ( ) ; <nl> - size_t total_threads_in_pool = pool . getNumberOfThreads ( ) ; <nl> size_t busy_threads_in_pool = CurrentMetrics : : values [ CurrentMetrics : : BackgroundPoolTask ] . load ( std : : memory_order_relaxed ) ; <nl> <nl> / / / Allow mutations only if there are enough threads , leave free threads for merges else <nl> - if ( total_threads_in_pool - busy_threads_in_pool > = data_settings - > number_of_free_entries_in_pool_to_execute_mutation ) <nl> + if ( background_pool_size - busy_threads_in_pool > = data_settings - > number_of_free_entries_in_pool_to_execute_mutation ) <nl> return static_cast < UInt64 > ( data . storage_policy - > getMaxUnreservedFreeSpace ( ) / DISK_USAGE_COEFFICIENT_TO_RESERVE ) ; <nl> <nl> return 0 ; <nl> mmm a / dbms / src / Storages / MergeTree / MergeTreeDataMergerMutator . h <nl> ppp b / dbms / src / Storages / MergeTree / MergeTreeDataMergerMutator . 
h <nl> class MergeTreeDataMergerMutator <nl> using AllowedMergingPredicate = std : : function < bool ( const MergeTreeData : : DataPartPtr & , const MergeTreeData : : DataPartPtr & , String * reason ) > ; <nl> <nl> public : <nl> - MergeTreeDataMergerMutator ( MergeTreeData & data_ , const BackgroundProcessingPool & pool_ ) ; <nl> + MergeTreeDataMergerMutator ( MergeTreeData & data_ , size_t background_pool_size ) ; <nl> <nl> / * * Get maximum total size of parts to do merge , at current moment of time . <nl> * It depends on number of free threads in background_pool and amount of free space in disk . <nl> class MergeTreeDataMergerMutator <nl> <nl> private : <nl> MergeTreeData & data ; <nl> - const BackgroundProcessingPool & pool ; <nl> + const size_t background_pool_size ; <nl> <nl> Logger * log ; <nl> <nl> mmm a / dbms / src / Storages / MergeTree / ReplicatedMergeTreeQueue . cpp <nl> ppp b / dbms / src / Storages / MergeTree / ReplicatedMergeTreeQueue . cpp <nl> bool ReplicatedMergeTreeQueue : : removeFromVirtualParts ( const MergeTreePartInfo & <nl> return virtual_parts . remove ( part_info ) ; <nl> } <nl> <nl> - bool ReplicatedMergeTreeQueue : : removeFromVirtualParts ( const String & part_name ) <nl> - { <nl> - std : : lock_guard lock ( state_mutex ) ; <nl> - return virtual_parts . remove ( part_name ) ; <nl> - } <nl> - <nl> void ReplicatedMergeTreeQueue : : pullLogsToQueue ( zkutil : : ZooKeeperPtr zookeeper , Coordination : : WatchCallback watch_callback ) <nl> { <nl> std : : lock_guard lock ( pull_logs_to_queue_mutex ) ; <nl> bool ReplicatedMergeTreeMergePredicate : : operator ( ) ( <nl> Int64 left_mutation_ver = queue . getCurrentMutationVersionImpl ( <nl> left - > info . partition_id , left - > info . getDataVersion ( ) , lock ) ; <nl> <nl> - / / / left - > info . partition_id = = right - > info . partition_id <nl> Int64 right_mutation_ver = queue . getCurrentMutationVersionImpl ( <nl> left - > info . partition_id , right - > info . getDataVersion ( ) , lock ) ; <nl> <nl> mmm a / dbms / src / Storages / MergeTree / ReplicatedMergeTreeQueue . h <nl> ppp b / dbms / src / Storages / MergeTree / ReplicatedMergeTreeQueue . h <nl> class ReplicatedMergeTreeQueue <nl> private : <nl> friend class CurrentlyExecuting ; <nl> friend class ReplicatedMergeTreeMergePredicate ; <nl> - friend class ReplicatedMergeTreeMovePredicate ; <nl> <nl> using LogEntry = ReplicatedMergeTreeLogEntry ; <nl> using LogEntryPtr = LogEntry : : Ptr ; <nl> class ReplicatedMergeTreeQueue <nl> <nl> bool removeFromVirtualParts ( const MergeTreePartInfo & part_info ) ; <nl> <nl> - bool removeFromVirtualParts ( const String & part_name ) ; <nl> - <nl> / * * Copy the new entries from the shared log to the queue of this replica . Set the log_pointer to the appropriate value . <nl> * If watch_callback is not empty , will call it when new entries appear in the log . <nl> * If there were new entries , notifies storage . queue_task_handle . 
<nl> class ReplicatedMergeTreeMergePredicate <nl> } ; <nl> <nl> <nl> - class ReplicatedMergeTreeMovePredicate <nl> - { <nl> - public : <nl> - ReplicatedMergeTreeMovePredicate ( const ReplicatedMergeTreeQueue & queue_ ) ; <nl> - <nl> - bool operator ( ) ( const MergeTreeData : : DataPartPtr & part , String * out_reason = nullptr ) const ; <nl> - <nl> - ~ ReplicatedMergeTreeMovePredicate ( ) ; <nl> - <nl> - private : <nl> - const ReplicatedMergeTreeQueue & queue ; <nl> - <nl> - / / / Locks queue state in constructor and unlocks in desctructor <nl> - std : : unique_lock < std : : mutex > queue_state_lock ; <nl> - } ; <nl> - <nl> / * * Convert a number to a string in the format of the suffixes of auto - incremental nodes in ZooKeeper . <nl> * Negative numbers are also supported - for them the name of the node looks somewhat silly <nl> * and does not match any auto - incremented node in ZK . <nl> mmm a / dbms / src / Storages / StorageLog . h <nl> ppp b / dbms / src / Storages / StorageLog . h <nl> friend struct ext : : shared_ptr_helper < StorageLog > ; <nl> <nl> void truncate ( const ASTPtr & , const Context & , TableStructureWriteLockHolder & ) override ; <nl> <nl> - std : : string full_path ( ) const { return path + escapeForFileName ( table_name ) + ' / ' ; } <nl> + std : : string fullPath ( ) const { return path + escapeForFileName ( table_name ) + ' / ' ; } <nl> <nl> - Strings getDataPaths ( ) const override { return { full_path ( ) } ; } <nl> + Strings getDataPaths ( ) const override { return { fullPath ( ) } ; } <nl> <nl> protected : <nl> / * * Attach the table with the appropriate name , along the appropriate path ( with / at the end ) , <nl> mmm a / dbms / src / Storages / StorageMergeTree . cpp <nl> ppp b / dbms / src / Storages / StorageMergeTree . cpp <nl> StorageMergeTree : : StorageMergeTree ( <nl> std : : move ( storage_settings_ ) , false , attach ) , <nl> background_pool ( context_ . getBackgroundPool ( ) ) , <nl> reader ( * this ) , writer ( * this ) , <nl> - merger_mutator ( * this , global_context . getBackgroundPool ( ) ) , <nl> + merger_mutator ( * this , global_context . getBackgroundPool ( ) . getNumberOfThreads ( ) ) , <nl> parts_mover ( * this ) <nl> { <nl> loadDataParts ( has_force_restore_data_flag ) ; <nl> void StorageMergeTree : : shutdown ( ) <nl> <nl> if ( background_task_handle ) <nl> background_pool . removeTask ( background_task_handle ) ; <nl> - <nl> - background_task_handle . reset ( ) ; <nl> } <nl> <nl> <nl> mmm a / dbms / src / Storages / StorageMergeTree . h <nl> ppp b / dbms / src / Storages / StorageMergeTree . h <nl> class StorageMergeTree : public ext : : shared_ptr_helper < StorageMergeTree > , public <nl> friend class MergeTreeBlockOutputStream ; <nl> friend class MergeTreeData ; <nl> friend struct CurrentlyMergingPartsTagger ; <nl> - friend struct CurrentlyMovingPartsTagger ; <nl> <nl> protected : <nl> <nl> mmm a / dbms / src / Storages / StorageReplicatedMergeTree . cpp <nl> ppp b / dbms / src / Storages / StorageReplicatedMergeTree . cpp <nl> StorageReplicatedMergeTree : : StorageReplicatedMergeTree ( <nl> [ this ] ( const std : : string & name ) { enqueuePartForCheck ( name ) ; } ) , <nl> zookeeper_path ( global_context . getMacros ( ) - > expand ( zookeeper_path_ , database_name_ , table_name_ ) ) , <nl> replica_name ( global_context . getMacros ( ) - > expand ( replica_name_ , database_name_ , table_name_ ) ) , <nl> - reader ( * this ) , writer ( * this ) , merger_mutator ( * this , global_context . 
getBackgroundPool ( ) ) , parts_mover ( * this ) , <nl> - queue ( * this ) , fetcher ( * this ) , cleanup_thread ( * this ) , alter_thread ( * this ) , <nl> + reader ( * this ) , writer ( * this ) , merger_mutator ( * this , global_context . getBackgroundPool ( ) . getNumberOfThreads ( ) ) , <nl> + parts_mover ( * this ) , queue ( * this ) , fetcher ( * this ) , cleanup_thread ( * this ) , alter_thread ( * this ) , <nl> part_check_thread ( * this ) , restarting_thread ( * this ) <nl> { <nl> if ( ! zookeeper_path . empty ( ) & & zookeeper_path . back ( ) = = ' / ' ) <nl> void StorageReplicatedMergeTree : : fetchPartition ( const ASTPtr & partition , const <nl> <nl> if ( missing_parts . empty ( ) ) <nl> { <nl> - auto tmp = active_parts_set . getParts ( ) ; <nl> - for ( auto elem : tmp ) <nl> - parts_to_fetch . push_back ( elem ) ; <nl> + parts_to_fetch = active_parts_set . getParts ( ) ; <nl> <nl> / / / Leaving only the parts of the desired partition . <nl> Strings parts_to_fetch_partition ; <nl> mmm a / dbms / src / Storages / StorageReplicatedMergeTree . h <nl> ppp b / dbms / src / Storages / StorageReplicatedMergeTree . h <nl> class StorageReplicatedMergeTree : public ext : : shared_ptr_helper < StorageReplicat <nl> std : : unordered_set < String > currently_fetching_parts ; <nl> std : : mutex currently_fetching_parts_mutex ; <nl> <nl> - / / / <nl> + / / / Parts currently moving to another disks or volumes . <nl> + / / / This operation doesn ' t replicate . <nl> DataParts currently_moving_parts ; <nl> + <nl> + / / / Mutex for currenly_moving_parts <nl> std : : mutex moving_parts_mutex ; <nl> <nl> / / / With the quorum being tracked , add a replica to the quorum for the part . <nl> mmm a / dbms / src / Storages / StorageTinyLog . cpp <nl> ppp b / dbms / src / Storages / StorageTinyLog . cpp <nl> Block TinyLogBlockInputStream : : readImpl ( ) <nl> <nl> { <nl> / / / if there are no files in the folder , it means that the table is empty <nl> - if ( Poco : : DirectoryIterator ( storage . full_path ( ) ) = = Poco : : DirectoryIterator ( ) ) <nl> + if ( Poco : : DirectoryIterator ( storage . fullPath ( ) ) = = Poco : : DirectoryIterator ( ) ) <nl> return res ; <nl> } <nl> <nl> Block TinyLogBlockInputStream : : readImpl ( ) <nl> } <nl> catch ( Exception & e ) <nl> { <nl> - e . addMessage ( " while reading column " + name_type . name + " at " + storage . full_path ( ) ) ; <nl> + e . addMessage ( " while reading column " + name_type . name + " at " + storage . fullPath ( ) ) ; <nl> throw ; <nl> } <nl> <nl> mmm a / dbms / src / Storages / StorageTinyLog . h <nl> ppp b / dbms / src / Storages / StorageTinyLog . h <nl> friend struct ext : : shared_ptr_helper < StorageTinyLog > ; <nl> } ; <nl> using Files_t = std : : map < String , ColumnData > ; <nl> <nl> - std : : string full_path ( ) const { return path + escapeForFileName ( table_name ) + ' / ' ; } <nl> + std : : string fullPath ( ) const { return path + escapeForFileName ( table_name ) + ' / ' ; } <nl> <nl> - Strings getDataPaths ( ) const override { return { full_path ( ) } ; } <nl> + Strings getDataPaths ( ) const override { return { fullPath ( ) } ; } <nl> <nl> void truncate ( const ASTPtr & , const Context & , TableStructureWriteLockHolder & ) override ; <nl> <nl>
Remove trash
ClickHouse/ClickHouse
13bbae5860da5e89f28842a31048c271396087af
2019-09-04T16:00:20Z
mmm a / template / xcode3 / cocos2d - x_box2d_app / Classes / HelloWorldScene . cpp <nl> ppp b / template / xcode3 / cocos2d - x_box2d_app / Classes / HelloWorldScene . cpp <nl> HelloWorld : : HelloWorld ( ) <nl> <nl> / / Set up sprite <nl> <nl> - CCSpriteBatchNode * mgr = CCSpriteBatchNode : : spriteSheetWithFile ( " blocks . png " , 150 ) ; <nl> + CCSpriteBatchNode * mgr = CCSpriteBatchNode : : batchNodeWithFile ( " blocks . png " , 150 ) ; <nl> addChild ( mgr , 0 , kTagSpriteManager ) ; <nl> <nl> addNewSpriteWithCoords ( CCPointMake ( screenSize . width / 2 , screenSize . height / 2 ) ) ; <nl> void HelloWorld : : addNewSpriteWithCoords ( CCPoint p ) <nl> / / just randomly picking one of the images <nl> int idx = ( CCRANDOM_0_1 ( ) > . 5 ? 0 : 1 ) ; <nl> int idy = ( CCRANDOM_0_1 ( ) > . 5 ? 0 : 1 ) ; <nl> - CCSprite * sprite = sheet - > createSpriteWithRect ( CCRectMake ( 32 * idx , 32 * idy , 32 , 32 ) ) ; <nl> + CCSprite * sprite = CCSprite : : spritWithBatchNode ( sheet , CCRectMake ( 32 * idx , 32 * idy , 32 , 32 ) ) ; <nl> sheet - > addChild ( sprite ) ; <nl> <nl> sprite - > setPosition ( CCPointMake ( p . x , p . y ) ) ; <nl> new file mode 100644 <nl> index 000000000000 . . cfc16a8a4e19 <nl> mmm / dev / null <nl> ppp b / template / xcode3 / cocos2d - x_lua_app / Resources / background . mp3 . REMOVED . git - id <nl> @ @ - 0 , 0 + 1 @ @ <nl> + aec1c0a8c8068377fddca5ddd32084d8c3c3c419 <nl> \ No newline at end of file <nl> mmm a / template / xcode3 / cocos2d - x_lua_app / Resources / hello . lua <nl> ppp b / template / xcode3 / cocos2d - x_lua_app / Resources / hello . lua <nl> function btnTouchMove ( e ) <nl> local v = e [ 1 ] <nl> local pointMove = v : locationInView ( v : view ( ) ) <nl> pointMove = cocos2d . CCDirector : sharedDirector ( ) : convertToGL ( pointMove ) <nl> - local positionCurrent = layerFarm . __CCNode__ : getPosition ( ) <nl> - layerFarm . __CCNode__ : setPosition ( cocos2d . CCPoint ( positionCurrent . x + pointMove . x - pointBegin . x , positionCurrent . y + pointMove . y - pointBegin . y ) ) <nl> + local positionCurrent = layerFarm : getPosition ( ) <nl> + layerFarm : setPosition ( cocos2d . CCPoint ( positionCurrent . x + pointMove . x - pointBegin . x , positionCurrent . y + pointMove . y - pointBegin . y ) ) <nl> pointBegin = pointMove <nl> end <nl> end <nl> <nl> function btnTouchBegin ( e ) <nl> + cocos2d . CCLuaLog ( " btnTouchBegin " ) <nl> for k , v in ipairs ( e ) do <nl> pointBegin = v : locationInView ( v : view ( ) ) <nl> pointBegin = cocos2d . CCDirector : sharedDirector ( ) : convertToGL ( pointBegin ) <nl> - cocos2d . CCLuaLog ( " btnTouchBegin , x = % d , y = % d " , pointBegin . x , pointBegin . y ) <nl> end <nl> end <nl> <nl> animFrames = cocos2d . CCMutableArray_CCSpriteFrame__ : new ( 2 ) <nl> animFrames : addObject ( frame0 ) <nl> animFrames : addObject ( frame1 ) <nl> <nl> - animation = cocos2d . CCAnimation : animationWithName ( " wait " , 0 . 5 , animFrames ) <nl> + animation = cocos2d . CCAnimation : animationWithFrames ( animFrames , 0 . 5 ) <nl> <nl> animate = cocos2d . CCAnimate : actionWithAnimation ( animation , false ) ; <nl> spriteDog : runAction ( cocos2d . CCRepeatForever : actionWithAction ( animate ) ) <nl> spriteDog : runAction ( cocos2d . CCRepeatForever : actionWithAction ( animate ) ) <nl> - - add a popup menu <nl> <nl> function menuCallbackClosePopup ( ) <nl> + - - stop test sound effect <nl> + CocosDenshion . 
SimpleAudioEngine : sharedEngine ( ) : stopEffect ( effectID ) <nl> menuPopup : setIsVisible ( false ) <nl> end <nl> <nl> layerMenu : addChild ( menuPopup ) <nl> - - add the left - bottom " tools " menu to invoke menuPopup <nl> <nl> function menuCallbackOpenPopup ( ) <nl> + - - loop test sound effect <nl> + - - NOTE : effectID is global , so it can be used to stop <nl> + effectID = CocosDenshion . SimpleAudioEngine : sharedEngine ( ) : playEffect ( " effect1 . wav " ) <nl> menuPopup : setIsVisible ( true ) <nl> end <nl> <nl> end <nl> cocos2d . CCScheduler : sharedScheduler ( ) : scheduleScriptFunc ( " tick " , 0 . 01 , false ) <nl> <nl> - - run <nl> + - - play background music <nl> + CocosDenshion . SimpleAudioEngine : sharedEngine ( ) : playBackgroundMusic ( " background . mp3 " , true ) ; <nl> <nl> cocos2d . CCDirector : sharedDirector ( ) : runWithScene ( sceneGame ) <nl> mmm a / template / xcode3 / cocos2d - x_lua_app / ___PROJECTNAME___ . xcodeproj / project . pbxproj . REMOVED . git - id <nl> ppp b / template / xcode3 / cocos2d - x_lua_app / ___PROJECTNAME___ . xcodeproj / project . pbxproj . REMOVED . git - id <nl> @ @ - 1 + 1 @ @ <nl> - 8feced937b259baf35f5ea1d21cf2adad5d02f7d <nl> \ No newline at end of file <nl> + 159e4feec6f5338fcd4bf5e76acd00280ff4e828 <nl> \ No newline at end of file <nl>
update xcode3 templates
cocos2d/cocos2d-x
4894f8c6ab55081472b53e7dde743433ace4d958
2011-08-17T06:08:47Z
mmm a / xbmc / input / IRTranslator . cpp <nl> ppp b / xbmc / input / IRTranslator . cpp <nl> bool CIRTranslator : : LoadIRMap ( const std : : string & irMapPath ) <nl> std : : string strValue = pRoot - > Value ( ) ; <nl> if ( strValue ! = remoteMapTag ) <nl> { <nl> - CLog : : Log ( LOGERROR , " % sl Doesn ' t contain < % s > " , irMapPath . c_str ( ) , remoteMapTag . c_str ( ) ) ; <nl> + CLog : : Log ( LOGERROR , " % s Doesn ' t contain < % s > " , irMapPath . c_str ( ) , remoteMapTag . c_str ( ) ) ; <nl> return false ; <nl> } <nl> <nl>
[logging] Fix typo in log message when IR keymap can't be loaded
xbmc/xbmc
48fec3fefc6cd9ed403df871a3596f80bd894ede
2017-06-17T18:40:39Z
mmm a / src / app / ui / brush_popup . cpp <nl> ppp b / src / app / ui / brush_popup . cpp <nl> using namespace ui ; <nl> <nl> namespace { <nl> <nl> + void show_popup_menu ( PopupWindow * popupWindow , Menu * popupMenu , <nl> + const gfx : : Point & pt ) <nl> + { <nl> + / / Here we make the popup window temporaly floating , so it ' s <nl> + / / not closed by the popup menu . <nl> + popupWindow - > makeFloating ( ) ; <nl> + <nl> + popupMenu - > showPopup ( pt ) ; <nl> + <nl> + / / Add the menu popup region to the window popup hot region so it ' s <nl> + / / not closed after we close the menu . <nl> + popupWindow - > makeFixed ( ) ; <nl> + <nl> + gfx : : Region rgn ; <nl> + rgn . createUnion ( gfx : : Region ( popupWindow - > bounds ( ) ) , <nl> + gfx : : Region ( popupMenu - > bounds ( ) ) ) ; <nl> + popupWindow - > setHotRegion ( rgn ) ; <nl> + } <nl> + <nl> class SelectBrushItem : public ButtonSet : : Item { <nl> public : <nl> SelectBrushItem ( BrushPopupDelegate * delegate , const BrushRef & brush , int slot = - 1 ) <nl> class BrushOptionsItem : public ButtonSet : : Item { <nl> menu . addChild ( new MenuSeparator ) ; <nl> menu . addChild ( & deleteAllItem ) ; <nl> <nl> - / / Here we make the popup window temporaly floating , so it ' s <nl> - / / not closed by the popup menu . <nl> - m_popup - > makeFloating ( ) ; <nl> - <nl> - menu . showPopup ( gfx : : Point ( origin ( ) . x , origin ( ) . y + bounds ( ) . h ) ) ; <nl> - <nl> - / / Add the menu popup region to the hot region so the BrushPopup ( m_popup ) <nl> - / / isn ' t closed after we click the menu popup . <nl> - m_popup - > makeFixed ( ) ; <nl> - <nl> - gfx : : Region rgn ; <nl> - rgn . createUnion ( gfx : : Region ( m_popup - > bounds ( ) ) , <nl> - gfx : : Region ( menu . bounds ( ) ) ) ; <nl> - m_popup - > setHotRegion ( rgn ) ; <nl> + show_popup_menu ( m_popup , & menu , <nl> + gfx : : Point ( origin ( ) . x , origin ( ) . y + bounds ( ) . h ) ) ; <nl> } <nl> <nl> private : <nl>
Add show_popup_menu() internal function in brush_popup.cpp
aseprite/aseprite
0f35102a4bd68b61f0ab96ee270813f567c28bc9
2015-12-16T13:49:15Z
mmm a / lib / SILOptimizer / Utils / Generics . cpp <nl> ppp b / lib / SILOptimizer / Utils / Generics . cpp <nl> static void prepareCallArguments ( ApplySite AI , SILBuilder & Builder , <nl> / / / function being applied . <nl> static ApplySite replaceWithSpecializedCallee ( ApplySite AI , <nl> SILValue Callee , <nl> - SILBuilder & Builder , <nl> const ReabstractionInfo & ReInfo ) { <nl> + SILBuilderWithScope Builder ( AI . getInstruction ( ) ) ; <nl> SILLocation Loc = AI . getLoc ( ) ; <nl> SmallVector < SILValue , 4 > Arguments ; <nl> SILValue StoreResultTo ; <nl> replaceWithSpecializedFunction ( ApplySite AI , SILFunction * NewF , <nl> const ReabstractionInfo & ReInfo ) { <nl> SILBuilderWithScope Builder ( AI . getInstruction ( ) ) ; <nl> FunctionRefInst * FRI = Builder . createFunctionRef ( AI . getLoc ( ) , NewF ) ; <nl> - return replaceWithSpecializedCallee ( AI , FRI , Builder , ReInfo ) ; <nl> + return replaceWithSpecializedCallee ( AI , FRI , ReInfo ) ; <nl> } <nl> <nl> namespace { <nl> void swift : : trySpecializeApplyOfGeneric ( <nl> / / thunk which converts from the re - abstracted function back to the <nl> / / original function with indirect parameters / results . <nl> auto * PAI = cast < PartialApplyInst > ( Apply . getInstruction ( ) ) ; <nl> - SILBuilderWithScope Builder ( PAI ) ; <nl> SILFunction * Thunk = <nl> ReabstractionThunkGenerator ( FuncBuilder , ReInfo , PAI , SpecializedF ) <nl> . createThunk ( ) ; <nl> NewFunctions . push_back ( Thunk ) ; <nl> + SILBuilderWithScope Builder ( PAI ) ; <nl> auto * FRI = Builder . createFunctionRef ( PAI - > getLoc ( ) , Thunk ) ; <nl> SmallVector < SILValue , 4 > Arguments ; <nl> for ( auto & Op : PAI - > getArgumentOperands ( ) ) { <nl> void swift : : trySpecializeApplyOfGeneric ( <nl> for ( Operand * Use : NewPAI - > getUses ( ) ) { <nl> SILInstruction * User = Use - > getUser ( ) ; <nl> if ( auto FAS = FullApplySite : : isa ( User ) ) { <nl> - SILBuilder Builder ( User ) ; <nl> - replaceWithSpecializedCallee ( FAS , NewPAI , Builder , ReInfo ) ; <nl> + replaceWithSpecializedCallee ( FAS , NewPAI , ReInfo ) ; <nl> DeadApplies . insert ( FAS . getInstruction ( ) ) ; <nl> continue ; <nl> } <nl>
[gardening] Eliminate passing around of SILBuilders.
apple/swift
89f33644445977461df6dc09577a155b922cfc05
2020-03-11T06:04:16Z
mmm a / hphp / hack / src / hhbc / emit_function . ml <nl> ppp b / hphp / hack / src / hhbc / emit_function . ml <nl> let from_ast_no_memoization : Ast . fun_ - > Hhas_function . t = <nl> let body_instrs , function_params , function_return_type = Emit_body . from_ast <nl> ast_fun . Ast . f_tparams ast_fun . Ast . f_params ast_fun . Ast . f_ret <nl> ast_fun . Ast . f_body in <nl> - let body_instrs = Label_rewriter . relabel_instrseq body_instrs in <nl> + let function_params , body_instrs = <nl> + Label_rewriter . relabel_function function_params body_instrs in <nl> let function_decl_vars = extract_decl_vars body_instrs in <nl> let body_instrs = Local_id_rewriter . unname_instrseq <nl> ( List . map ast_fun . Ast . f_params ( fun p - > snd p . Ast . param_id ) @ <nl> mmm a / hphp / hack / src / hhbc / hhbc_hhas . ml <nl> ppp b / hphp / hack / src / hhbc / hhbc_hhas . ml <nl> let string_of_param_default_value expr = <nl> let string_of_param_default_value_option = function <nl> | None - > " " <nl> | Some ( label , expr ) - > <nl> - " = DV " <nl> - ^ ( string_of_int ( Label . id label ) ) <nl> + " = " <nl> + ^ ( string_of_label label ) <nl> ^ " ( \ " \ " \ " " <nl> ^ ( string_of_param_default_value expr ) <nl> ^ " \ " \ " \ " ) " <nl> let add_implements buf class_implements = <nl> begin <nl> B . add_string buf " implements ( " ; <nl> B . add_string buf ( String . concat " " ( List . map fmt_name class_implements ) ) ; <nl> - B . add_string buf " ) " ; <nl> + B . add_string buf " ) " ; <nl> end <nl> <nl> let property_attributes p = <nl> mmm a / hphp / hack / src / hhbc / label . ml <nl> ppp b / hphp / hack / src / hhbc / label . ml <nl> <nl> * <nl> * ) <nl> <nl> + ( * Labels , regardless of flavor have unique IDs * ) <nl> type t = <nl> | Regular of int <nl> | Catch of int <nl> let id label = <nl> | Fault id <nl> | DefaultArg id - > id <nl> <nl> + let option_map f label = <nl> + match label with <nl> + | Regular id - > <nl> + begin match f id with None - > None | Some id - > Some ( Regular id ) end <nl> + | Catch id - > <nl> + begin match f id with None - > None | Some id - > Some ( Catch id ) end <nl> + | Fault id - > <nl> + begin match f id with None - > None | Some id - > Some ( Fault id ) end <nl> + | DefaultArg id - > <nl> + begin match f id with None - > None | Some id - > Some ( DefaultArg id ) end <nl> + <nl> + let map f label = <nl> + match label with <nl> + | Regular id - > Regular ( f id ) <nl> + | Catch id - > Catch ( f id ) <nl> + | Fault id - > Fault ( f id ) <nl> + | DefaultArg id - > DefaultArg ( f id ) <nl> + <nl> ( * Numbers for string label * ) <nl> let next_label = ref 0 <nl> <nl> mmm a / hphp / hack / src / hhbc / label_rewriter . ml <nl> ppp b / hphp / hack / src / hhbc / label_rewriter . ml <nl> let create_label_to_offset_map instrseq = <nl> snd @ @ <nl> InstrSeq . fold_left instrseq ~ init : ( 0 , IMap . empty ) ~ f : ( fun ( i , m ) instr - > <nl> begin match instr with <nl> - | ILabel ( Label . Regular l ) - > ( i , IMap . add l i m ) <nl> + | ILabel l - > ( i , IMap . add ( Label . id l ) i m ) <nl> | _ - > ( i + 1 , m ) <nl> end ) <nl> <nl> let lookup_def l defs = <nl> | None - > failwith " lookup_def : label missing " <nl> | Some ix - > ix <nl> <nl> - ( * Generate new labels for all labels referenced in instructions , in the <nl> - * order that the instructions appear . 
Also record which labels are <nl> + ( * Get any regular labels referenced by this instruction * ) <nl> + let get_regular_labels instr = <nl> + match instr with <nl> + | IIterator ( IterInit ( _ , l , _ ) ) <nl> + | IIterator ( IterInitK ( _ , l , _ , _ ) ) <nl> + | IIterator ( WIterInit ( _ , l , _ ) ) <nl> + | IIterator ( WIterInitK ( _ , l , _ , _ ) ) <nl> + | IIterator ( MIterInit ( _ , l , _ ) ) <nl> + | IIterator ( MIterInitK ( _ , l , _ , _ ) ) <nl> + | IIterator ( IterNext ( _ , l , _ ) ) <nl> + | IIterator ( IterNextK ( _ , l , _ , _ ) ) <nl> + | IIterator ( WIterNext ( _ , l , _ ) ) <nl> + | IIterator ( WIterNextK ( _ , l , _ , _ ) ) <nl> + | IIterator ( MIterNext ( _ , l , _ ) ) <nl> + | IIterator ( MIterNextK ( _ , l , _ , _ ) ) <nl> + | IIterator ( IterBreak ( l , _ ) ) <nl> + | ICall ( DecodeCufIter ( _ , l ) ) <nl> + | IContFlow ( Jmp l | JmpNS l | JmpZ l | JmpNZ l ) - > [ l ] <nl> + | IContFlow ( Switch ( _ , _ , ls ) ) - > ls <nl> + | IContFlow ( SSwitch pairs ) - > List . map pairs snd <nl> + | _ - > [ ] <nl> + <nl> + ( * Get any labels referred to in catch or fault handlers * ) <nl> + let get_catch_or_fault_labels instr = <nl> + match instr with <nl> + | ITry ( TryCatchBegin l | TryFaultBegin l ) - > [ l ] <nl> + | _ - > [ ] <nl> + <nl> + ( * Generate new labels for all labels referenced in instructions and default <nl> + * parameter values , in the same order as used by DumpHhas : <nl> + * 1 . First , labels referenced by normal control - flow ( jumps , switches , etc ) <nl> + * 2 . Next , labels referenced by catch or fault handlers <nl> + * 3 . Last , labels referenced by default parameter values <nl> * ) <nl> - let create_label_ref_map defs instrseq = <nl> - snd @ @ <nl> - InstrSeq . fold_left instrseq ~ init : ( 0 , ( ISet . empty , IMap . empty ) ) <nl> + let create_label_ref_map defs params body = <nl> + let process_ref ( n , ( used , refs ) as acc ) l = <nl> + let l = Label . id l in <nl> + let ix = lookup_def l defs in <nl> + match IMap . get ix refs with <nl> + ( * This is the first time we ' ve seen a reference to a label for <nl> + * this instruction offset , so generate a new label * ) <nl> + | None - > ( n + 1 , ( ISet . add l used , IMap . add ix n refs ) ) <nl> + ( * We already have a label for this instruction offset * ) <nl> + | Some _ - > acc in <nl> + let gather_using get_labels acc instrseq = <nl> + InstrSeq . fold_left instrseq ~ init : acc <nl> ~ f : ( fun acc instr - > <nl> - let process_ref ( n , ( used , refs ) as acc ) l = <nl> - let l = Label . id l in <nl> - let ix = lookup_def l defs in <nl> - match IMap . get ix refs with <nl> - ( * This is the first time we ' ve seen a reference to a label for <nl> - * this instruction offset , so generate a new label * ) <nl> - | None - > ( n + 1 , ( ISet . add l used , IMap . 
add ix n refs ) ) <nl> - ( * We already have a label for this instruction offset * ) <nl> - | Some _ - > acc in <nl> - match instr with <nl> - | IIterator ( IterInit ( _ , l , _ ) ) <nl> - | IIterator ( IterInitK ( _ , l , _ , _ ) ) <nl> - | IIterator ( WIterInit ( _ , l , _ ) ) <nl> - | IIterator ( WIterInitK ( _ , l , _ , _ ) ) <nl> - | IIterator ( MIterInit ( _ , l , _ ) ) <nl> - | IIterator ( MIterInitK ( _ , l , _ , _ ) ) <nl> - | IIterator ( IterNext ( _ , l , _ ) ) <nl> - | IIterator ( IterNextK ( _ , l , _ , _ ) ) <nl> - | IIterator ( WIterNext ( _ , l , _ ) ) <nl> - | IIterator ( WIterNextK ( _ , l , _ , _ ) ) <nl> - | IIterator ( MIterNext ( _ , l , _ ) ) <nl> - | IIterator ( MIterNextK ( _ , l , _ , _ ) ) <nl> - | IContFlow ( Jmp l | JmpNS l | JmpZ l | JmpNZ l ) - > <nl> - process_ref acc l <nl> - | IContFlow ( Switch ( _ , _ , ls ) ) - > <nl> - List . fold_left ls ~ f : process_ref ~ init : acc <nl> - | IContFlow ( SSwitch pairs ) - > <nl> - List . fold_left pairs ~ f : ( fun acc ( _ , l ) - > process_ref acc l ) ~ init : acc <nl> - ( * TODO : other uses of Label . t in instructions : <nl> - DecodeCufIter <nl> - IterBreak <nl> - * ) <nl> - | _ - > acc ) <nl> + List . fold_left ( get_labels instr ) ~ init : acc ~ f : process_ref ) in <nl> + let acc = ( 0 , ( ISet . empty , IMap . empty ) ) in <nl> + let acc = gather_using get_regular_labels acc body in <nl> + let acc = gather_using get_catch_or_fault_labels acc body in <nl> + let acc = <nl> + List . fold_left params ~ init : acc <nl> + ~ f : ( fun acc param - > <nl> + match Hhas_param . default_value param with <nl> + | None - > acc <nl> + | Some ( l , _ ) - > process_ref acc l ) in <nl> + snd acc <nl> <nl> - ( * Relabel the instruction sequence so that <nl> + ( * Relabel the instruction sequence and parameter values so that <nl> * 1 . No instruction is preceded by more than one label <nl> * 2 . No label is unreferenced <nl> * 3 . References to labels occur in strict label number order , starting at 0 <nl> * ) <nl> - let relabel_instrseq instrseq = <nl> - let defs = create_label_to_offset_map instrseq in <nl> - let used , refs = create_label_ref_map defs instrseq in <nl> - let relabel l = <nl> - let l = Label . id l in <nl> + let rewrite_params_and_body defs used refs params body = <nl> + let relabel_id l = <nl> let ix = lookup_def l defs in <nl> match IMap . get ix refs with <nl> | None - > failwith " relabel_instrseq : offset not in refs " <nl> - | Some l ' - > Label . Regular l ' in <nl> - InstrSeq . filter_map instrseq ~ f : ( fun instr - > <nl> + | Some l ' - > l ' in <nl> + ( * Rewrite a label that ' s referenced by an instruction or parameter * ) <nl> + let relabel l = Label . map relabel_id l in <nl> + ( * Rewrite or remove a label definition * ) <nl> + let relabel_define_label_id id = <nl> + if ISet . mem id used then IMap . 
get ( lookup_def id defs ) refs <nl> + else None in <nl> + ( * Rewrite a single instruction * ) <nl> + let rewrite_instr instr = <nl> match instr with <nl> | IIterator ( IterInit ( id , l , v ) ) - > <nl> Some ( IIterator ( IterInit ( id , relabel l , v ) ) ) <nl> let relabel_instrseq instrseq = <nl> Some ( IIterator ( MIterNext ( id , relabel l , v ) ) ) <nl> | IIterator ( MIterNextK ( id , l , k , v ) ) - > <nl> Some ( IIterator ( MIterNextK ( id , relabel l , k , v ) ) ) <nl> + | IIterator ( IterBreak ( l , x ) ) - > <nl> + Some ( IIterator ( IterBreak ( relabel l , x ) ) ) <nl> + | ICall ( DecodeCufIter ( x , l ) ) - > <nl> + Some ( ICall ( DecodeCufIter ( x , relabel l ) ) ) <nl> | IContFlow ( Jmp l ) - > Some ( IContFlow ( Jmp ( relabel l ) ) ) <nl> | IContFlow ( JmpNS l ) - > Some ( IContFlow ( JmpNS ( relabel l ) ) ) <nl> | IContFlow ( JmpZ l ) - > Some ( IContFlow ( JmpZ ( relabel l ) ) ) <nl> let relabel_instrseq instrseq = <nl> | IContFlow ( SSwitch pairs ) - > <nl> Some ( IContFlow ( SSwitch <nl> ( List . map pairs ( fun ( id , l ) - > ( id , relabel l ) ) ) ) ) <nl> - ( * TODO : other uses of Label . t in instructions : <nl> - DecodeCufIter <nl> - IterBreak <nl> - * ) <nl> - | ILabel ( Label . Regular l ) - > <nl> - ( * TODO : Write test cases for things like catch and fault labels followed <nl> - by loop start labels . * ) <nl> - if ISet . mem l used then <nl> - let ix = lookup_def l defs in <nl> - begin match IMap . get ix refs with <nl> - | Some l ' - > Some ( ILabel ( Label . Regular l ' ) ) <nl> - | None - > None <nl> - end <nl> - else None <nl> - | _ - > Some instr ) <nl> + | ITry ( TryCatchBegin l ) - > Some ( ITry ( TryCatchBegin ( relabel l ) ) ) <nl> + | ITry ( TryFaultBegin l ) - > Some ( ITry ( TryFaultBegin ( relabel l ) ) ) <nl> + | ILabel l - > <nl> + begin match Label . option_map relabel_define_label_id l with <nl> + | None - > None <nl> + | Some l ' - > Some ( ILabel l ' ) <nl> + end <nl> + | _ - > Some instr in <nl> + ( * Rewrite any label referred to in a default value * ) <nl> + let rewrite_param param = <nl> + let dv = Hhas_param . default_value param in <nl> + match dv with <nl> + | None - > param <nl> + | Some ( l , e ) - > <nl> + Hhas_param . make ( Hhas_param . name param ) ( Hhas_param . type_info param ) <nl> + ( Some ( relabel l , e ) ) in <nl> + let params = List . map params rewrite_param in <nl> + let body = InstrSeq . filter_map body ~ f : rewrite_instr in <nl> + ( params , body ) <nl> + <nl> + let relabel_function params body = <nl> + let defs = create_label_to_offset_map body in <nl> + let used , refs = create_label_ref_map defs params body in <nl> + rewrite_params_and_body defs used refs params body <nl>
Codegen: fix relabelling to match DumpHhas
facebook/hhvm
275a46fc252cdb52563ebbf4c3f885ad782d721e
2017-03-13T11:25:47Z
mmm a / src / ProtocolBuffers / ByteString . cs <nl> ppp b / src / ProtocolBuffers / ByteString . cs <nl> public sealed class ByteString : IEnumerable < byte > , IEquatable < ByteString > { <nl> } <nl> <nl> public string ToString ( Encoding encoding ) { <nl> - return encoding . GetString ( bytes ) ; <nl> + return encoding . GetString ( bytes , 0 , bytes . Length ) ; <nl> } <nl> <nl> public string ToStringUtf8 ( ) { <nl> mmm a / src / ProtocolBuffers / CodedInputStream . cs <nl> ppp b / src / ProtocolBuffers / CodedInputStream . cs <nl> public sealed class CodedInputStream { <nl> bufferPos + = size ; <nl> return result ; <nl> } <nl> - / / Slow path : Build a byte array first then copy it . <nl> - return Encoding . UTF8 . GetString ( ReadRawBytes ( size ) ) ; <nl> + / / Slow path : Build a byte array first then copy it . <nl> + return Encoding . UTF8 . GetString ( ReadRawBytes ( size ) , 0 , size ) ; <nl> } <nl> <nl> / / / < summary > <nl> mmm a / src / ProtocolBuffers / FieldSet . cs <nl> ppp b / src / ProtocolBuffers / FieldSet . cs <nl> internal sealed class FieldSet { <nl> } <nl> <nl> public static FieldSet CreateInstance ( ) { <nl> - / / Use SortedList to keep fields in the canonical order <nl> - return new FieldSet ( new SortedList < FieldDescriptor , object > ( ) ) ; <nl> + / / Use SortedDictionary to keep fields in the canonical order <nl> + return new FieldSet ( new SortedDictionary < FieldDescriptor , object > ( ) ) ; <nl> } <nl> <nl> / / / < summary > <nl> internal sealed class FieldSet { <nl> } <nl> <nl> if ( hasRepeats ) { <nl> - var tmp = new SortedList < FieldDescriptor , object > ( ) ; <nl> + var tmp = new SortedDictionary < FieldDescriptor , object > ( ) ; <nl> foreach ( KeyValuePair < FieldDescriptor , object > entry in fields ) { <nl> IList < object > list = entry . Value as IList < object > ; <nl> tmp [ entry . Key ] = list = = null ? entry . Value : Lists . AsReadOnly ( list ) ; <nl> mmm a / src / ProtocolBuffers / GeneratedMessage . cs <nl> ppp b / src / ProtocolBuffers / GeneratedMessage . cs <nl> public abstract class GeneratedMessage < TMessage , TBuilder > : AbstractMessage < TMe <nl> <nl> internal IDictionary < FieldDescriptor , Object > GetMutableFieldMap ( ) { <nl> <nl> - / / Use a SortedList so we ' ll end up serializing fields in order <nl> - var ret = new SortedList < FieldDescriptor , object > ( ) ; <nl> + / / Use a SortedDictionary so we ' ll end up serializing fields in order <nl> + var ret = new SortedDictionary < FieldDescriptor , object > ( ) ; <nl> MessageDescriptor descriptor = DescriptorForType ; <nl> foreach ( FieldDescriptor field in descriptor . Fields ) { <nl> IFieldAccessor < TMessage , TBuilder > accessor = InternalFieldAccessors [ field ] ; <nl> mmm a / src / ProtocolBuffers / NameHelpers . cs <nl> ppp b / src / ProtocolBuffers / NameHelpers . cs <nl> <nl>  using System ; <nl> using System . Collections . Generic ; <nl> using System . Text ; <nl> + using System . Globalization ; <nl> <nl> namespace Google . ProtocolBuffers { <nl> / / / < summary > <nl> internal class NameHelpers { <nl> char c = input [ i ] ; <nl> if ( ' a ' < = c & & c < = ' z ' ) { <nl> if ( capitaliseNext ) { <nl> - result . Append ( char . ToUpperInvariant ( c ) ) ; <nl> + result . Append ( char . ToUpper ( c , CultureInfo . InvariantCulture ) ) ; <nl> } else { <nl> result . Append ( c ) ; <nl> } <nl> internal class NameHelpers { <nl> if ( i = = 0 & & ! pascal ) { <nl> / / Force first letter to lower - case unless explicitly told to <nl> / / capitalize it . <nl> - result . 
Append ( char . ToLowerInvariant ( c ) ) ; <nl> + result . Append ( char . ToLower ( c , CultureInfo . InvariantCulture ) ) ; <nl> } else { <nl> / / Capital letters after the first are left as - is . <nl> result . Append ( c ) ; <nl> mmm a / src / ProtocolBuffers / TextFormat . cs <nl> ppp b / src / ProtocolBuffers / TextFormat . cs <nl> public static class TextFormat { <nl> if ( field = = null ) { <nl> / / Explicitly specify the invariant culture so that this code does not break when <nl> / / executing in Turkey . <nl> - String lowerName = name . ToLowerInvariant ( ) ; <nl> + String lowerName = name . ToLower ( CultureInfo . InvariantCulture ) ; <nl> field = type . FindDescriptor < FieldDescriptor > ( lowerName ) ; <nl> / / If the case - insensitive match worked but the field is NOT a group , <nl> / / TODO ( jonskeet ) : What ? Java comment ends here ! <nl> mmm a / src / ProtocolBuffers / TextTokenizer . cs <nl> ppp b / src / ProtocolBuffers / TextTokenizer . cs <nl> internal sealed class TextTokenizer { <nl> / / / < / summary > <nl> private int previousColumn = 0 ; <nl> <nl> + # if SILVERLIGHT <nl> + private const RegexOptions CompiledRegexWhereAvailable = RegexOptions . None ; <nl> + # else <nl> + private const RegexOptions CompiledRegexWhereAvailable = RegexOptions . Compiled ; <nl> + # endif <nl> + <nl> / / Note : atomic groups used to mimic possessive quantifiers in Java in both of these regexes <nl> private static readonly Regex WhitespaceAndCommentPattern = new Regex ( " \ \ G ( ? > ( \ \ s | ( # . * $ ) ) + ) " , <nl> - RegexOptions . Compiled | RegexOptions . Multiline ) ; <nl> + CompiledRegexWhereAvailable | RegexOptions . Multiline ) ; <nl> private static readonly Regex TokenPattern = new Regex ( <nl> " \ \ G [ a - zA - Z_ ] ( ? > [ 0 - 9a - zA - Z_ + - ] * ) | " + / / an identifier <nl> " \ \ G [ 0 - 9 + - ] ( ? > [ 0 - 9a - zA - Z_ . + - ] * ) | " + / / a number <nl> internal sealed class TextTokenizer { <nl> " \ \ G \ ' ( ? > ( [ ^ \ " \ \ \ n \ \ \ \ ] | \ \ \ \ . ) * ) ( \ ' | \ \ \ \ ? $ ) " , / / a single - quoted string <nl> RegexOptions . Compiled | RegexOptions . Multiline ) ; <nl> <nl> - private static readonly Regex DoubleInfinity = new Regex ( " ^ - ? inf ( inity ) ? $ " , RegexOptions . Compiled | RegexOptions . IgnoreCase ) ; <nl> - private static readonly Regex FloatInfinity = new Regex ( " ^ - ? inf ( inity ) ? f ? $ " , RegexOptions . Compiled | RegexOptions . IgnoreCase ) ; <nl> - private static readonly Regex FloatNan = new Regex ( " ^ nanf ? $ " , RegexOptions . Compiled | RegexOptions . IgnoreCase ) ; <nl> + private static readonly Regex DoubleInfinity = new Regex ( " ^ - ? inf ( inity ) ? $ " , CompiledRegexWhereAvailable | RegexOptions . IgnoreCase ) ; <nl> + private static readonly Regex FloatInfinity = new Regex ( " ^ - ? inf ( inity ) ? f ? $ " , CompiledRegexWhereAvailable | RegexOptions . IgnoreCase ) ; <nl> + private static readonly Regex FloatNan = new Regex ( " ^ nanf ? $ " , CompiledRegexWhereAvailable | RegexOptions . IgnoreCase ) ; <nl> <nl> / * * Construct a tokenizer that parses tokens from the given text . * / <nl> public TextTokenizer ( string text ) { <nl> mmm a / src / ProtocolBuffers / UnknownFieldSet . cs <nl> ppp b / src / ProtocolBuffers / UnknownFieldSet . cs <nl> public sealed class UnknownFieldSet { <nl> public sealed class Builder <nl> { <nl> / / / < summary > <nl> - / / / Mapping from number to field . Note that by using a SortedList we ensure <nl> + / / / Mapping from number to field . 
Note that by using a SortedDictionary we ensure <nl> / / / that the fields will be serialized in ascending order . <nl> / / / < / summary > <nl> - private IDictionary < int , UnknownField > fields = new SortedList < int , UnknownField > ( ) ; <nl> + private IDictionary < int , UnknownField > fields = new SortedDictionary < int , UnknownField > ( ) ; <nl> <nl> / / Optimization : We keep around a builder for the last field that was <nl> / / modified so that we can efficiently add to it multiple times in a <nl>
Initial Silverlight compatibility work
protocolbuffers/protobuf
60fb63e3704091d0d681181dbab2055f6878f2ea
2009-06-20T19:46:28Z
mmm a / docs / api / download - item . md <nl> ppp b / docs / api / download - item . md <nl> Returns ` Boolean ` - Whether the download is paused . <nl> <nl> Resumes the download that has been paused . <nl> <nl> - * * Note : * * To enable resumable downloads the server you are downloading from must support range requests and provide both ` Last - Modified ` and ` ETag ` header values . Otherwise ` resume ( ) ` will dismiss previously received bytes and restart the download from the beginning . <nl> + * * Note : * * To enable resumable downloads the server you are downloading from must support range requests and provide both ` Last - Modified ` and ` ETag ` header values . Otherwise ` resume ( ) ` will dismiss previously received bytes and restart the download from the beginning . <nl> <nl> # # # # ` downloadItem . canResume ( ) ` <nl> <nl> - Resumes ` Boolean ` - Whether the download can resume . <nl> + Returns ` Boolean ` - Whether the download can resume . <nl> <nl> # # # # ` downloadItem . cancel ( ) ` <nl> <nl>
fix doc in download-item
electron/electron
dd139706ea54caf49bda5d0d5213ecf8b3a489ee
2017-11-23T12:31:09Z
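As a quick illustration of the DownloadItem API this doc fix touches (not part of the patch itself), here is a minimal sketch of how `canResume()` and `resume()` are typically used from an Electron main process; the save path and log message are hypothetical, and the structure is a sketch rather than the project's own example.

```js
const { app, session } = require('electron')

app.on('ready', () => {
  session.defaultSession.on('will-download', (event, item) => {
    // Hypothetical fixed save path so no save dialog is shown.
    item.setSavePath('/tmp/example.bin')

    item.pause()

    // Resuming only works if the server supports range requests and sent both
    // Last-Modified and ETag; otherwise resume() restarts from the beginning.
    if (item.canResume()) {
      item.resume()
    }

    item.once('done', (e, state) => {
      console.log('download finished with state:', state)
    })
  })
})
```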
mmm a / tensorflow / core / BUILD <nl> ppp b / tensorflow / core / BUILD <nl> load ( <nl> " tf_additional_lib_deps " , <nl> " tf_additional_libdevice_data " , <nl> " tf_additional_libdevice_deps " , <nl> - " tf_additional_minimal_lib_srcs " , <nl> " tf_additional_monitoring_hdrs " , <nl> " tf_additional_numa_copts " , <nl> " tf_additional_numa_deps " , <nl> cc_library ( <nl> copts = tf_copts ( ) + tf_additional_numa_copts ( ) , <nl> visibility = [ " : __subpackages__ " ] , <nl> deps = [ <nl> + " / / tensorflow / core / platform : mutex " , <nl> " / / tensorflow / core / platform : platform " , <nl> " / / tensorflow / core / platform : thread_annotations " , <nl> " : platform_base " , <nl> cc_library ( <nl> <nl> cc_library ( <nl> name = " framework_lite " , <nl> - srcs = [ <nl> - " / / tensorflow / core / platform : legacy_minimal_lib_srcs " , <nl> - ] , <nl> hdrs = [ <nl> " framework / numeric_types . h " , <nl> " framework / tensor_types . h " , <nl> cc_library ( <nl> " / / tensorflow / core / platform : cpu_info . h " , <nl> " / / tensorflow / core / platform : default / integral_types . h " , <nl> " / / tensorflow / core / platform : default / logging . h " , <nl> - " / / tensorflow / core / platform : default / mutex . h " , <nl> " / / tensorflow / core / platform : dynamic_annotations . h " , <nl> " / / tensorflow / core / platform : macros . h " , <nl> " / / tensorflow / core / platform : mutex . h " , <nl> cc_library ( <nl> " / / tensorflow / core / lib / bfloat16 " , <nl> " / / tensorflow / core / platform : dynamic_annotations " , <nl> " / / tensorflow / core / platform : thread_annotations " , <nl> + " / / tensorflow / core / platform : mutex " , <nl> " / / tensorflow / core / platform / default / build_config : minimal " , <nl> " / / tensorflow / core / platform : types " , <nl> ] , <nl> cc_library ( <nl> " / / tensorflow / core / platform : abi " , <nl> " / / tensorflow / core / platform : annotation " , <nl> " / / tensorflow / core / platform : cpu_info " , <nl> + " / / tensorflow / core / platform : mutex " , <nl> " / / tensorflow / core / platform : numbers " , <nl> " / / tensorflow / core / platform : platform_strings " , <nl> " / / tensorflow / core / platform : scanner " , <nl> mmm a / tensorflow / core / platform / BUILD <nl> ppp b / tensorflow / core / platform / BUILD <nl> load ( <nl> " tf_additional_lib_hdrs " , <nl> " tf_additional_lib_srcs " , <nl> " tf_additional_libdevice_srcs " , <nl> - " tf_additional_minimal_lib_srcs " , <nl> " tf_additional_monitoring_srcs " , <nl> " tf_additional_proto_hdrs " , <nl> " tf_additional_rocdl_deps " , <nl> cc_library ( <nl> cc_library ( <nl> name = " mutex " , <nl> textual_hdrs = [ " mutex . h " ] , <nl> - deps = [ <nl> - " : mutex_impl " , <nl> - ] , <nl> + deps = tf_mobile_aware_deps ( " mutex_impl " ) , <nl> ) <nl> <nl> cc_library ( <nl> filegroup ( <nl> " * * / stream_executor . h " , <nl> " * * / env_time . cc " , <nl> " * * / logger . cc " , <nl> + " * * / mutex . cc " , <nl> " * * / logging . cc " , <nl> " * * / human_readable_json . cc " , <nl> " * * / rocm . h " , <nl> filegroup ( <nl> " * * / monitoring . cc " , <nl> " * * / cuda_libdevice_path . cc " , <nl> " * * / logger . cc " , <nl> + " * * / mutex . cc " , <nl> " * * / logging . cc " , <nl> " * * / human_readable_json . cc " , <nl> " * * / rocm_rocdl_path . 
cc " , <nl> filegroup ( <nl> visibility = [ " / / tensorflow / core : __pkg__ " ] , <nl> ) <nl> <nl> - filegroup ( <nl> - name = " legacy_minimal_lib_srcs " , <nl> - srcs = tf_additional_minimal_lib_srcs ( ) , <nl> - visibility = [ " / / tensorflow / core : __pkg__ " ] , <nl> - ) <nl> - <nl> filegroup ( <nl> name = " legacy_libdevice_srcs " , <nl> srcs = tf_additional_libdevice_srcs ( ) , <nl> mmm a / tensorflow / core / platform / default / build_config . bzl <nl> ppp b / tensorflow / core / platform / default / build_config . bzl <nl> def tf_additional_monitoring_srcs ( ) : <nl> " default / monitoring . cc " , <nl> ] <nl> <nl> - def tf_additional_minimal_lib_srcs ( ) : <nl> - return [ <nl> - " default / integral_types . h " , <nl> - " default / mutex . h " , <nl> - " default / mutex_data . h " , <nl> - ] <nl> - <nl> def tf_additional_proto_hdrs ( ) : <nl> return [ <nl> " default / integral_types . h " , <nl>
Wiring tf/core/platform:mutex into tf/core/BUILD.
tensorflow/tensorflow
0d3376d6362d5b21e087d4142f600dbb14e02024
2019-10-02T23:45:56Z
mmm a / src / python / grpcio / grpc / _links / service . py <nl> ppp b / src / python / grpcio / grpc / _links / service . py <nl> def add_ticket ( self , ticket ) : <nl> termination = None <nl> else : <nl> termination = links . Ticket . Termination . COMPLETION <nl> - ticket = links . Ticket ( <nl> + early_read_ticket = links . Ticket ( <nl> call , rpc_state . sequence_number , None , None , None , None , None , <nl> None , payload , None , None , None , termination ) <nl> rpc_state . sequence_number + = 1 <nl> - self . _relay . add_value ( ticket ) <nl> + self . _relay . add_value ( early_read_ticket ) <nl> <nl> if ticket . payload is not None : <nl> call . write ( rpc_state . response_serializer ( ticket . payload ) , call ) <nl>
Fix parameter reassignment defect
grpc/grpc
c9dc74b2cd8f57c1b6b1770c43c9f0e861c566c0
2015-08-24T21:59:03Z
mmm a / src / mongo / db / repl / replication_executor . cpp <nl> ppp b / src / mongo / db / repl / replication_executor . cpp <nl> ReplicationExecutor : : ~ ReplicationExecutor ( ) { <nl> invariant ( ! _executorThread . joinable ( ) ) ; <nl> } <nl> <nl> + BSONObj ReplicationExecutor : : getDiagnosticBSON ( ) { <nl> + stdx : : lock_guard < stdx : : mutex > lk ( _mutex ) ; <nl> + BSONObjBuilder builder ; <nl> + builder . appendIntOrLL ( " networkInProgress " , _networkInProgressQueue . size ( ) ) ; <nl> + builder . appendIntOrLL ( " dbWorkInProgress " , _dbWorkInProgressQueue . size ( ) ) ; <nl> + builder . appendIntOrLL ( " exclusiveInProgress " , _exclusiveLockInProgressQueue . size ( ) ) ; <nl> + builder . appendIntOrLL ( " sleeperQueue " , _sleepersQueue . size ( ) ) ; <nl> + builder . appendIntOrLL ( " ready " , _readyQueue . size ( ) ) ; <nl> + builder . appendIntOrLL ( " free " , _freeQueue . size ( ) ) ; <nl> + builder . appendIntOrLL ( " unsignaledEvents " , _unsignaledEvents . size ( ) ) ; <nl> + builder . appendIntOrLL ( " eventWaiters " , _totalEventWaiters ) ; <nl> + builder . append ( " shuttingDown " , _inShutdown ) ; <nl> + builder . append ( " networkInterface " , _networkInterface - > getDiagnosticString ( ) ) ; <nl> + return builder . obj ( ) ; <nl> + } <nl> + <nl> std : : string ReplicationExecutor : : getDiagnosticString ( ) { <nl> stdx : : lock_guard < stdx : : mutex > lk ( _mutex ) ; <nl> return _getDiagnosticString_inlock ( ) ; <nl> void ReplicationExecutor : : shutdown ( ) { <nl> _readyQueue . splice ( _readyQueue . end ( ) , _getEventFromHandle ( event ) - > _waiters ) ; <nl> } <nl> for ( auto readyWork : _readyQueue ) { <nl> - _getCallbackFromHandle ( readyWork . callback ) - > _isCanceled = true ; <nl> + auto callback = _getCallbackFromHandle ( readyWork . callback ) ; <nl> + callback - > _isCanceled = true ; <nl> + callback - > _isSleeper = false ; <nl> } <nl> <nl> _networkInterface - > signalWorkAvailable ( ) ; <nl> StatusWith < ReplicationExecutor : : CallbackHandle > ReplicationExecutor : : scheduleWor <nl> StatusWith < CallbackHandle > cbHandle = enqueueWork_inlock ( & temp , work ) ; <nl> if ( ! cbHandle . isOK ( ) ) <nl> return cbHandle ; <nl> - _getCallbackFromHandle ( cbHandle . getValue ( ) ) - > _iter - > readyDate = when ; <nl> + auto callback = _getCallbackFromHandle ( cbHandle . getValue ( ) ) ; <nl> + callback - > _iter - > readyDate = when ; <nl> + callback - > _isSleeper = true ; <nl> WorkQueue : : iterator insertBefore = _sleepersQueue . begin ( ) ; <nl> while ( insertBefore ! = _sleepersQueue . end ( ) & & insertBefore - > readyDate < = when ) <nl> + + insertBefore ; <nl> int64_t ReplicationExecutor : : nextRandomInt64 ( int64_t limit ) { <nl> Date_t ReplicationExecutor : : scheduleReadySleepers_inlock ( const Date_t now ) { <nl> WorkQueue : : iterator iter = _sleepersQueue . begin ( ) ; <nl> while ( ( iter ! = _sleepersQueue . end ( ) ) & & ( iter - > readyDate < = now ) ) { <nl> + auto callback = ReplicationExecutor : : _getCallbackFromHandle ( iter - > callback ) ; <nl> + callback - > _isSleeper = false ; <nl> + + iter ; <nl> } <nl> _readyQueue . splice ( _readyQueue . end ( ) , _sleepersQueue , _sleepersQueue . 
begin ( ) , iter ) ; <nl> ReplicationExecutor : : Callback : : Callback ( ReplicationExecutor * executor , <nl> _executor ( executor ) , <nl> _callbackFn ( callbackFn ) , <nl> _isCanceled ( false ) , <nl> + _isSleeper ( false ) , <nl> _iter ( iter ) , <nl> _finishedEvent ( finishedEvent ) { } <nl> <nl> ReplicationExecutor : : Callback : : ~ Callback ( ) { } <nl> void ReplicationExecutor : : Callback : : cancel ( ) { <nl> stdx : : unique_lock < stdx : : mutex > lk ( _executor - > _mutex ) ; <nl> _isCanceled = true ; <nl> + <nl> + if ( _isSleeper ) { <nl> + _isSleeper = false ; <nl> + _executor - > _readyQueue . splice ( <nl> + _executor - > _readyQueue . end ( ) , _executor - > _sleepersQueue , _iter ) ; <nl> + } <nl> + <nl> if ( _iter - > isNetworkOperation ) { <nl> lk . unlock ( ) ; <nl> _executor - > _networkInterface - > cancelCommand ( _iter - > callback ) ; <nl> void callNoExcept ( const stdx : : function < void ( ) > & fn ) { <nl> try { <nl> fn ( ) ; <nl> } catch ( . . . ) { <nl> + auto status = exceptionToStatus ( ) ; <nl> + log ( ) < < " Exception thrown in ReplicationExecutor callback : " < < status ; <nl> std : : terminate ( ) ; <nl> } <nl> } <nl> mmm a / src / mongo / db / repl / replication_executor . h <nl> ppp b / src / mongo / db / repl / replication_executor . h <nl> class ReplicationExecutor final : public executor : : TaskExecutor { <nl> virtual ~ ReplicationExecutor ( ) ; <nl> <nl> std : : string getDiagnosticString ( ) override ; <nl> + BSONObj getDiagnosticBSON ( ) ; <nl> Date_t now ( ) override ; <nl> void startup ( ) override ; <nl> void shutdown ( ) override ; <nl> class ReplicationExecutor : : Callback : public executor : : TaskExecutor : : CallbackSta <nl> / / All members other than _executor are protected by the executor ' s _mutex . <nl> CallbackFn _callbackFn ; <nl> bool _isCanceled ; <nl> + bool _isSleeper ; <nl> WorkQueue : : iterator _iter ; <nl> EventHandle _finishedEvent ; <nl> } ; <nl> mmm a / src / mongo / db / repl / replication_executor_test . cpp <nl> ppp b / src / mongo / db / repl / replication_executor_test . cpp <nl> TEST_F ( ReplicationExecutorTest , ShutdownBeforeRunningSecondExclusiveLockOperatio <nl> ASSERT_EQUALS ( ErrorCodes : : CallbackCanceled , status2 . code ( ) ) ; <nl> } <nl> <nl> + TEST_F ( ReplicationExecutorTest , CancelBeforeRunningFutureWork ) { <nl> + ReplicationExecutor & executor = getReplExecutor ( ) ; <nl> + using CallbackData = ReplicationExecutor : : CallbackArgs ; <nl> + Status status1 = getDetectableErrorStatus ( ) ; <nl> + auto cbhWithStatus = <nl> + executor . scheduleWorkAt ( executor . now ( ) + Milliseconds ( 1000 ) , <nl> + [ & ] ( const CallbackData & cbData ) { <nl> + status1 = cbData . status ; <nl> + if ( cbData . status ! = ErrorCodes : : CallbackCanceled ) <nl> + cbData . executor - > shutdown ( ) ; <nl> + } ) ; <nl> + ASSERT_OK ( cbhWithStatus . getStatus ( ) ) ; <nl> + <nl> + ASSERT_EQUALS ( 1 , executor . getDiagnosticBSON ( ) [ " sleeperQueue " ] . Int ( ) ) ; <nl> + ASSERT_EQUALS ( 0 , executor . getDiagnosticBSON ( ) [ " ready " ] . Int ( ) ) ; <nl> + executor . cancel ( cbhWithStatus . getValue ( ) ) ; <nl> + <nl> + ASSERT_EQUALS ( 0 , executor . getDiagnosticBSON ( ) [ " sleeperQueue " ] . Int ( ) ) ; <nl> + ASSERT_EQUALS ( 1 , executor . getDiagnosticBSON ( ) [ " ready " ] . Int ( ) ) ; <nl> + } <nl> + <nl> } / / namespace <nl> } / / namespace repl <nl> } / / namespace mongo <nl>
SERVER-20658 move cancelled work items from the sleepersQueue to the readyQueue in the ReplicationExecutor
mongodb/mongo
e1dde496ced6fdfa9e4cbc55a9e71084bf01d8c1
2015-10-01T09:08:07Z
mmm a / scripts / apollo_base . sh <nl> ppp b / scripts / apollo_base . sh <nl> function set_lib_path ( ) { <nl> if [ - d " $ { LIB_USER } " ] ; then <nl> export LD_LIBRARY_PATH = $ { LD_LIBRARY_PATH } : $ { LIB_USER } <nl> else <nl> - warn " $ { LIB_USER } doesn ' t exist " <nl> + warning " $ { LIB_USER } doesn ' t exist " <nl> fi <nl> <nl> if [ " $ RELEASE_DOCKER " = = 1 ] ; then <nl>
Scripts: Fix function name.
ApolloAuto/apollo
a6223e9a009b44efb394565790c86fe10c047847
2019-12-07T13:36:40Z
mmm a / Documentation / UserManual / Foxx . md <nl> ppp b / Documentation / UserManual / Foxx . md <nl> ArangoDB . It is inspired by Sinatra , the classy Ruby web framework . If <nl> Foxx is Sinatra , @ ref UserManualActions are the corresponding ` Rack ` . <nl> They provide all the HTTP goodness . <nl> <nl> - If you just want to install an existiting application , please use the <nl> + If you just want to install an existing application , please use the <nl> @ ref UserManualFoxxManager . If you want to create your own application , <nl> please continue . <nl> <nl> So let ' s get started , shall we ? <nl> <nl> + Overview <nl> + mmmmmm - - <nl> + <nl> + The typical request to a Foxx application will work as follows ( only conceptually , <nl> + a lot of the steps are cached in reality ) : <nl> + <nl> + 1 . The request is routed to a Foxx application depending on the mount point <nl> + 2 . The according controller of this application is determined ( via something called the manifest file ) <nl> + 3 . The request is then routed to a specific handler in this controller <nl> + <nl> + The handler will now parse the request . This includes determining all parameters <nl> + from the body ( which is typically JSON encoded ) to the path parameters of the URL . <nl> + It is then up to you to handle this request and generate a response . In this process <nl> + you will probably access the database . This is done via the * * Repository * * : This is an <nl> + entity that is responsible for a collection and specifically : <nl> + <nl> + 1 . Creating new entries in this collection <nl> + 2 . Modify or delete existing entries in this collection <nl> + 3 . Search for entries in this collection <nl> + <nl> + To represent an entry in this collection it will use a * * Model * * , which is a wrapper around <nl> + the raw data from the database . Here you can implement helper functions or simple access <nl> + methods . <nl> + <nl> + Now let ' s get into the details . <nl> + <nl> Creating the application files <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> <nl>
Foxx Manual: Overview
arangodb/arangodb
9266e4da23a2d3cbb47d719c3384e91ee9b737ff
2013-09-27T20:33:50Z
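The overview added in this commit describes the controller → repository → model flow in prose only; below is a minimal sketch of what such a handler could look like in a Foxx application of that era. The route, collection, and repository names are hypothetical, and the exact repository/model helpers are an assumption based on the surrounding manual, not code from this commit.

```js
// Sketch only: assumes a Foxx app with a 'todos' collection and a matching
// repository/model pair already wired up in the application context.
var Foxx = require("org/arangodb/foxx");
var controller = new Foxx.Controller(applicationContext);

// Step 3 of the overview: the request is routed to a specific handler.
controller.get("/todos/:id", function (req, res) {
  var id = req.params("id");
  // The repository is responsible for the collection...
  var todo = todosRepository.byId(id);
  // ...and hands back a model wrapping the raw document.
  res.json(todo.forClient());
});
```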
mmm a / src / runtime / runtime - debug . cc <nl> ppp b / src / runtime / runtime - debug . cc <nl> static MaybeHandle < JSArray > GetIteratorInternalProperties ( <nl> Isolate * isolate , Handle < IteratorType > object ) { <nl> Factory * factory = isolate - > factory ( ) ; <nl> Handle < IteratorType > iterator = Handle < IteratorType > : : cast ( object ) ; <nl> - RUNTIME_ASSERT_HANDLIFIED ( iterator - > kind ( ) - > IsSmi ( ) , JSArray ) ; <nl> + CHECK ( iterator - > kind ( ) - > IsSmi ( ) ) ; <nl> const char * kind = NULL ; <nl> switch ( Smi : : cast ( iterator - > kind ( ) ) - > value ( ) ) { <nl> case IteratorType : : kKindKeys : <nl> static MaybeHandle < JSArray > GetIteratorInternalProperties ( <nl> kind = " entries " ; <nl> break ; <nl> default : <nl> - RUNTIME_ASSERT_HANDLIFIED ( false , JSArray ) ; <nl> + UNREACHABLE ( ) ; <nl> } <nl> <nl> Handle < FixedArray > result = factory - > NewFixedArray ( 2 * 3 ) ; <nl> MaybeHandle < JSArray > Runtime : : GetInternalProperties ( Isolate * isolate , <nl> <nl> Handle < Object > status_obj = <nl> DebugGetProperty ( promise , isolate - > factory ( ) - > promise_state_symbol ( ) ) ; <nl> - RUNTIME_ASSERT_HANDLIFIED ( status_obj - > IsSmi ( ) , JSArray ) ; <nl> + CHECK ( status_obj - > IsSmi ( ) ) ; <nl> const char * status = " rejected " ; <nl> int status_val = Handle < Smi > : : cast ( status_obj ) - > value ( ) ; <nl> switch ( status_val ) { <nl> mmm a / src / runtime / runtime - literals . cc <nl> ppp b / src / runtime / runtime - literals . cc <nl> MUST_USE_RESULT static MaybeHandle < AllocationSite > GetLiteralAllocationSite ( <nl> static MaybeHandle < JSObject > CreateArrayLiteralImpl ( <nl> Isolate * isolate , Handle < LiteralsArray > literals , int literals_index , <nl> Handle < FixedArray > elements , int flags ) { <nl> - RUNTIME_ASSERT_HANDLIFIED ( <nl> - literals_index > = 0 & & literals_index < literals - > literals_count ( ) , <nl> - JSObject ) ; <nl> + CHECK ( literals_index > = 0 & & literals_index < literals - > literals_count ( ) ) ; <nl> Handle < AllocationSite > site ; <nl> ASSIGN_RETURN_ON_EXCEPTION ( <nl> isolate , site , <nl> mmm a / src / runtime / runtime - utils . h <nl> ppp b / src / runtime / runtime - utils . h <nl> namespace internal { <nl> } \ <nl> } while ( 0 ) <nl> <nl> - # define RUNTIME_ASSERT_HANDLIFIED ( value , T ) \ <nl> - do { \ <nl> - if ( ! ( value ) ) { \ <nl> - V8_RuntimeError ( __FILE__ , __LINE__ , # value ) ; \ <nl> - isolate - > ThrowIllegalOperation ( ) ; \ <nl> - return MaybeHandle < T > ( ) ; \ <nl> - } \ <nl> - } while ( 0 ) <nl> - <nl> # else <nl> <nl> # define RUNTIME_ASSERT ( value ) \ <nl> namespace internal { <nl> } \ <nl> } while ( 0 ) <nl> <nl> - # define RUNTIME_ASSERT_HANDLIFIED ( value , T ) \ <nl> - do { \ <nl> - if ( ! ( value ) ) { \ <nl> - isolate - > ThrowIllegalOperation ( ) ; \ <nl> - return MaybeHandle < T > ( ) ; \ <nl> - } \ <nl> - } while ( 0 ) <nl> - <nl> # endif <nl> <nl> / / Cast the given object to a value of the specified type and store <nl>
[runtime] Remove RUNTIME_ASSERT_HANDLIFIED.
v8/v8
960a87bbd5c59dea2ac7009be0ca288aee74c030
2016-06-06T09:29:03Z
mmm a / docs / tools / requirements . txt <nl> ppp b / docs / tools / requirements . txt <nl> soupsieve = = 2 . 0 . 1 <nl> termcolor = = 1 . 1 . 0 <nl> tornado = = 5 . 1 . 1 <nl> Unidecode = = 1 . 1 . 1 <nl> - urllib3 = = 1 . 25 . 9 <nl> + urllib3 = = 1 . 25 . 10 <nl> mmm a / docs / tools / translate / requirements . txt <nl> ppp b / docs / tools / translate / requirements . txt <nl> python - slugify = = 4 . 0 . 1 <nl> PyYAML = = 5 . 3 . 1 <nl> requests = = 2 . 24 . 0 <nl> text - unidecode = = 1 . 3 <nl> - urllib3 = = 1 . 25 . 9 <nl> + urllib3 = = 1 . 25 . 10 <nl>
Bump urllib3 from 1.25.9 to 1.25.10 in /docs/tools
ClickHouse/ClickHouse
4a1fff0795a2d518a1931c7c004e0511c2918de9
2020-07-23T07:11:56Z
mmm a / xbmc / network / AirPlayServer . cpp <nl> ppp b / xbmc / network / AirPlayServer . cpp <nl> CAirPlayServer * CAirPlayServer : : ServerInstance = NULL ; <nl> # define EVENT_PLAYING 0 <nl> # define EVENT_PAUSED 1 <nl> # define EVENT_LOADING 2 <nl> - char * eventStrings [ ] = { " playing " , " paused " , " loading " } ; <nl> + const char * eventStrings [ ] = { " playing " , " paused " , " loading " } ; <nl> <nl> # define PLAYBACK_INFO " < ? xml version = \ " 1 . 0 \ " encoding = \ " UTF - 8 \ " ? > \ r \ n " \ <nl> " < ! DOCTYPE plist PUBLIC \ " - / / Apple / / DTD PLIST 1 . 0 / / EN \ " \ " http : / / www . apple . com / DTDs / PropertyList - 1 . 0 . dtd \ " > \ r \ n " \ <nl>
cosmetic: silence compiler warning
xbmc/xbmc
863b39718637bee837817b108eca415e9b12ba66
2011-08-27T10:10:11Z
mmm a / src / parser . cc <nl> ppp b / src / parser . cc <nl> FunctionLiteral * Parser : : ParseProgram ( ) { <nl> PrintF ( " - took % 0 . 3f ms ] \ n " , ms ) ; <nl> } <nl> if ( cached_data_mode_ = = PRODUCE_CACHED_DATA ) { <nl> - Vector < unsigned > store = recorder . ExtractData ( ) ; <nl> - * cached_data_ = new ScriptData ( store ) ; <nl> + if ( result ! = NULL ) { <nl> + Vector < unsigned > store = recorder . ExtractData ( ) ; <nl> + * cached_data_ = new ScriptData ( store ) ; <nl> + } <nl> log_ = NULL ; <nl> } <nl> return result ; <nl>
Produce cached data only for scripts which compile without errors.
v8/v8
43f9c6784218d90435158a9d011e2e1157edfa02
2014-05-06T11:22:54Z
mmm a / spec / chromium - spec . js <nl> ppp b / spec / chromium - spec . js <nl> describe ( ' chromium feature ' , function ( ) { <nl> } ) <nl> } ) <nl> } ) <nl> + <nl> + describe ( ' fetch ' , function ( ) { <nl> + it ( ' does not crash ' , function ( done ) { <nl> + const server = http . createServer ( function ( req , res ) { <nl> + res . end ( ' test ' ) <nl> + server . close ( ) <nl> + } ) <nl> + server . listen ( 0 , ' 127 . 0 . 0 . 1 ' , function ( ) { <nl> + const port = server . address ( ) . port <nl> + fetch ( ` http : / / 127 . 0 . 0 . 1 : $ { port } ` ) . then ( ( res ) = > { <nl> + return res . body . getReader ( ) <nl> + } ) . then ( ( reader ) = > { <nl> + reader . read ( ) . then ( ( r ) = > { <nl> + reader . cancel ( ) <nl> + } ) <nl> + } ) . catch ( function ( e ) { <nl> + done ( ) <nl> + } ) <nl> + } ) <nl> + } ) <nl> + } ) <nl> } ) <nl>
spec: Add test case for fetch
electron/electron
9bc6302aead522f4a3695dbbf40a891dff2fd595
2016-08-22T10:26:07Z
mmm a / configure . ac <nl> ppp b / configure . ac <nl> AC_COMPILE_IFELSE ( <nl> <nl> AC_MSG_RESULT ( [ $ CLANG ] ) <nl> <nl> - dnl * * * * * * * * * * * * * * * * * * * * <nl> - dnl turn on c + + 11 <nl> - dnl * * * * * * * * * * * * * * * * * * * * <nl> + dnl * * * * * * * * * * * * * * * * * * * * * * <nl> + dnl Turn on C + + 11 or newer <nl> + dnl * * * * * * * * * * * * * * * * * * * * * * <nl> <nl> - OLD_CXXFLAGS = $ CXXFLAGS <nl> - AC_MSG_CHECKING ( [ whether compiler supports C + + 11 ] ) <nl> - CXXFLAGS = " $ CXXFLAGS - std = c + + 11 " <nl> - AC_COMPILE_IFELSE ( <nl> - [ <nl> - AC_LANG_SOURCE ( [ [ <nl> - # if ( __cplusplus < 201103L ) <nl> - # error C + + 11 is unsupported <nl> - # endif <nl> - ] ] ) <nl> - ] , [ <nl> - AC_MSG_RESULT ( yes ) <nl> - ] , <nl> - [ <nl> - AC_MSG_RESULT ( no ) <nl> - AC_MSG_ERROR ( [ Your compiler does not have the necessary c + + 11 support ! Cannot proceed . ] ) <nl> - ] ) <nl> - CXXFLAGS = " $ OLD_CXXFLAGS " <nl> - <nl> - <nl> - # set c + + 11 support based on platform / compiler <nl> + CPLUSPLUS = <nl> + <nl> + AX_CHECK_COMPILE_FLAG ( [ - std = c + + 11 ] , [ cplusplus11 = true ] , [ cplusplus11 = false ] ) <nl> + if $ cplusplus11 ; then <nl> + CPLUSPLUS = 11 <nl> + fi <nl> + <nl> + AX_CHECK_COMPILE_FLAG ( [ - std = c + + 14 ] , [ cplusplus14 = true ] , [ cplusplus14 = false ] ) <nl> + if $ cplusplus14 ; then <nl> + CPLUSPLUS = 14 <nl> + fi <nl> + <nl> + if test - z " $ CPLUSPLUS " ; then <nl> + AC_MSG_ERROR ( [ Your compiler does not have the necessary C + + 11 support ! Cannot proceed . ] ) <nl> + fi <nl> + <nl> + # Set C + + 11 or C + + 14 support based on platform / compiler <nl> case " $ { host_os } " in <nl> cygwin * ) <nl> - CXXFLAGS = " $ CXXFLAGS - std = gnu + + 11 " <nl> + CXXFLAGS = " $ CXXFLAGS - std = gnu + + $ CPLUSPLUS " <nl> ; ; <nl> * - darwin * | * - macos10 * ) <nl> if test " x $ CLANG " = " xyes " ; then <nl> - CXXFLAGS = " $ CXXFLAGS - std = c + + 11 " <nl> + CXXFLAGS = " $ CXXFLAGS - std = c + + $ CPLUSPLUS " <nl> LDFLAGS = " $ LDFLAGS - stdlib = libc + + " <nl> else <nl> - CXXFLAGS = " $ CXXFLAGS - std = c + + 11 " <nl> + CXXFLAGS = " $ CXXFLAGS - std = c + + $ CPLUSPLUS " <nl> fi <nl> ; ; <nl> * ) <nl> # default <nl> - CXXFLAGS = " $ CXXFLAGS - std = c + + 11 " <nl> + CXXFLAGS = " $ CXXFLAGS - std = c + + $ CPLUSPLUS " <nl> ; ; <nl> esac <nl> <nl>
Use C++14 compiler if possible
tesseract-ocr/tesseract
fd6e281c61d5ca60dd233e6f2aa631bc1a5571ca
2019-02-13T10:05:34Z
mmm a / emcc . py <nl> ppp b / emcc . py <nl> def get_final ( ) : <nl> open ( final + ' . mem . js ' , ' w ' ) . write ( src ) <nl> final + = ' . mem . js ' <nl> <nl> + log_time ( ' memory initializer ' ) <nl> + <nl> + with ToolchainProfiler . profile_block ( ' binaryen ' ) : <nl> + do_binaryen ( target , options , memfile , wasm_target , <nl> + wasm_source_map_target , misc_temp_files ) <nl> + <nl> + log_time ( ' binaryen ' ) <nl> + # If we are not emitting any JS then we are all done now <nl> + if shared . Settings . SIDE_MODULE or final_suffix in WASM_ENDINGS : <nl> + return <nl> + <nl> + with ToolchainProfiler . profile_block ( ' final emitting ' ) : <nl> + # Remove some trivial whitespace <nl> + # TODO : do not run when compress has already been done on all parts of the code <nl> + # src = open ( final ) . read ( ) <nl> + # src = re . sub ( r ' \ n + [ \ n ] * \ n + ' , ' \ n ' , src ) <nl> + # open ( final , ' w ' ) . write ( src ) <nl> + <nl> if shared . Settings . USE_PTHREADS : <nl> target_dir = os . path . dirname ( os . path . abspath ( target ) ) <nl> worker_output = os . path . join ( target_dir , shared . Settings . PTHREAD_WORKER_FILE ) <nl> def get_final ( ) : <nl> minified_worker = building . acorn_optimizer ( worker_output , [ ' minifyWhitespace ' ] , return_output = True ) <nl> open ( worker_output , ' w ' ) . write ( minified_worker ) <nl> <nl> - log_time ( ' js opts ' ) <nl> - <nl> - with ToolchainProfiler . profile_block ( ' final emitting ' ) : <nl> - # Remove some trivial whitespace <nl> - # TODO : do not run when compress has already been done on all parts of the code <nl> - # src = open ( final ) . read ( ) <nl> - # src = re . sub ( r ' \ n + [ \ n ] * \ n + ' , ' \ n ' , src ) <nl> - # open ( final , ' w ' ) . write ( src ) <nl> - <nl> # track files that will need native eols <nl> generated_text_files_with_native_eols = [ ] <nl> <nl> - do_binaryen ( target , options , memfile , wasm_target , <nl> - wasm_source_map_target , misc_temp_files ) <nl> - # If we are building a wasm side module then we are all done now <nl> - if shared . Settings . SIDE_MODULE : <nl> - return <nl> - <nl> if shared . Settings . MODULARIZE : <nl> modularize ( ) <nl> <nl> def get_final ( ) : <nl> <nl> if final_suffix in JS_ENDINGS : <nl> js_target = target <nl> - elif final_suffix in WASM_ENDINGS : <nl> - js_target = misc_temp_files . get ( suffix = ' . js ' ) . name <nl> else : <nl> js_target = unsuffixed ( target ) + ' . js ' <nl> <nl>
Don't generate js output if final output is wasm file ()
emscripten-core/emscripten
bc78a3c0d2123bd67d3139e467e6ce1217cd4b0f
2020-09-13T17:54:07Z
mmm a / tensorflow / opensource_only . files <nl> ppp b / tensorflow / opensource_only . files <nl> tensorflow / third_party / toolchains / remote_config / rbe_config . bzl <nl> tensorflow / third_party / wrapt . BUILD <nl> tensorflow / third_party / zlib . BUILD <nl> tensorflow / tools / build_info / BUILD <nl> + tensorflow / tools / ci_build / horovod / gpu / nightly . sh <nl> tensorflow / tools / ci_build / release / common . sh <nl> tensorflow / tools / ci_build / release / common_win . bat <nl> tensorflow / tools / ci_build / release / macos / cpu_libtensorflow / build . sh <nl> new file mode 100644 <nl> index 0000000000000 . . 50f5f49cfad73 <nl> mmm / dev / null <nl> ppp b / tensorflow / tools / ci_build / horovod / gpu / nightly . sh <nl> <nl> + # ! / bin / bash <nl> + # Copyright 2020 The TensorFlow Authors . All Rights Reserved . <nl> + # <nl> + # Licensed under the Apache License , Version 2 . 0 ( the " License " ) ; <nl> + # you may not use this file except in compliance with the License . <nl> + # You may obtain a copy of the License at <nl> + # <nl> + # http : / / www . apache . org / licenses / LICENSE - 2 . 0 <nl> + # <nl> + # Unless required by applicable law or agreed to in writing , software <nl> + # distributed under the License is distributed on an " AS IS " BASIS , <nl> + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . <nl> + # See the License for the specific language governing permissions and <nl> + # limitations under the License . <nl> + # = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> + set - e <nl> + <nl> + # Source the external common scripts . <nl> + source tensorflow / tools / ci_build / release / common . sh <nl> + <nl> + <nl> + # Install latest bazel <nl> + install_bazelisk <nl> + which bazel <nl> + <nl> + # Install realpath <nl> + sudo apt - get install realpath <nl> + <nl> + # Update the version string to nightly <nl> + if [ - n " $ { IS_NIGHTLY_BUILD } " ] ; then <nl> + . / tensorflow / tools / ci_build / update_version . py - - nightly <nl> + fi <nl> + <nl> + # Download and install open - mpi . <nl> + wget https : / / download . open - mpi . org / release / open - mpi / v4 . 0 / openmpi - 4 . 0 . 4 . tar . gz <nl> + tar xvf openmpi - 4 . 0 . 4 . tar . gz <nl> + <nl> + cd openmpi <nl> + . / configure <nl> + <nl> + # Install open - mpi . <nl> + sudo make all install <nl> + sudo ldconfig <nl> + <nl> + <nl> + <nl>
Add kokoro tests for Horovod to run against tf-nightly.
tensorflow/tensorflow
699728208803884c2c882bf04ee74c52632a1ad0
2020-06-30T21:57:14Z
mmm a / trunk / src / core / srs_core . hpp <nl> ppp b / trunk / src / core / srs_core . hpp <nl> CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . <nl> / / current release version <nl> # define VERSION_MAJOR 2 <nl> # define VERSION_MINOR 0 <nl> - # define VERSION_REVISION 48 <nl> + # define VERSION_REVISION 49 <nl> / / server info . <nl> # define RTMP_SIG_SRS_KEY " SRS " <nl> # define RTMP_SIG_SRS_ROLE " origin / edge server " <nl> mmm a / trunk / src / kernel / srs_kernel_error . hpp <nl> ppp b / trunk / src / kernel / srs_kernel_error . hpp <nl> CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . <nl> # define ERROR_OpenSslSha256DigestSize 2037 <nl> # define ERROR_OpenSslGetPeerPublicKey 2038 <nl> # define ERROR_OpenSslComputeSharedKey 2039 <nl> + # define ERROR_RTMP_BUFFER_OVERFLOW 2040 <nl> / / <nl> / / system control message , <nl> / / not an error , but special control logic . <nl> mmm a / trunk / src / rtmp / srs_protocol_buffer . cpp <nl> ppp b / trunk / src / rtmp / srs_protocol_buffer . cpp <nl> CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . <nl> # include < srs_kernel_log . hpp > <nl> # include < srs_kernel_utility . hpp > <nl> <nl> + / / the max header size , <nl> + / / @ see SrsProtocol : : read_message_header ( ) . <nl> + # define SRS_RTMP_MAX_MESSAGE_HEADER 11 <nl> + <nl> SrsSimpleBuffer : : SrsSimpleBuffer ( ) <nl> { <nl> } <nl> SrsFastBuffer : : SrsFastBuffer ( ) <nl> merged_read = false ; <nl> _handler = NULL ; <nl> <nl> - nb_buffer = SOCKET_READ_SIZE ; <nl> - buffer = new char [ nb_buffer ] ; <nl> + p = end = buffer = NULL ; <nl> + nb_buffer = 0 ; <nl> + <nl> + reset_buffer ( SOCKET_READ_SIZE ) ; <nl> } <nl> <nl> SrsFastBuffer : : ~ SrsFastBuffer ( ) <nl> SrsFastBuffer : : ~ SrsFastBuffer ( ) <nl> srs_freep ( buffer ) ; <nl> } <nl> <nl> - int SrsFastBuffer : : length ( ) <nl> + char SrsFastBuffer : : read_1byte ( ) <nl> { <nl> - int len = ( int ) data . size ( ) ; <nl> - srs_assert ( len > = 0 ) ; <nl> - return len ; <nl> - } <nl> - <nl> - char * SrsFastBuffer : : bytes ( ) <nl> - { <nl> - return ( length ( ) = = 0 ) ? NULL : & data . at ( 0 ) ; <nl> + srs_assert ( end - p > = 1 ) ; <nl> + return * p + + ; <nl> } <nl> <nl> - void SrsFastBuffer : : erase ( int size ) <nl> + char * SrsFastBuffer : : read_slice ( int size ) <nl> { <nl> - if ( size < = 0 ) { <nl> - return ; <nl> - } <nl> + srs_assert ( end - p > = size ) ; <nl> + srs_assert ( p + size > buffer ) ; <nl> <nl> - if ( size > = length ( ) ) { <nl> - data . clear ( ) ; <nl> - return ; <nl> - } <nl> + char * ptr = p ; <nl> + p + = size ; <nl> <nl> - data . erase ( data . begin ( ) , data . begin ( ) + size ) ; <nl> + / / reset when consumed all . <nl> + if ( p = = end ) { <nl> + p = end = buffer ; <nl> + srs_verbose ( " all consumed , reset fast buffer " ) ; <nl> + } <nl> + <nl> + return ptr ; <nl> } <nl> <nl> - void SrsFastBuffer : : append ( const char * bytes , int size ) <nl> + void SrsFastBuffer : : skip ( int size ) <nl> { <nl> - srs_assert ( size > 0 ) ; <nl> - <nl> - data . insert ( data . 
end ( ) , bytes , bytes + size ) ; <nl> + srs_assert ( end - p > = size ) ; <nl> + srs_assert ( p + size > buffer ) ; <nl> + p + = size ; <nl> } <nl> <nl> int SrsFastBuffer : : grow ( ISrsBufferReader * reader , int required_size ) <nl> int SrsFastBuffer : : grow ( ISrsBufferReader * reader , int required_size ) <nl> return ret ; <nl> } <nl> <nl> - while ( length ( ) < required_size ) { <nl> + / / when read payload and need to grow , reset buffer . <nl> + if ( end - p < required_size & & required_size > SRS_RTMP_MAX_MESSAGE_HEADER ) { <nl> + int nb_cap = end - p ; <nl> + srs_verbose ( " move fast buffer % d bytes " , nb_cap ) ; <nl> + buffer = ( char * ) memmove ( buffer , p , nb_cap ) ; <nl> + p = buffer ; <nl> + end = p + nb_cap ; <nl> + } <nl> + <nl> + while ( end - p < required_size ) { <nl> + / / the max to read is the left bytes . <nl> + size_t max_to_read = buffer + nb_buffer - end ; <nl> + <nl> + if ( max_to_read < = 0 ) { <nl> + ret = ERROR_RTMP_BUFFER_OVERFLOW ; <nl> + srs_error ( " buffer overflow , required = % d , max = % d , ret = % d " , required_size , nb_buffer , ret ) ; <nl> + return ret ; <nl> + } <nl> + <nl> ssize_t nread ; <nl> - if ( ( ret = reader - > read ( buffer , nb_buffer , & nread ) ) ! = ERROR_SUCCESS ) { <nl> + if ( ( ret = reader - > read ( end , max_to_read , & nread ) ) ! = ERROR_SUCCESS ) { <nl> return ret ; <nl> } <nl> <nl> int SrsFastBuffer : : grow ( ISrsBufferReader * reader , int required_size ) <nl> _handler - > on_read ( nread ) ; <nl> } <nl> <nl> + / / we just move the ptr to next . <nl> srs_assert ( ( int ) nread > 0 ) ; <nl> - append ( buffer , ( int ) nread ) ; <nl> + end + = nread ; <nl> } <nl> <nl> return ret ; <nl> int SrsFastBuffer : : buffer_size ( ) <nl> <nl> void SrsFastBuffer : : reset_buffer ( int size ) <nl> { <nl> + / / remember the cap . <nl> + int nb_cap = end - p ; <nl> + <nl> + / / atleast to put the old data . <nl> + nb_buffer = srs_max ( nb_cap , size ) ; <nl> + <nl> + / / copy old data to buf . <nl> + char * buf = new char [ nb_buffer ] ; <nl> + if ( nb_cap > 0 ) { <nl> + memcpy ( buf , p , nb_cap ) ; <nl> + } <nl> + <nl> srs_freep ( buffer ) ; <nl> - <nl> - nb_buffer = size ; <nl> - buffer = new char [ nb_buffer ] ; <nl> + p = buffer = buf ; <nl> + end = p + nb_cap ; <nl> } <nl> mmm a / trunk / src / rtmp / srs_protocol_buffer . hpp <nl> ppp b / trunk / src / rtmp / srs_protocol_buffer . hpp <nl> class SrsFastBuffer <nl> / / the merged handler <nl> bool merged_read ; <nl> IMergeReadHandler * _handler ; <nl> - / / data and socket buffer <nl> - std : : vector < char > data ; <nl> + / / the user - space buffer to fill by reader , <nl> + / / which use fast index and reset when chunk body read ok . <nl> + / / @ see https : / / github . com / winlinvip / simple - rtmp - server / issues / 248 <nl> + / / ptr to the current read position . <nl> + char * p ; <nl> + / / ptr to the content end . <nl> + char * end ; <nl> + / / ptr to the buffer . <nl> + / / buffer < = p < = end < = buffer + nb_buffer <nl> char * buffer ; <nl> + / / the max size of buffer . <nl> int nb_buffer ; <nl> public : <nl> SrsFastBuffer ( ) ; <nl> virtual ~ SrsFastBuffer ( ) ; <nl> public : <nl> / * * <nl> - * get the length of buffer . empty if zero . <nl> - * @ remark assert length ( ) is not negative . <nl> - * / <nl> - virtual int length ( ) ; <nl> - / * * <nl> - * get the buffer bytes . <nl> - * @ return the bytes , NULL if empty . <nl> + * read 1byte from buffer , move to next bytes . <nl> + * @ remark assert buffer already grow ( 1 ) . 
<nl> * / <nl> - virtual char * bytes ( ) ; <nl> - public : <nl> + virtual char read_1byte ( ) ; <nl> / * * <nl> - * erase size of bytes from begin . <nl> - * @ param size to erase size of bytes . <nl> - * clear if size greater than or equals to length ( ) <nl> - * @ remark ignore size is not positive . <nl> + * read a slice in size bytes , move to next bytes . <nl> + * user can use this char * ptr directly , and should never free it . <nl> + * @ remark assert buffer already grow ( size ) . <nl> + * @ remark the ptr returned maybe invalid after grow ( x ) . <nl> * / <nl> - virtual void erase ( int size ) ; <nl> - private : <nl> + virtual char * read_slice ( int size ) ; <nl> / * * <nl> - * append specified bytes to buffer . <nl> - * @ param size the size of bytes <nl> - * @ remark assert size is positive . <nl> + * skip some bytes in buffer . <nl> + * @ param size the bytes to skip . positive to next ; negative to previous . <nl> + * @ remark assert buffer already grow ( size ) . <nl> * / <nl> - virtual void append ( const char * bytes , int size ) ; <nl> + virtual void skip ( int size ) ; <nl> public : <nl> / * * <nl> * grow buffer to the required size , loop to read from skt to fill . <nl> mmm a / trunk / src / rtmp / srs_protocol_stack . cpp <nl> ppp b / trunk / src / rtmp / srs_protocol_stack . cpp <nl> int SrsProtocol : : recv_interlaced_message ( SrsMessage * * pmsg ) <nl> / / chunk stream basic header . <nl> char fmt = 0 ; <nl> int cid = 0 ; <nl> - int bh_size = 0 ; <nl> - if ( ( ret = read_basic_header ( fmt , cid , bh_size ) ) ! = ERROR_SUCCESS ) { <nl> + if ( ( ret = read_basic_header ( fmt , cid ) ) ! = ERROR_SUCCESS ) { <nl> if ( ret ! = ERROR_SOCKET_TIMEOUT & & ! srs_is_client_gracefully_close ( ret ) ) { <nl> srs_error ( " read basic header failed . ret = % d " , ret ) ; <nl> } <nl> return ret ; <nl> } <nl> - srs_verbose ( " read basic header success . fmt = % d , cid = % d , bh_size = % d " , fmt , cid , bh_size ) ; <nl> + srs_verbose ( " read basic header success . fmt = % d , cid = % d " , fmt , cid ) ; <nl> <nl> / / once we got the chunk message header , <nl> / / that is there is a real message in cache , <nl> int SrsProtocol : : recv_interlaced_message ( SrsMessage * * pmsg ) <nl> } <nl> <nl> / / chunk stream message header <nl> - int mh_size = 0 ; <nl> - if ( ( ret = read_message_header ( chunk , fmt , bh_size , mh_size ) ) ! = ERROR_SUCCESS ) { <nl> + if ( ( ret = read_message_header ( chunk , fmt ) ) ! = ERROR_SUCCESS ) { <nl> if ( ret ! = ERROR_SOCKET_TIMEOUT & & ! srs_is_client_gracefully_close ( ret ) ) { <nl> srs_error ( " read message header failed . ret = % d " , ret ) ; <nl> } <nl> int SrsProtocol : : recv_interlaced_message ( SrsMessage * * pmsg ) <nl> <nl> / / read msg payload from chunk stream . <nl> SrsMessage * msg = NULL ; <nl> - int payload_size = 0 ; <nl> - if ( ( ret = read_message_payload ( chunk , bh_size , mh_size , payload_size , & msg ) ) ! = ERROR_SUCCESS ) { <nl> + if ( ( ret = read_message_payload ( chunk , & msg ) ) ! = ERROR_SUCCESS ) { <nl> if ( ret ! = ERROR_SOCKET_TIMEOUT & & ! srs_is_client_gracefully_close ( ret ) ) { <nl> srs_error ( " read message payload failed . ret = % d " , ret ) ; <nl> } <nl> int SrsProtocol : : recv_interlaced_message ( SrsMessage * * pmsg ) <nl> * Chunk stream IDs with values 64 - 319 could be represented by both 2 - <nl> * byte version and 3 - byte version of this field . 
<nl> * / <nl> - int SrsProtocol : : read_basic_header ( char & fmt , int & cid , int & bh_size ) <nl> + int SrsProtocol : : read_basic_header ( char & fmt , int & cid ) <nl> { <nl> int ret = ERROR_SUCCESS ; <nl> <nl> - int required_size = 1 ; <nl> - if ( ( ret = in_buffer - > grow ( skt , required_size ) ) ! = ERROR_SUCCESS ) { <nl> + if ( ( ret = in_buffer - > grow ( skt , 1 ) ) ! = ERROR_SUCCESS ) { <nl> if ( ret ! = ERROR_SOCKET_TIMEOUT & & ! srs_is_client_gracefully_close ( ret ) ) { <nl> - srs_error ( " read 1bytes basic header failed . required_size = % d , ret = % d " , required_size , ret ) ; <nl> + srs_error ( " read 1bytes basic header failed . required_size = % d , ret = % d " , 1 , ret ) ; <nl> } <nl> return ret ; <nl> } <nl> <nl> - char * p = in_buffer - > bytes ( ) ; <nl> - <nl> - fmt = ( * p > > 6 ) & 0x03 ; <nl> - cid = * p & 0x3f ; <nl> - bh_size = 1 ; <nl> + fmt = in_buffer - > read_1byte ( ) ; <nl> + cid = fmt & 0x3f ; <nl> + fmt = ( fmt > > 6 ) & 0x03 ; <nl> <nl> / / 2 - 63 , 1B chunk header <nl> if ( cid > 1 ) { <nl> - srs_verbose ( " % dbytes basic header parsed . fmt = % d , cid = % d " , bh_size , fmt , cid ) ; <nl> + srs_verbose ( " basic header parsed . fmt = % d , cid = % d " , fmt , cid ) ; <nl> return ret ; <nl> } <nl> <nl> / / 64 - 319 , 2B chunk header <nl> if ( cid = = 0 ) { <nl> - required_size = 2 ; <nl> - if ( ( ret = in_buffer - > grow ( skt , required_size ) ) ! = ERROR_SUCCESS ) { <nl> + if ( ( ret = in_buffer - > grow ( skt , 1 ) ) ! = ERROR_SUCCESS ) { <nl> if ( ret ! = ERROR_SOCKET_TIMEOUT & & ! srs_is_client_gracefully_close ( ret ) ) { <nl> - srs_error ( " read 2bytes basic header failed . required_size = % d , ret = % d " , required_size , ret ) ; <nl> + srs_error ( " read 2bytes basic header failed . required_size = % d , ret = % d " , 1 , ret ) ; <nl> } <nl> return ret ; <nl> } <nl> <nl> cid = 64 ; <nl> - cid + = ( u_int8_t ) * ( + + p ) ; <nl> - bh_size = 2 ; <nl> - srs_verbose ( " % dbytes basic header parsed . fmt = % d , cid = % d " , bh_size , fmt , cid ) ; <nl> + cid + = ( u_int8_t ) in_buffer - > read_1byte ( ) ; <nl> + srs_verbose ( " 2bytes basic header parsed . fmt = % d , cid = % d " , fmt , cid ) ; <nl> / / 64 - 65599 , 3B chunk header <nl> } else if ( cid = = 1 ) { <nl> - required_size = 3 ; <nl> - if ( ( ret = in_buffer - > grow ( skt , 3 ) ) ! = ERROR_SUCCESS ) { <nl> + if ( ( ret = in_buffer - > grow ( skt , 2 ) ) ! = ERROR_SUCCESS ) { <nl> if ( ret ! = ERROR_SOCKET_TIMEOUT & & ! srs_is_client_gracefully_close ( ret ) ) { <nl> - srs_error ( " read 3bytes basic header failed . required_size = % d , ret = % d " , required_size , ret ) ; <nl> + srs_error ( " read 3bytes basic header failed . required_size = % d , ret = % d " , 2 , ret ) ; <nl> } <nl> return ret ; <nl> } <nl> <nl> cid = 64 ; <nl> - cid + = ( u_int8_t ) * ( + + p ) ; <nl> - cid + = ( ( u_int8_t ) * ( + + p ) ) * 256 ; <nl> - bh_size = 3 ; <nl> - srs_verbose ( " % dbytes basic header parsed . fmt = % d , cid = % d " , bh_size , fmt , cid ) ; <nl> + cid + = ( u_int8_t ) in_buffer - > read_1byte ( ) ; <nl> + cid + = ( ( u_int8_t ) in_buffer - > read_1byte ( ) ) * 256 ; <nl> + srs_verbose ( " 3bytes basic header parsed . fmt = % d , cid = % d " , fmt , cid ) ; <nl> } else { <nl> srs_error ( " invalid path , impossible basic header . 
" ) ; <nl> srs_assert ( false ) ; <nl> int SrsProtocol : : read_basic_header ( char & fmt , int & cid , int & bh_size ) <nl> * fmt = 2 , 0x8X <nl> * fmt = 3 , 0xCX <nl> * / <nl> - int SrsProtocol : : read_message_header ( SrsChunkStream * chunk , char fmt , int bh_size , int & mh_size ) <nl> + int SrsProtocol : : read_message_header ( SrsChunkStream * chunk , char fmt ) <nl> { <nl> int ret = ERROR_SUCCESS ; <nl> <nl> int SrsProtocol : : read_message_header ( SrsChunkStream * chunk , char fmt , int bh_siz <nl> <nl> / / read message header from socket to buffer . <nl> static char mh_sizes [ ] = { 11 , 7 , 3 , 0 } ; <nl> - mh_size = mh_sizes [ ( int ) fmt ] ; <nl> + int mh_size = mh_sizes [ ( int ) fmt ] ; <nl> srs_verbose ( " calc chunk message header size . fmt = % d , mh_size = % d " , fmt , mh_size ) ; <nl> <nl> - int required_size = bh_size + mh_size ; <nl> - if ( ( ret = in_buffer - > grow ( skt , required_size ) ) ! = ERROR_SUCCESS ) { <nl> + if ( mh_size > 0 & & ( ret = in_buffer - > grow ( skt , mh_size ) ) ! = ERROR_SUCCESS ) { <nl> if ( ret ! = ERROR_SOCKET_TIMEOUT & & ! srs_is_client_gracefully_close ( ret ) ) { <nl> - srs_error ( " read % dbytes message header failed . required_size = % d , ret = % d " , mh_size , required_size , ret ) ; <nl> + srs_error ( " read % dbytes message header failed . ret = % d " , mh_size , ret ) ; <nl> } <nl> return ret ; <nl> } <nl> - char * p = in_buffer - > bytes ( ) + bh_size ; <nl> <nl> / * * <nl> * parse the message header . <nl> int SrsProtocol : : read_message_header ( SrsChunkStream * chunk , char fmt , int bh_siz <nl> * / <nl> / / see also : ngx_rtmp_recv <nl> if ( fmt < = RTMP_FMT_TYPE2 ) { <nl> + char * p = in_buffer - > read_slice ( mh_size ) ; <nl> + <nl> char * pp = ( char * ) & chunk - > header . timestamp_delta ; <nl> pp [ 2 ] = * p + + ; <nl> pp [ 1 ] = * p + + ; <nl> int SrsProtocol : : read_message_header ( SrsChunkStream * chunk , char fmt , int bh_siz <nl> / / read extended - timestamp <nl> if ( chunk - > extended_timestamp ) { <nl> mh_size + = 4 ; <nl> - required_size = bh_size + mh_size ; <nl> srs_verbose ( " read header ext time . fmt = % d , ext_time = % d , mh_size = % d " , fmt , chunk - > extended_timestamp , mh_size ) ; <nl> - if ( ( ret = in_buffer - > grow ( skt , required_size ) ) ! = ERROR_SUCCESS ) { <nl> + if ( ( ret = in_buffer - > grow ( skt , 4 ) ) ! = ERROR_SUCCESS ) { <nl> if ( ret ! = ERROR_SOCKET_TIMEOUT & & ! srs_is_client_gracefully_close ( ret ) ) { <nl> - srs_error ( " read % dbytes message header failed . required_size = % d , ret = % d " , mh_size , required_size , ret ) ; <nl> + srs_error ( " read % dbytes message header failed . required_size = % d , ret = % d " , mh_size , 4 , ret ) ; <nl> } <nl> return ret ; <nl> } <nl> + / / the ptr to the slice maybe invalid when grow ( ) <nl> + / / reset the p to get 4bytes slice . <nl> + char * p = in_buffer - > read_slice ( 4 ) ; <nl> <nl> u_int32_t timestamp = 0x00 ; <nl> char * pp = ( char * ) & timestamp ; <nl> int SrsProtocol : : read_message_header ( SrsChunkStream * chunk , char fmt , int bh_siz <nl> * / <nl> if ( ! is_first_chunk_of_msg & & chunk_timestamp > 0 & & chunk_timestamp ! = timestamp ) { <nl> mh_size - = 4 ; <nl> + in_buffer - > skip ( - 4 ) ; <nl> srs_info ( " no 4bytes extended timestamp in the continued chunk " ) ; <nl> } else { <nl> chunk - > header . 
timestamp = timestamp ; <nl> int SrsProtocol : : read_message_header ( SrsChunkStream * chunk , char fmt , int bh_siz <nl> return ret ; <nl> } <nl> <nl> - int SrsProtocol : : read_message_payload ( SrsChunkStream * chunk , int bh_size , int mh_size , int & payload_size , SrsMessage * * pmsg ) <nl> + int SrsProtocol : : read_message_payload ( SrsChunkStream * chunk , SrsMessage * * pmsg ) <nl> { <nl> int ret = ERROR_SUCCESS ; <nl> <nl> / / empty message <nl> if ( chunk - > header . payload_length < = 0 ) { <nl> - / / need erase the header in buffer . <nl> - in_buffer - > erase ( bh_size + mh_size ) ; <nl> - <nl> srs_trace ( " get an empty RTMP " <nl> " message ( type = % d , size = % d , time = % " PRId64 " , sid = % d ) " , chunk - > header . message_type , <nl> chunk - > header . payload_length , chunk - > header . timestamp , chunk - > header . stream_id ) ; <nl> int SrsProtocol : : read_message_payload ( SrsChunkStream * chunk , int bh_size , int mh <nl> srs_assert ( chunk - > header . payload_length > 0 ) ; <nl> <nl> / / the chunk payload size . <nl> - payload_size = chunk - > header . payload_length - chunk - > msg - > size ; <nl> + int payload_size = chunk - > header . payload_length - chunk - > msg - > size ; <nl> payload_size = srs_min ( payload_size , in_chunk_size ) ; <nl> srs_verbose ( " chunk payload size is % d , message_size = % d , received_size = % d , in_chunk_size = % d " , <nl> payload_size , chunk - > header . payload_length , chunk - > msg - > size , in_chunk_size ) ; <nl> int SrsProtocol : : read_message_payload ( SrsChunkStream * chunk , int bh_size , int mh <nl> / / create msg payload if not initialized <nl> if ( ! chunk - > msg - > payload ) { <nl> chunk - > msg - > payload = new char [ chunk - > header . payload_length ] ; <nl> - memset ( chunk - > msg - > payload , 0 , chunk - > header . payload_length ) ; <nl> - srs_verbose ( " create empty payload for RTMP message . size = % d " , chunk - > header . payload_length ) ; <nl> + srs_verbose ( " create payload for RTMP message . size = % d " , chunk - > header . payload_length ) ; <nl> } <nl> <nl> / / read payload to buffer <nl> - int required_size = bh_size + mh_size + payload_size ; <nl> - if ( ( ret = in_buffer - > grow ( skt , required_size ) ) ! = ERROR_SUCCESS ) { <nl> + if ( ( ret = in_buffer - > grow ( skt , payload_size ) ) ! = ERROR_SUCCESS ) { <nl> if ( ret ! = ERROR_SOCKET_TIMEOUT & & ! srs_is_client_gracefully_close ( ret ) ) { <nl> - srs_error ( " read payload failed . required_size = % d , ret = % d " , required_size , ret ) ; <nl> + srs_error ( " read payload failed . required_size = % d , ret = % d " , payload_size , ret ) ; <nl> } <nl> return ret ; <nl> } <nl> - memcpy ( chunk - > msg - > payload + chunk - > msg - > size , in_buffer - > bytes ( ) + bh_size + mh_size , payload_size ) ; <nl> - in_buffer - > erase ( bh_size + mh_size + payload_size ) ; <nl> + memcpy ( chunk - > msg - > payload + chunk - > msg - > size , in_buffer - > read_slice ( payload_size ) , payload_size ) ; <nl> chunk - > msg - > size + = payload_size ; <nl> <nl> - srs_verbose ( " chunk payload read completed . bh_size = % d , mh_size = % d , payload_size = % d " , bh_size , mh_size , payload_size ) ; <nl> + srs_verbose ( " chunk payload read completed . payload_size = % d " , payload_size ) ; <nl> <nl> / / got entire RTMP message ? <nl> if ( chunk - > header . payload_length = = chunk - > msg - > size ) { <nl> mmm a / trunk / src / rtmp / srs_protocol_stack . hpp <nl> ppp b / trunk / src / rtmp / srs_protocol_stack . 
hpp <nl> class SrsProtocol <nl> / * * <nl> * read the chunk basic header ( fmt , cid ) from chunk stream . <nl> * user can discovery a SrsChunkStream by cid . <nl> - * @ bh_size return the chunk basic header size , to remove the used bytes when finished . <nl> * / <nl> - virtual int read_basic_header ( char & fmt , int & cid , int & bh_size ) ; <nl> + virtual int read_basic_header ( char & fmt , int & cid ) ; <nl> / * * <nl> * read the chunk message header ( timestamp , payload_length , message_type , stream_id ) <nl> * from chunk stream and save to SrsChunkStream . <nl> - * @ mh_size return the chunk message header size , to remove the used bytes when finished . <nl> * / <nl> - virtual int read_message_header ( SrsChunkStream * chunk , char fmt , int bh_size , int & mh_size ) ; <nl> + virtual int read_message_header ( SrsChunkStream * chunk , char fmt ) ; <nl> / * * <nl> * read the chunk payload , remove the used bytes in buffer , <nl> * if got entire message , set the pmsg . <nl> - * @ payload_size read size in this roundtrip , generally a chunk size or left message size . <nl> * / <nl> - virtual int read_message_payload ( SrsChunkStream * chunk , int bh_size , int mh_size , int & payload_size , SrsMessage * * pmsg ) ; <nl> + virtual int read_message_payload ( SrsChunkStream * chunk , SrsMessage * * pmsg ) ; <nl> / * * <nl> * when recv message , update the context . <nl> * / <nl>
fix, improve about 15% performance for fast buffer. 2.0.49
ossrs/srs
29324fab469e0f7cef9ad04ffdbce832ac7dd9ff
2014-12-04T05:05:13Z
mmm a / src / base / bittorrent / torrenthandle . cpp <nl> ppp b / src / base / bittorrent / torrenthandle . cpp <nl> void TorrentHandle : : forceRecheck ( ) <nl> <nl> if ( isPaused ( ) ) { <nl> m_nativeHandle . stop_when_ready ( true ) ; <nl> - resume_impl ( true , true ) ; <nl> + resume_impl ( false ) ; <nl> } <nl> } <nl> <nl> void TorrentHandle : : pause ( ) <nl> <nl> void TorrentHandle : : resume ( bool forced ) <nl> { <nl> - resume_impl ( forced , false ) ; <nl> + resume_impl ( forced ) ; <nl> } <nl> <nl> - void TorrentHandle : : resume_impl ( bool forced , bool uploadMode ) <nl> + void TorrentHandle : : resume_impl ( bool forced ) <nl> { <nl> if ( hasError ( ) ) <nl> m_nativeHandle . clear_error ( ) ; <nl> void TorrentHandle : : resume_impl ( bool forced , bool uploadMode ) <nl> } <nl> <nl> m_nativeHandle . auto_managed ( ! forced ) ; <nl> - m_nativeHandle . set_upload_mode ( uploadMode ) ; <nl> m_nativeHandle . resume ( ) ; <nl> } <nl> <nl> mmm a / src / base / bittorrent / torrenthandle . h <nl> ppp b / src / base / bittorrent / torrenthandle . h <nl> namespace BitTorrent <nl> void handleMetadataReceivedAlert ( const libtorrent : : metadata_received_alert * p ) ; <nl> void handleStatsAlert ( const libtorrent : : stats_alert * p ) ; <nl> <nl> - void resume_impl ( bool forced , bool uploadMode ) ; <nl> + void resume_impl ( bool forced ) ; <nl> bool isMoveInProgress ( ) const ; <nl> QString nativeActualSavePath ( ) const ; <nl> <nl>
Force recheck multiple torrents one by one
qbittorrent/qBittorrent
61925f74fde8e4cffaa8561c4c961219a1c4e7e3
2019-02-01T15:15:54Z
mmm a / tensorflow / contrib / session_bundle / session_bundle . cc <nl> ppp b / tensorflow / contrib / session_bundle / session_bundle . cc <nl> namespace serving { <nl> namespace { <nl> <nl> / / Create a session using the given options and load the graph . <nl> - Status CreateSessionFromGraphDef ( <nl> - const tensorflow : : SessionOptions & options , const GraphDef & graph , <nl> - std : : unique_ptr < tensorflow : : Session > * session ) { <nl> + Status CreateSessionFromGraphDef ( const SessionOptions & options , <nl> + const GraphDef & graph , <nl> + std : : unique_ptr < Session > * session ) { <nl> session - > reset ( NewSession ( options ) ) ; <nl> return ( * session ) - > Create ( graph ) ; <nl> } <nl> <nl> Status GetMetaGraphDefFromExport ( const StringPiece export_dir , <nl> - tensorflow : : MetaGraphDef * meta_graph_def ) { <nl> + MetaGraphDef * meta_graph_def ) { <nl> const string meta_graph_def_path = <nl> - tensorflow : : io : : JoinPath ( export_dir , kMetaGraphDefFilename ) ; <nl> + io : : JoinPath ( export_dir , kMetaGraphDefFilename ) ; <nl> return ReadBinaryProto ( Env : : Default ( ) , meta_graph_def_path , meta_graph_def ) ; <nl> } <nl> <nl> Tensor CreateStringTensor ( const string & value ) { <nl> void AddAssetsTensorsToInputs ( const StringPiece export_dir , <nl> const std : : vector < AssetFile > & asset_files , <nl> std : : vector < std : : pair < string , Tensor > > * inputs ) { <nl> - if ( ! asset_files . empty ( ) ) { <nl> - for ( auto & asset : asset_files ) { <nl> - Tensor assets_file_tensor = CreateStringTensor ( tensorflow : : io : : JoinPath ( <nl> - tensorflow : : io : : JoinPath ( export_dir , kAssetsDirectory ) , <nl> - asset . filename ( ) ) ) ; <nl> - inputs - > push_back ( <nl> - { asset . tensor_binding ( ) . tensor_name ( ) , assets_file_tensor } ) ; <nl> - } <nl> + if ( asset_files . empty ( ) ) { <nl> + return ; <nl> } <nl> + for ( auto & asset : asset_files ) { <nl> + Tensor assets_file_tensor = CreateStringTensor ( io : : JoinPath ( <nl> + io : : JoinPath ( export_dir , kAssetsDirectory ) , asset . filename ( ) ) ) ; <nl> + inputs - > push_back ( <nl> + { asset . tensor_binding ( ) . tensor_name ( ) , assets_file_tensor } ) ; <nl> + } <nl> } <nl> <nl> / / Historically , model exporter ( exporter . py ) takes only saver with <nl> string GetVariablesFilename ( const StringPiece export_dir ) { <nl> const char kVariablesFilename [ ] = " export " ; <nl> const char kVariablesFilenamePattern [ ] = " export - \ ? \ ? \ ? \ ? \ ? - of - \ ? \ ? \ ? \ ? \ ? 
" ; <nl> if ( Env : : Default ( ) - > FileExists ( <nl> - tensorflow : : io : : JoinPath ( export_dir , kVariablesFilename ) ) ) { <nl> - return tensorflow : : io : : JoinPath ( export_dir , kVariablesFilename ) ; <nl> + io : : JoinPath ( export_dir , kVariablesFilename ) ) ) { <nl> + return io : : JoinPath ( export_dir , kVariablesFilename ) ; <nl> } else { <nl> - return tensorflow : : io : : JoinPath ( export_dir , kVariablesFilenamePattern ) ; <nl> + return io : : JoinPath ( export_dir , kVariablesFilenamePattern ) ; <nl> } <nl> } <nl> <nl> Status RunRestoreOp ( const RunOptions & run_options , const StringPiece export_dir , <nl> const std : : vector < AssetFile > & asset_files , <nl> const StringPiece restore_op_name , <nl> const StringPiece variables_filename_const_op_name , <nl> - tensorflow : : Session * session ) { <nl> + Session * session ) { <nl> LOG ( INFO ) < < " Running restore op for SessionBundle " ; <nl> Tensor variables_tensor = <nl> CreateStringTensor ( GetVariablesFilename ( export_dir ) ) ; <nl> Status RunRestoreOp ( const RunOptions & run_options , const StringPiece export_dir , <nl> <nl> Status RunInitOp ( const RunOptions & run_options , const StringPiece export_dir , <nl> const std : : vector < AssetFile > & asset_files , <nl> - const StringPiece init_op_name , tensorflow : : Session * session ) { <nl> + const StringPiece init_op_name , Session * session ) { <nl> LOG ( INFO ) < < " Running init op for SessionBundle " ; <nl> std : : vector < std : : pair < string , Tensor > > inputs ; <nl> AddAssetsTensorsToInputs ( export_dir , asset_files , & inputs ) ; <nl> Status RunInitOp ( const RunOptions & run_options , const StringPiece export_dir , <nl> <nl> } / / namespace <nl> <nl> - tensorflow : : Status LoadSessionBundleFromPath ( <nl> - const tensorflow : : SessionOptions & options , const StringPiece export_dir , <nl> - SessionBundle * const bundle ) { <nl> + Status LoadSessionBundleFromPath ( const SessionOptions & options , <nl> + const StringPiece export_dir , <nl> + SessionBundle * const bundle ) { <nl> TF_RETURN_IF_ERROR ( LoadSessionBundleFromPathUsingRunOptions ( <nl> options , RunOptions ( ) , export_dir , bundle ) ) ; <nl> return Status : : OK ( ) ; <nl> } <nl> <nl> - tensorflow : : Status LoadSessionBundleFromPathUsingRunOptions ( <nl> - const tensorflow : : SessionOptions & options , const RunOptions & run_options , <nl> - const StringPiece export_dir , SessionBundle * const bundle ) { <nl> + Status LoadSessionBundleFromPathUsingRunOptions ( const SessionOptions & options , <nl> + const RunOptions & run_options , <nl> + const StringPiece export_dir , <nl> + SessionBundle * const bundle ) { <nl> LOG ( INFO ) < < " Attempting to load a SessionBundle from : " < < export_dir ; <nl> const int64 start_seconds = Env : : Default ( ) - > NowSeconds ( ) ; <nl> TF_RETURN_IF_ERROR ( <nl> tensorflow : : Status LoadSessionBundleFromPathUsingRunOptions ( <nl> if ( ! any . Is < GraphDef > ( ) ) { <nl> return errors : : FailedPrecondition ( <nl> " Expected Any type_url for : " , <nl> - tensorflow : : GraphDef : : default_instance ( ) . descriptor ( ) - > full_name ( ) , <nl> - " . Got : " , string ( any . type_url ( ) . data ( ) , any . type_url ( ) . size ( ) ) , " . " ) ; <nl> + GraphDef : : default_instance ( ) . descriptor ( ) - > full_name ( ) , " . Got : " , <nl> + string ( any . type_url ( ) . data ( ) , any . type_url ( ) . size ( ) ) , " . " ) ; <nl> } <nl> - tensorflow : : GraphDef graph_def ; <nl> + GraphDef graph_def ; <nl> if ( ! any . 
UnpackTo ( & graph_def ) ) { <nl> return errors : : FailedPrecondition ( " Failed to unpack : " , <nl> any . DebugString ( ) ) ; <nl> tensorflow : : Status LoadSessionBundleFromPathUsingRunOptions ( <nl> CreateSessionFromGraphDef ( options , graph_def , & bundle - > session ) ) ; <nl> } else { <nl> / / Fallback to use the graph_def in the MetaGraphDef . <nl> - const tensorflow : : GraphDef & graph_def = bundle - > meta_graph_def . graph_def ( ) ; <nl> + const GraphDef & graph_def = bundle - > meta_graph_def . graph_def ( ) ; <nl> TF_RETURN_IF_ERROR ( <nl> CreateSessionFromGraphDef ( options , graph_def , & bundle - > session ) ) ; <nl> } <nl>
Small cleanup of session - bundle implementation .
tensorflow/tensorflow
07f6503f7e9c033c4e70743bc99326296bb82348
2016-08-05T18:02:49Z
mmm a / lib / browser / api / app . js <nl> ppp b / lib / browser / api / app . js <nl> <nl> + ' use strict ' ; <nl> + <nl> const deprecate = require ( ' electron ' ) . deprecate ; <nl> const session = require ( ' electron ' ) . session ; <nl> const Menu = require ( ' electron ' ) . Menu ; <nl> deprecate . rename ( app , ' terminate ' , ' quit ' ) ; <nl> deprecate . event ( app , ' finish - launching ' , ' ready ' , function ( ) { <nl> <nl> / / give default app a chance to setup default menu . <nl> - return setImmediate ( ( function ( _this ) { <nl> - return function ( ) { <nl> - return _this . emit ( ' finish - launching ' ) ; <nl> - } ; <nl> - } ) ( this ) ) ; <nl> + setImmediate ( ( ) = > { <nl> + this . emit ( ' finish - launching ' ) ; <nl> + } ) ; <nl> } ) ; <nl> <nl> deprecate . event ( app , ' activate - with - no - open - windows ' , ' activate ' , function ( event , hasVisibleWindows ) { <nl> mmm a / lib / browser / api / auto - updater / auto - updater - win . js <nl> ppp b / lib / browser / api / auto - updater / auto - updater - win . js <nl> AutoUpdater . prototype . checkForUpdates = function ( ) { <nl> return this . emitError ( ' Can not find Squirrel ' ) ; <nl> } <nl> this . emit ( ' checking - for - update ' ) ; <nl> - return squirrelUpdate . download ( this . updateURL , ( function ( _this ) { <nl> - return function ( error , update ) { <nl> + squirrelUpdate . download ( this . updateURL , ( error , update ) = > { <nl> + if ( error ! = null ) { <nl> + return this . emitError ( error ) ; <nl> + } <nl> + if ( update = = null ) { <nl> + return this . emit ( ' update - not - available ' ) ; <nl> + } <nl> + this . emit ( ' update - available ' ) ; <nl> + squirrelUpdate . update ( this . updateURL , ( error ) = > { <nl> + var date , releaseNotes , version ; <nl> if ( error ! = null ) { <nl> - return _this . emitError ( error ) ; <nl> + return this . emitError ( error ) ; <nl> } <nl> - if ( update = = null ) { <nl> - return _this . emit ( ' update - not - available ' ) ; <nl> - } <nl> - _this . emit ( ' update - available ' ) ; <nl> - return squirrelUpdate . update ( _this . updateURL , function ( error ) { <nl> - var date , releaseNotes , version ; <nl> - if ( error ! = null ) { <nl> - return _this . emitError ( error ) ; <nl> - } <nl> - releaseNotes = update . releaseNotes , version = update . version ; <nl> - <nl> - / / Following information is not available on Windows , so fake them . <nl> - date = new Date ; <nl> - return _this . emit ( ' update - downloaded ' , { } , releaseNotes , version , date , _this . updateURL , function ( ) { <nl> - return _this . quitAndInstall ( ) ; <nl> - } ) ; <nl> + releaseNotes = update . releaseNotes , version = update . version ; <nl> + <nl> + / / Following information is not available on Windows , so fake them . <nl> + date = new Date ; <nl> + this . emit ( ' update - downloaded ' , { } , releaseNotes , version , date , this . updateURL , ( ) = > { <nl> + this . quitAndInstall ( ) ; <nl> } ) ; <nl> - } ; <nl> - } ) ( this ) ) ; <nl> + } ) ; <nl> + } ) ; <nl> } ; <nl> <nl> / / Private : Emit both error object and message , this is to keep compatibility <nl> mmm a / lib / browser / api / browser - window . js <nl> ppp b / lib / browser / api / browser - window . js <nl> <nl> + ' use strict ' ; <nl> + <nl> const ipcMain = require ( ' electron ' ) . ipcMain ; <nl> const deprecate = require ( ' electron ' ) . deprecate ; <nl> const EventEmitter = require ( ' events ' ) . EventEmitter ; <nl> BrowserWindow . prototype . 
_init = function ( ) { <nl> / / window . resizeTo ( . . . ) <nl> / / window . moveTo ( . . . ) <nl> this . webContents . on ( ' move ' , ( event , size ) = > { <nl> - return this . setBounds ( size ) ; <nl> + this . setBounds ( size ) ; <nl> } ) ; <nl> <nl> / / Hide the auto - hide menu when webContents is focused . <nl> this . webContents . on ( ' activate ' , ( ) = > { <nl> if ( process . platform ! = = ' darwin ' & & this . isMenuBarAutoHide ( ) & & this . isMenuBarVisible ( ) ) { <nl> - return this . setMenuBarVisibility ( false ) ; <nl> + this . setMenuBarVisibility ( false ) ; <nl> } <nl> } ) ; <nl> <nl> / / Forward the crashed event . <nl> this . webContents . on ( ' crashed ' , ( ) = > { <nl> - return this . emit ( ' crashed ' ) ; <nl> + this . emit ( ' crashed ' ) ; <nl> } ) ; <nl> <nl> / / Change window title to page title . <nl> mmm a / lib / browser / api / menu - item . js <nl> ppp b / lib / browser / api / menu - item . js <nl> <nl> + ' use strict ' ; <nl> + <nl> var MenuItem , methodInBrowserWindow , nextCommandId , rolesMap ; <nl> <nl> nextCommandId = 0 ; <nl> MenuItem = ( function ( ) { <nl> throw new Error ( " Unknown menu type " + this . type ) ; <nl> } <nl> this . commandId = + + nextCommandId ; <nl> - this . click = ( function ( _this ) { <nl> - return function ( focusedWindow ) { <nl> - <nl> - / / Manually flip the checked flags when clicked . <nl> - var methodName , ref1 , ref2 ; <nl> - if ( ( ref1 = _this . type ) = = = ' checkbox ' | | ref1 = = = ' radio ' ) { <nl> - _this . checked = ! _this . checked ; <nl> - } <nl> - if ( _this . role & & rolesMap [ _this . role ] & & process . platform ! = = ' darwin ' & & ( focusedWindow ! = null ) ) { <nl> - methodName = rolesMap [ _this . role ] ; <nl> - if ( methodInBrowserWindow [ methodName ] ) { <nl> - return focusedWindow [ methodName ] ( ) ; <nl> - } else { <nl> - return ( ref2 = focusedWindow . webContents ) ! = null ? ref2 [ methodName ] ( ) : void 0 ; <nl> - } <nl> - } else if ( typeof click = = = ' function ' ) { <nl> - return click ( _this , focusedWindow ) ; <nl> - } else if ( typeof _this . selector = = = ' string ' & & process . platform = = = ' darwin ' ) { <nl> - return Menu . sendActionToFirstResponder ( _this . selector ) ; <nl> + this . click = ( focusedWindow ) = > { <nl> + / / Manually flip the checked flags when clicked . <nl> + var methodName , ref1 , ref2 ; <nl> + if ( ( ref1 = this . type ) = = = ' checkbox ' | | ref1 = = = ' radio ' ) { <nl> + this . checked = ! this . checked ; <nl> + } <nl> + if ( this . role & & rolesMap [ this . role ] & & process . platform ! = = ' darwin ' & & ( focusedWindow ! = null ) ) { <nl> + methodName = rolesMap [ this . role ] ; <nl> + if ( methodInBrowserWindow [ methodName ] ) { <nl> + return focusedWindow [ methodName ] ( ) ; <nl> + } else { <nl> + return ( ref2 = focusedWindow . webContents ) ! = null ? ref2 [ methodName ] ( ) : void 0 ; <nl> } <nl> - } ; <nl> - } ) ( this ) ; <nl> + } else if ( typeof click = = = ' function ' ) { <nl> + return click ( this , focusedWindow ) ; <nl> + } else if ( typeof this . selector = = = ' string ' & & process . platform = = = ' darwin ' ) { <nl> + return Menu . sendActionToFirstResponder ( this . selector ) ; <nl> + } <nl> + } ; <nl> } <nl> <nl> MenuItem . prototype . overrideProperty = function ( name , defaultValue ) { <nl> mmm a / lib / browser / api / menu . js <nl> ppp b / lib / browser / api / menu . js <nl> <nl> + ' use strict ' ; <nl> + <nl> const BrowserWindow = require ( ' electron ' ) . 
BrowserWindow ; <nl> const MenuItem = require ( ' electron ' ) . MenuItem ; <nl> const EventEmitter = require ( ' events ' ) . EventEmitter ; <nl> Menu . prototype . _init = function ( ) { <nl> this . groupsMap = { } ; <nl> this . items = [ ] ; <nl> return this . delegate = { <nl> - isCommandIdChecked : ( function ( _this ) { <nl> - return function ( commandId ) { <nl> - var ref1 ; <nl> - return ( ref1 = _this . commandsMap [ commandId ] ) ! = null ? ref1 . checked : void 0 ; <nl> - } ; <nl> - } ) ( this ) , <nl> - isCommandIdEnabled : ( function ( _this ) { <nl> - return function ( commandId ) { <nl> - var ref1 ; <nl> - return ( ref1 = _this . commandsMap [ commandId ] ) ! = null ? ref1 . enabled : void 0 ; <nl> - } ; <nl> - } ) ( this ) , <nl> - isCommandIdVisible : ( function ( _this ) { <nl> - return function ( commandId ) { <nl> - var ref1 ; <nl> - return ( ref1 = _this . commandsMap [ commandId ] ) ! = null ? ref1 . visible : void 0 ; <nl> - } ; <nl> - } ) ( this ) , <nl> - getAcceleratorForCommandId : ( function ( _this ) { <nl> - return function ( commandId ) { <nl> - var ref1 ; <nl> - return ( ref1 = _this . commandsMap [ commandId ] ) ! = null ? ref1 . accelerator : void 0 ; <nl> - } ; <nl> - } ) ( this ) , <nl> - getIconForCommandId : ( function ( _this ) { <nl> - return function ( commandId ) { <nl> - var ref1 ; <nl> - return ( ref1 = _this . commandsMap [ commandId ] ) ! = null ? ref1 . icon : void 0 ; <nl> - } ; <nl> - } ) ( this ) , <nl> - executeCommand : ( function ( _this ) { <nl> - return function ( commandId ) { <nl> - var ref1 ; <nl> - return ( ref1 = _this . commandsMap [ commandId ] ) ! = null ? ref1 . click ( BrowserWindow . getFocusedWindow ( ) ) : void 0 ; <nl> - } ; <nl> - } ) ( this ) , <nl> - menuWillShow : ( function ( _this ) { <nl> - return function ( ) { <nl> - <nl> - / / Make sure radio groups have at least one menu item seleted . <nl> - var checked , group , id , j , len , radioItem , ref1 ; <nl> - ref1 = _this . groupsMap ; <nl> - for ( id in ref1 ) { <nl> - group = ref1 [ id ] ; <nl> - checked = false ; <nl> - for ( j = 0 , len = group . length ; j < len ; j + + ) { <nl> - radioItem = group [ j ] ; <nl> - if ( ! radioItem . checked ) { <nl> - continue ; <nl> - } <nl> - checked = true ; <nl> - break ; <nl> - } <nl> - if ( ! checked ) { <nl> - v8Util . setHiddenValue ( group [ 0 ] , ' checked ' , true ) ; <nl> + isCommandIdChecked : ( commandId ) = > { <nl> + var command = this . commandsMap [ commandId ] ; <nl> + return command ! = null ? command . checked : undefined ; <nl> + } , <nl> + isCommandIdEnabled : ( commandId ) = > { <nl> + var command = this . commandsMap [ commandId ] ; <nl> + return command ! = null ? command . enabled : undefined ; <nl> + } , <nl> + isCommandIdVisible : ( commandId ) = > { <nl> + var command = this . commandsMap [ commandId ] ; <nl> + return command ! = null ? command . visible : undefined ; <nl> + } , <nl> + getAcceleratorForCommandId : ( commandId ) = > { <nl> + var command = this . commandsMap [ commandId ] ; <nl> + return command ! = null ? command . accelerator : undefined ; <nl> + } , <nl> + getIconForCommandId : ( commandId ) = > { <nl> + var command = this . commandsMap [ commandId ] ; <nl> + return command ! = null ? command . icon : void 0 ; <nl> + } , <nl> + executeCommand : ( commandId ) = > { <nl> + var command = this . commandsMap [ commandId ] ; <nl> + return command ! = null ? command . click ( BrowserWindow . 
getFocusedWindow ( ) ) : undefined ; <nl> + } , <nl> + menuWillShow : ( ) = > { <nl> + / / Make sure radio groups have at least one menu item seleted . <nl> + var checked , group , id , j , len , radioItem , ref1 ; <nl> + ref1 = this . groupsMap ; <nl> + results = [ ] ; <nl> + for ( id in ref1 ) { <nl> + group = ref1 [ id ] ; <nl> + checked = false ; <nl> + for ( j = 0 , len = group . length ; j < len ; j + + ) { <nl> + radioItem = group [ j ] ; <nl> + if ( ! radioItem . checked ) { <nl> + continue ; <nl> } <nl> + checked = true ; <nl> + break ; <nl> + } <nl> + if ( ! checked ) { <nl> + v8Util . setHiddenValue ( group [ 0 ] , ' checked ' , true ) ; <nl> } <nl> } ; <nl> - } ) ( this ) <nl> + } <nl> } ; <nl> } ; <nl> <nl> Menu . prototype . insert = function ( pos , item ) { <nl> get : function ( ) { <nl> return v8Util . getHiddenValue ( item , ' checked ' ) ; <nl> } , <nl> - set : ( function ( _this ) { <nl> - return function ( ) { <nl> - var j , len , otherItem , ref1 ; <nl> - ref1 = _this . groupsMap [ item . groupId ] ; <nl> - for ( j = 0 , len = ref1 . length ; j < len ; j + + ) { <nl> - otherItem = ref1 [ j ] ; <nl> - if ( otherItem ! = = item ) { <nl> - v8Util . setHiddenValue ( otherItem , ' checked ' , false ) ; <nl> - } <nl> + set : ( ) = > { <nl> + var j , len , otherItem , ref1 ; <nl> + ref1 = this . groupsMap [ item . groupId ] ; <nl> + for ( j = 0 , len = ref1 . length ; j < len ; j + + ) { <nl> + otherItem = ref1 [ j ] ; <nl> + if ( otherItem ! = = item ) { <nl> + v8Util . setHiddenValue ( otherItem , ' checked ' , false ) ; <nl> } <nl> - return v8Util . setHiddenValue ( item , ' checked ' , true ) ; <nl> - } ; <nl> - } ) ( this ) <nl> + } <nl> + return v8Util . setHiddenValue ( item , ' checked ' , true ) ; <nl> + } <nl> } ) ; <nl> this . insertRadioItem ( pos , item . commandId , item . label , item . groupId ) ; <nl> } <nl> mmm a / lib / browser / api / navigation - controller . js <nl> ppp b / lib / browser / api / navigation - controller . js <nl> <nl> + ' use strict ' ; <nl> + <nl> const ipcMain = require ( ' electron ' ) . ipcMain ; <nl> <nl> var slice = [ ] . slice ; <nl> var NavigationController = ( function ( ) { <nl> this . currentIndex + + ; <nl> this . history . push ( this . webContents . _getURL ( ) ) ; <nl> } <nl> - this . webContents . on ( ' navigation - entry - commited ' , ( function ( _this ) { <nl> - return function ( event , url , inPage , replaceEntry ) { <nl> - var currentEntry ; <nl> - if ( _this . inPageIndex > - 1 & & ! inPage ) { <nl> - <nl> - / / Navigated to a new page , clear in - page mark . <nl> - _this . inPageIndex = - 1 ; <nl> - } else if ( _this . inPageIndex = = = - 1 & & inPage ) { <nl> - <nl> - / / Started in - page navigations . <nl> - _this . inPageIndex = _this . currentIndex ; <nl> + this . webContents . on ( ' navigation - entry - commited ' , ( event , url , inPage , replaceEntry ) = > { <nl> + var currentEntry ; <nl> + if ( this . inPageIndex > - 1 & & ! inPage ) { <nl> + / / Navigated to a new page , clear in - page mark . <nl> + this . inPageIndex = - 1 ; <nl> + } else if ( this . inPageIndex = = = - 1 & & inPage ) { <nl> + / / Started in - page navigations . <nl> + this . inPageIndex = this . currentIndex ; <nl> + } <nl> + if ( this . pendingIndex > = 0 ) { <nl> + / / Go to index . <nl> + this . currentIndex = this . pendingIndex ; <nl> + this . pendingIndex = - 1 ; <nl> + return this . history [ this . currentIndex ] = url ; <nl> + } else if ( replaceEntry ) { <nl> + / / Non - user initialized navigation . 
<nl> + return this . history [ this . currentIndex ] = url ; <nl> + } else { <nl> + / / Normal navigation . Clear history . <nl> + this . history = this . history . slice ( 0 , this . currentIndex + 1 ) ; <nl> + currentEntry = this . history [ this . currentIndex ] ; <nl> + if ( ( currentEntry ! = null ? currentEntry . url : void 0 ) ! = = url ) { <nl> + this . currentIndex + + ; <nl> + return this . history . push ( url ) ; <nl> } <nl> - if ( _this . pendingIndex > = 0 ) { <nl> - <nl> - / / Go to index . <nl> - _this . currentIndex = _this . pendingIndex ; <nl> - _this . pendingIndex = - 1 ; <nl> - return _this . history [ _this . currentIndex ] = url ; <nl> - } else if ( replaceEntry ) { <nl> - <nl> - / / Non - user initialized navigation . <nl> - return _this . history [ _this . currentIndex ] = url ; <nl> - } else { <nl> - <nl> - / / Normal navigation . Clear history . <nl> - _this . history = _this . history . slice ( 0 , _this . currentIndex + 1 ) ; <nl> - currentEntry = _this . history [ _this . currentIndex ] ; <nl> - if ( ( currentEntry ! = null ? currentEntry . url : void 0 ) ! = = url ) { <nl> - _this . currentIndex + + ; <nl> - return _this . history . push ( url ) ; <nl> - } <nl> - } <nl> - } ; <nl> - } ) ( this ) ) ; <nl> + } <nl> + } ) ; <nl> } <nl> <nl> NavigationController . prototype . loadURL = function ( url , options ) { <nl> mmm a / lib / browser / api / web - contents . js <nl> ppp b / lib / browser / api / web - contents . js <nl> let wrapWebContents = function ( webContents ) { <nl> <nl> / / This error occurs when host could not be found . <nl> webContents . on ( ' did - fail - provisional - load ' , function ( ) { <nl> - var args ; <nl> - args = 1 < = arguments . length ? slice . call ( arguments , 0 ) : [ ] ; <nl> + var args = 1 < = arguments . length ? slice . call ( arguments , 0 ) : [ ] ; <nl> <nl> / / Calling loadURL during this event might cause crash , so delay the event <nl> / / until next tick . <nl> - return setImmediate ( ( function ( _this ) { <nl> - return function ( ) { <nl> - return _this . emit . apply ( _this , [ ' did - fail - load ' ] . concat ( slice . call ( args ) ) ) ; <nl> - } ; <nl> - } ) ( this ) ) ; <nl> + setImmediate ( ( ) = > { <nl> + this . emit . apply ( this , [ ' did - fail - load ' ] . concat ( slice . call ( args ) ) ) ; <nl> + } ) ; <nl> } ) ; <nl> <nl> / / Delays the page - title - updated event to next tick . <nl> webContents . on ( ' - page - title - updated ' , function ( ) { <nl> - var args ; <nl> - args = 1 < = arguments . length ? slice . call ( arguments , 0 ) : [ ] ; <nl> - return setImmediate ( ( function ( _this ) { <nl> - return function ( ) { <nl> - return _this . emit . apply ( _this , [ ' page - title - updated ' ] . concat ( slice . call ( args ) ) ) ; <nl> - } ; <nl> - } ) ( this ) ) ; <nl> + var args = 1 < = arguments . length ? slice . call ( arguments , 0 ) : [ ] ; <nl> + setImmediate ( ( ) = > { <nl> + this . emit . apply ( this , [ ' page - title - updated ' ] . concat ( slice . call ( args ) ) ) ; <nl> + } ) ; <nl> } ) ; <nl> <nl> / / Deprecated . <nl> deprecate . rename ( webContents , ' loadUrl ' , ' loadURL ' ) ; <nl> deprecate . rename ( webContents , ' getUrl ' , ' getURL ' ) ; <nl> deprecate . event ( webContents , ' page - title - set ' , ' page - title - updated ' , function ( ) { <nl> - var args ; <nl> - args = 1 < = arguments . length ? slice . call ( arguments , 0 ) : [ ] ; <nl> + var args = 1 < = arguments . length ? slice . call ( arguments , 0 ) : [ ] ; <nl> return this . emit . 
apply ( this , [ ' page - title - set ' ] . concat ( slice . call ( args ) ) ) ; <nl> } ) ; <nl> return webContents . printToPDF = function ( options , callback ) { <nl> mmm a / lib / common / api / crash - reporter . js <nl> ppp b / lib / common / api / crash - reporter . js <nl> <nl> + ' use strict ' ; <nl> + <nl> const os = require ( ' os ' ) ; <nl> const path = require ( ' path ' ) ; <nl> const spawn = require ( ' child_process ' ) . spawn ; <nl> var CrashReporter = ( function ( ) { <nl> deprecate . log ( ' submitURL is now a required option to crashReporter . start ' ) ; <nl> return ; <nl> } <nl> - start = ( function ( _this ) { <nl> - return function ( ) { <nl> - return binding . start ( _this . productName , companyName , submitURL , autoSubmit , ignoreSystemCrashHandler , extra ) ; <nl> - } ; <nl> - } ) ( this ) ; <nl> + start = ( ) = > { <nl> + binding . start ( this . productName , companyName , submitURL , autoSubmit , ignoreSystemCrashHandler , extra ) ; <nl> + } ; <nl> if ( process . platform = = = ' win32 ' ) { <nl> args = [ " - - reporter - url = " + submitURL , " - - application - name = " + this . productName , " - - v = 1 " ] ; <nl> env = { <nl> mmm a / lib / renderer / override . js <nl> ppp b / lib / renderer / override . js <nl> <nl> + ' use strict ' ; <nl> + <nl> const ipcRenderer = require ( ' electron ' ) . ipcRenderer ; <nl> const remote = require ( ' electron ' ) . remote ; <nl> <nl> var BrowserWindowProxy = ( function ( ) { <nl> function BrowserWindowProxy ( guestId1 ) { <nl> this . guestId = guestId1 ; <nl> this . closed = false ; <nl> - ipcRenderer . once ( " ATOM_SHELL_GUEST_WINDOW_MANAGER_WINDOW_CLOSED_ " + this . guestId , ( function ( _this ) { <nl> - return function ( ) { <nl> - BrowserWindowProxy . remove ( _this . guestId ) ; <nl> - return ( _this . closed = true ) ; <nl> - } ; <nl> - } ) ( this ) ) ; <nl> + ipcRenderer . once ( " ATOM_SHELL_GUEST_WINDOW_MANAGER_WINDOW_CLOSED_ " + this . guestId , ( ) = > { <nl> + BrowserWindowProxy . remove ( this . guestId ) ; <nl> + this . closed = true ; <nl> + } ) ; <nl> } <nl> <nl> BrowserWindowProxy . prototype . close = function ( ) { <nl> if ( process . openerId ! = null ) { <nl> <nl> ipcRenderer . on ( ' ATOM_RENDERER_WINDOW_VISIBILITY_CHANGE ' , function ( event , isVisible , isMinimized ) { <nl> var hasChanged = _isVisible ! = isVisible | | _isMinimized ! = isMinimized ; <nl> - <nl> + <nl> if ( hasChanged ) { <nl> _isVisible = isVisible ; <nl> _isMinimized = isMinimized ; <nl> mmm a / lib / renderer / web - view / web - view . js <nl> ppp b / lib / renderer / web - view / web - view . js <nl> var WebViewImpl = ( function ( ) { <nl> <nl> WebViewImpl . prototype . setupFocusPropagation = function ( ) { <nl> if ( ! this . webviewNode . hasAttribute ( ' tabIndex ' ) ) { <nl> - <nl> / / < webview > needs a tabIndex in order to be focusable . <nl> / / TODO ( fsamuel ) : It would be nice to avoid exposing a tabIndex attribute <nl> / / to allow < webview > to be focusable . <nl> / / See http : / / crbug . com / 231664 . <nl> this . webviewNode . setAttribute ( ' tabIndex ' , - 1 ) ; <nl> } <nl> - this . webviewNode . addEventListener ( ' focus ' , ( function ( _this ) { <nl> - return function ( ) { <nl> - / / Focus the BrowserPlugin when the < webview > takes focus . <nl> - return _this . browserPluginNode . focus ( ) ; <nl> - } ; <nl> - } ) ( this ) ) ; <nl> - return this . webviewNode . 
addEventListener ( ' blur ' , ( function ( _this ) { <nl> - return function ( ) { <nl> - / / Blur the BrowserPlugin when the < webview > loses focus . <nl> - return _this . browserPluginNode . blur ( ) ; <nl> - } ; <nl> - } ) ( this ) ) ; <nl> + <nl> + / / Focus the BrowserPlugin when the < webview > takes focus . <nl> + this . webviewNode . addEventListener ( ' focus ' , ( ) = > { <nl> + this . browserPluginNode . focus ( ) ; <nl> + } ) ; <nl> + <nl> + / / Blur the BrowserPlugin when the < webview > loses focus . <nl> + this . webviewNode . addEventListener ( ' blur ' , ( ) = > { <nl> + this . browserPluginNode . blur ( ) ; <nl> + } ) ; <nl> } ; <nl> <nl> <nl> var WebViewImpl = ( function ( ) { <nl> } ; <nl> <nl> WebViewImpl . prototype . createGuest = function ( ) { <nl> - return guestViewInternal . createGuest ( this . buildParams ( ) , ( function ( _this ) { <nl> - return function ( event , guestInstanceId ) { <nl> - return _this . attachWindow ( guestInstanceId ) ; <nl> - } ; <nl> - } ) ( this ) ) ; <nl> + return guestViewInternal . createGuest ( this . buildParams ( ) , ( event , guestInstanceId ) = > { <nl> + this . attachWindow ( guestInstanceId ) ; <nl> + } ) ; <nl> } ; <nl> <nl> WebViewImpl . prototype . dispatchEvent = function ( webViewEvent ) { <nl> var WebViewImpl = ( function ( ) { <nl> var propertyName ; <nl> propertyName = ' on ' + eventName . toLowerCase ( ) ; <nl> return Object . defineProperty ( this . webviewNode , propertyName , { <nl> - get : ( function ( _this ) { <nl> - return function ( ) { <nl> - return _this . on [ propertyName ] ; <nl> - } ; <nl> - } ) ( this ) , <nl> - set : ( function ( _this ) { <nl> - return function ( value ) { <nl> - if ( _this . on [ propertyName ] ) { <nl> - _this . webviewNode . removeEventListener ( eventName , _this . on [ propertyName ] ) ; <nl> - } <nl> - _this . on [ propertyName ] = value ; <nl> - if ( value ) { <nl> - return _this . webviewNode . addEventListener ( eventName , value ) ; <nl> - } <nl> - } ; <nl> - } ) ( this ) , <nl> + get : ( ) = > { <nl> + this . on [ propertyName ] ; <nl> + } , <nl> + set : ( value ) = > { <nl> + if ( this . on [ propertyName ] ) { <nl> + this . webviewNode . removeEventListener ( eventName , this . on [ propertyName ] ) ; <nl> + } <nl> + this . on [ propertyName ] = value ; <nl> + if ( value ) { <nl> + return this . webviewNode . addEventListener ( eventName , value ) ; <nl> + } <nl> + } , <nl> enumerable : true <nl> } ) ; <nl> } ; <nl>
Use arrow functions to replace the old CoffeeScript - generated ` _this ` wrappers
electron/electron
a3f08c9b514bde39067fa3c5a029a163484abdde
2016-03-11T19:25:56Z
mmm a / tensorflow / python / kernel_tests / distributions / kullback_leibler_test . py <nl> ppp b / tensorflow / python / kernel_tests / distributions / kullback_leibler_test . py <nl> def _kl ( a , b , name = None ) : <nl> # pylint : disable = unused - argument , unused - variable <nl> <nl> with self . test_session ( ) : <nl> - a = MyDistException ( loc = 0 . 0 , scale = 1 . 0 ) <nl> + a = MyDistException ( loc = 0 . 0 , scale = 1 . 0 , allow_nan_stats = False ) <nl> kl = kullback_leibler . kl_divergence ( a , a , allow_nan_stats = False ) <nl> with self . assertRaisesOpError ( <nl> " KL calculation between . * and . * returned NaN values " ) : <nl> kl . eval ( ) <nl> + with self . assertRaisesOpError ( <nl> + " KL calculation between . * and . * returned NaN values " ) : <nl> + a . kl_divergence ( a ) . eval ( ) <nl> + a = MyDistException ( loc = 0 . 0 , scale = 1 . 0 , allow_nan_stats = True ) <nl> kl_ok = kullback_leibler . kl_divergence ( a , a ) <nl> self . assertAllEqual ( [ float ( " nan " ) ] , kl_ok . eval ( ) ) <nl> + self_kl_ok = a . kl_divergence ( a ) <nl> + self . assertAllEqual ( [ float ( " nan " ) ] , self_kl_ok . eval ( ) ) <nl> + cross_ok = a . cross_entropy ( a ) <nl> + self . assertAllEqual ( [ float ( " nan " ) ] , cross_ok . eval ( ) ) <nl> <nl> def testRegistrationFailures ( self ) : <nl> <nl> def testExactRegistrationsAllMatch ( self ) : <nl> for ( k , v ) in _DIVERGENCES . items ( ) : <nl> self . assertEqual ( v , _registered_kl ( * k ) ) <nl> <nl> - def testIndirectRegistration ( self ) : <nl> + def _testIndirectRegistration ( self , fn ) : <nl> <nl> class Sub1 ( normal . Normal ) : <nl> - pass <nl> + <nl> + def entropy ( self ) : <nl> + return " " <nl> <nl> class Sub2 ( normal . Normal ) : <nl> - pass <nl> + <nl> + def entropy ( self ) : <nl> + return " " <nl> <nl> class Sub11 ( Sub1 ) : <nl> - pass <nl> + <nl> + def entropy ( self ) : <nl> + return " " <nl> <nl> # pylint : disable = unused - argument , unused - variable <nl> @ kullback_leibler . RegisterKL ( Sub1 , Sub1 ) <nl> def _kl21 ( a , b , name = None ) : <nl> sub2 = Sub2 ( loc = 0 . 0 , scale = 1 . 0 ) <nl> sub11 = Sub11 ( loc = 0 . 0 , scale = 1 . 0 ) <nl> <nl> - self . assertEqual ( " sub1 - 1 " , kullback_leibler . kl_divergence ( sub1 , sub1 ) ) <nl> - self . assertEqual ( " sub1 - 2 " , kullback_leibler . kl_divergence ( sub1 , sub2 ) ) <nl> - self . assertEqual ( " sub2 - 1 " , kullback_leibler . kl_divergence ( sub2 , sub1 ) ) <nl> - self . assertEqual ( " sub1 - 1 " , kullback_leibler . kl_divergence ( sub11 , sub11 ) ) <nl> - self . assertEqual ( " sub1 - 1 " , kullback_leibler . kl_divergence ( sub11 , sub1 ) ) <nl> - self . assertEqual ( " sub1 - 2 " , kullback_leibler . kl_divergence ( sub11 , sub2 ) ) <nl> - self . assertEqual ( " sub1 - 1 " , kullback_leibler . kl_divergence ( sub11 , sub1 ) ) <nl> - self . assertEqual ( " sub1 - 2 " , kullback_leibler . kl_divergence ( sub11 , sub2 ) ) <nl> - self . assertEqual ( " sub2 - 1 " , kullback_leibler . kl_divergence ( sub2 , sub11 ) ) <nl> - self . assertEqual ( " sub1 - 1 " , kullback_leibler . kl_divergence ( sub1 , sub11 ) ) <nl> + self . assertEqual ( " sub1 - 1 " , fn ( sub1 , sub1 ) ) <nl> + self . assertEqual ( " sub1 - 2 " , fn ( sub1 , sub2 ) ) <nl> + self . assertEqual ( " sub2 - 1 " , fn ( sub2 , sub1 ) ) <nl> + self . assertEqual ( " sub1 - 1 " , fn ( sub11 , sub11 ) ) <nl> + self . assertEqual ( " sub1 - 1 " , fn ( sub11 , sub1 ) ) <nl> + self . assertEqual ( " sub1 - 2 " , fn ( sub11 , sub2 ) ) <nl> + self . 
assertEqual ( " sub1 - 1 " , fn ( sub11 , sub1 ) ) <nl> + self . assertEqual ( " sub1 - 2 " , fn ( sub11 , sub2 ) ) <nl> + self . assertEqual ( " sub2 - 1 " , fn ( sub2 , sub11 ) ) <nl> + self . assertEqual ( " sub1 - 1 " , fn ( sub1 , sub11 ) ) <nl> + <nl> + def testIndirectRegistrationKLFun ( self ) : <nl> + self . _testIndirectRegistration ( kullback_leibler . kl_divergence ) <nl> + <nl> + def testIndirectRegistrationKLSelf ( self ) : <nl> + self . _testIndirectRegistration ( <nl> + lambda p , q : p . kl_divergence ( q ) ) <nl> + <nl> + def testIndirectRegistrationCrossEntropy ( self ) : <nl> + self . _testIndirectRegistration ( <nl> + lambda p , q : p . cross_entropy ( q ) ) <nl> + <nl> + def testFunctionCrossEntropy ( self ) : <nl> + self . _testIndirectRegistration ( kullback_leibler . cross_entropy ) <nl> <nl> <nl> if __name__ = = " __main__ " : <nl> mmm a / tensorflow / python / ops / distributions / distribution . py <nl> ppp b / tensorflow / python / ops / distributions / distribution . py <nl> <nl> from tensorflow . python . framework import tensor_util <nl> from tensorflow . python . ops import array_ops <nl> from tensorflow . python . ops import math_ops <nl> + from tensorflow . python . ops . distributions import kullback_leibler <nl> from tensorflow . python . ops . distributions import util <nl> from tensorflow . python . util import tf_inspect <nl> <nl> <nl> ] <nl> <nl> _DISTRIBUTION_PUBLIC_METHOD_WRAPPERS = [ <nl> - " batch_shape_tensor " , " batch_shape " , " event_shape_tensor " , " event_shape " , <nl> - " sample " , " log_prob " , " prob " , " log_cdf " , " cdf " , " log_survival_function " , <nl> - " survival_function " , " entropy " , " mean " , " variance " , " stddev " , " mode " , <nl> - " covariance " ] <nl> + " batch_shape " , <nl> + " batch_shape_tensor " , <nl> + " cdf " , <nl> + " covariance " , <nl> + " cross_entropy " , <nl> + " entropy " , <nl> + " event_shape " , <nl> + " event_shape_tensor " , <nl> + " kl_divergence " , <nl> + " log_cdf " , <nl> + " log_prob " , <nl> + " log_survival_function " , <nl> + " mean " , <nl> + " mode " , <nl> + " prob " , <nl> + " sample " , <nl> + " stddev " , <nl> + " survival_function " , <nl> + " variance " , <nl> + ] <nl> <nl> <nl> @ six . add_metaclass ( abc . ABCMeta ) <nl> def is_scalar_event ( self , name = " is_scalar_event " ) : <nl> " " " Indicates that ` event_shape = = [ ] ` . <nl> <nl> Args : <nl> - name : The name to give this op . <nl> + name : Python ` str ` prepended to names of ops created by this function . <nl> <nl> Returns : <nl> is_scalar_event : ` bool ` scalar ` Tensor ` . <nl> def is_scalar_batch ( self , name = " is_scalar_batch " ) : <nl> " " " Indicates that ` batch_shape = = [ ] ` . <nl> <nl> Args : <nl> - name : The name to give this op . <nl> + name : Python ` str ` prepended to names of ops created by this function . <nl> <nl> Returns : <nl> is_scalar_batch : ` bool ` scalar ` Tensor ` . <nl> def log_prob ( self , value , name = " log_prob " ) : <nl> <nl> Args : <nl> value : ` float ` or ` double ` ` Tensor ` . <nl> - name : The name to give this op . <nl> + name : Python ` str ` prepended to names of ops created by this function . <nl> <nl> Returns : <nl> log_prob : a ` Tensor ` of shape ` sample_shape ( x ) + self . batch_shape ` with <nl> def prob ( self , value , name = " prob " ) : <nl> <nl> Args : <nl> value : ` float ` or ` double ` ` Tensor ` . <nl> - name : The name to give this op . <nl> + name : Python ` str ` prepended to names of ops created by this function . 
<nl> <nl> Returns : <nl> prob : a ` Tensor ` of shape ` sample_shape ( x ) + self . batch_shape ` with <nl> def log_cdf ( self , value , name = " log_cdf " ) : <nl> <nl> Args : <nl> value : ` float ` or ` double ` ` Tensor ` . <nl> - name : The name to give this op . <nl> + name : Python ` str ` prepended to names of ops created by this function . <nl> <nl> Returns : <nl> logcdf : a ` Tensor ` of shape ` sample_shape ( x ) + self . batch_shape ` with <nl> def cdf ( self , value , name = " cdf " ) : <nl> <nl> Args : <nl> value : ` float ` or ` double ` ` Tensor ` . <nl> - name : The name to give this op . <nl> + name : Python ` str ` prepended to names of ops created by this function . <nl> <nl> Returns : <nl> cdf : a ` Tensor ` of shape ` sample_shape ( x ) + self . batch_shape ` with <nl> def log_survival_function ( self , value , name = " log_survival_function " ) : <nl> <nl> Args : <nl> value : ` float ` or ` double ` ` Tensor ` . <nl> - name : The name to give this op . <nl> + name : Python ` str ` prepended to names of ops created by this function . <nl> <nl> Returns : <nl> ` Tensor ` of shape ` sample_shape ( x ) + self . batch_shape ` with values of type <nl> def survival_function ( self , value , name = " survival_function " ) : <nl> <nl> Args : <nl> value : ` float ` or ` double ` ` Tensor ` . <nl> - name : The name to give this op . <nl> + name : Python ` str ` prepended to names of ops created by this function . <nl> <nl> Returns : <nl> ` Tensor ` of shape ` sample_shape ( x ) + self . batch_shape ` with values of type <nl> def quantile ( self , value , name = " quantile " ) : <nl> <nl> Args : <nl> value : ` float ` or ` double ` ` Tensor ` . <nl> - name : The name to give this op . <nl> + name : Python ` str ` prepended to names of ops created by this function . <nl> <nl> Returns : <nl> quantile : a ` Tensor ` of shape ` sample_shape ( x ) + self . batch_shape ` with <nl> def variance ( self , name = " variance " ) : <nl> denotes expectation , and ` Var . shape = batch_shape + event_shape ` . <nl> <nl> Args : <nl> - name : The name to give this op . <nl> + name : Python ` str ` prepended to names of ops created by this function . <nl> <nl> Returns : <nl> variance : Floating - point ` Tensor ` with shape identical to <nl> def stddev ( self , name = " stddev " ) : <nl> denotes expectation , and ` stddev . shape = batch_shape + event_shape ` . <nl> <nl> Args : <nl> - name : The name to give this op . <nl> + name : Python ` str ` prepended to names of ops created by this function . <nl> <nl> Returns : <nl> stddev : Floating - point ` Tensor ` with shape identical to <nl> def covariance ( self , name = " covariance " ) : <nl> length - ` k ' ` vector . <nl> <nl> Args : <nl> - name : The name to give this op . <nl> + name : Python ` str ` prepended to names of ops created by this function . <nl> <nl> Returns : <nl> covariance : Floating - point ` Tensor ` with shape ` [ B1 , . . . , Bn , k ' , k ' ] ` <nl> def mode ( self , name = " mode " ) : <nl> with self . _name_scope ( name ) : <nl> return self . _mode ( ) <nl> <nl> + def _cross_entropy ( self , other ) : <nl> + return kullback_leibler . cross_entropy ( <nl> + self , other , allow_nan_stats = self . allow_nan_stats ) <nl> + <nl> + def cross_entropy ( self , other , name = " cross_entropy " ) : <nl> + " " " Computes the ( Shannon ) cross entropy . <nl> + <nl> + Denote this distribution ( ` self ` ) by ` P ` and the ` other ` distribution by <nl> + ` Q ` . 
Assuming ` P , Q ` are absolutely continuous with respect to <nl> + one another and permit densities ` p ( x ) dr ( x ) ` and ` q ( x ) dr ( x ) ` , ( Shanon ) <nl> + cross entropy is defined as : <nl> + <nl> + ` ` ` none <nl> + H [ P , Q ] = E_p [ - log q ( X ) ] = - int_F p ( x ) log q ( x ) dr ( x ) <nl> + ` ` ` <nl> + <nl> + where ` F ` denotes the support of the random variable ` X ~ P ` . <nl> + <nl> + Args : <nl> + other : ` tf . distributions . Distribution ` instance . <nl> + name : Python ` str ` prepended to names of ops created by this function . <nl> + <nl> + Returns : <nl> + cross_entropy : ` self . dtype ` ` Tensor ` with shape ` [ B1 , . . . , Bn ] ` <nl> + representing ` n ` different calculations of ( Shanon ) cross entropy . <nl> + " " " <nl> + with self . _name_scope ( name ) : <nl> + return self . _cross_entropy ( other ) <nl> + <nl> + def _kl_divergence ( self , other ) : <nl> + return kullback_leibler . kl_divergence ( <nl> + self , other , allow_nan_stats = self . allow_nan_stats ) <nl> + <nl> + def kl_divergence ( self , other , name = " kl_divergence " ) : <nl> + " " " Computes the Kullback - - Leibler divergence . <nl> + <nl> + Denote this distribution ( ` self ` ) by ` p ` and the ` other ` distribution by <nl> + ` q ` . Assuming ` p , q ` are absolutely continuous with respect to reference <nl> + measure ` r ` , ( Shanon ) cross entropy is defined as : <nl> + <nl> + ` ` ` none <nl> + KL [ p , q ] = E_p [ log ( p ( X ) / q ( X ) ) ] <nl> + = - int_F p ( x ) log q ( x ) dr ( x ) + int_F p ( x ) log p ( x ) dr ( x ) <nl> + = H [ p , q ] - H [ p ] <nl> + ` ` ` <nl> + <nl> + where ` F ` denotes the support of the random variable ` X ~ p ` , ` H [ . , . ] ` <nl> + denotes ( Shanon ) cross entropy , and ` H [ . ] ` denotes ( Shanon ) entropy . <nl> + <nl> + Args : <nl> + other : ` tf . distributions . Distribution ` instance . <nl> + name : Python ` str ` prepended to names of ops created by this function . <nl> + <nl> + Returns : <nl> + kl_divergence : ` self . dtype ` ` Tensor ` with shape ` [ B1 , . . . , Bn ] ` <nl> + representing ` n ` different calculations of the Kullback - Leibler <nl> + divergence . <nl> + " " " <nl> + with self . _name_scope ( name ) : <nl> + return self . _kl_divergence ( other ) <nl> + <nl> @ contextlib . contextmanager <nl> def _name_scope ( self , name = None , values = None ) : <nl> " " " Helper function to standardize op scope . " " " <nl> mmm a / tensorflow / python / ops / distributions / kullback_leibler . py <nl> ppp b / tensorflow / python / ops / distributions / kullback_leibler . py <nl> def kl_divergence ( distribution_a , distribution_b , <nl> return array_ops . identity ( kl_t , name = " checked_kl " ) <nl> <nl> <nl> + def cross_entropy ( ref , other , <nl> + allow_nan_stats = True , name = None ) : <nl> + " " " Computes the ( Shannon ) cross entropy . <nl> + <nl> + Denote two distributions by ` P ` ( ` ref ` ) and ` Q ` ( ` other ` ) . Assuming ` P , Q ` <nl> + are absolutely continuous with respect to one another and permit densities <nl> + ` p ( x ) dr ( x ) ` and ` q ( x ) dr ( x ) ` , ( Shanon ) cross entropy is defined as : <nl> + <nl> + ` ` ` none <nl> + H [ P , Q ] = E_p [ - log q ( X ) ] = - int_F p ( x ) log q ( x ) dr ( x ) <nl> + ` ` ` <nl> + <nl> + where ` F ` denotes the support of the random variable ` X ~ P ` . <nl> + <nl> + Args : <nl> + ref : ` tf . distributions . Distribution ` instance . <nl> + other : ` tf . distributions . Distribution ` instance . 
<nl> + allow_nan_stats : Python ` bool ` , default ` True ` . When ` True ` , <nl> + statistics ( e . g . , mean , mode , variance ) use the value " ` NaN ` " to <nl> + indicate the result is undefined . When ` False ` , an exception is raised <nl> + if one or more of the statistic ' s batch members are undefined . <nl> + name : Python ` str ` prepended to names of ops created by this function . <nl> + <nl> + Returns : <nl> + cross_entropy : ` ref . dtype ` ` Tensor ` with shape ` [ B1 , . . . , Bn ] ` <nl> + representing ` n ` different calculations of ( Shanon ) cross entropy . <nl> + " " " <nl> + with ops . name_scope ( name , " cross_entropy " ) : <nl> + return ref . entropy ( ) + kl_divergence ( <nl> + ref , other , allow_nan_stats = allow_nan_stats ) <nl> + <nl> + <nl> class RegisterKL ( object ) : <nl> " " " Decorator to register a KL divergence implementation function . <nl> <nl> mmm a / tensorflow / tools / api / golden / tensorflow . distributions . - bernoulli . pbtxt <nl> ppp b / tensorflow / tools / api / golden / tensorflow . distributions . - bernoulli . pbtxt <nl> tf_class { <nl> name : " covariance " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' covariance \ ' ] , " <nl> } <nl> + member_method { <nl> + name : " cross_entropy " <nl> + argspec : " args = [ \ ' self \ ' , \ ' other \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' cross_entropy \ ' ] , " <nl> + } <nl> member_method { <nl> name : " entropy " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' entropy \ ' ] , " <nl> tf_class { <nl> name : " is_scalar_event " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' is_scalar_event \ ' ] , " <nl> } <nl> + member_method { <nl> + name : " kl_divergence " <nl> + argspec : " args = [ \ ' self \ ' , \ ' other \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' kl_divergence \ ' ] , " <nl> + } <nl> member_method { <nl> name : " log_cdf " <nl> argspec : " args = [ \ ' self \ ' , \ ' value \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' log_cdf \ ' ] , " <nl> mmm a / tensorflow / tools / api / golden / tensorflow . distributions . - beta . pbtxt <nl> ppp b / tensorflow / tools / api / golden / tensorflow . distributions . - beta . 
pbtxt <nl> tf_class { <nl> name : " covariance " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' covariance \ ' ] , " <nl> } <nl> + member_method { <nl> + name : " cross_entropy " <nl> + argspec : " args = [ \ ' self \ ' , \ ' other \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' cross_entropy \ ' ] , " <nl> + } <nl> member_method { <nl> name : " entropy " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' entropy \ ' ] , " <nl> tf_class { <nl> name : " is_scalar_event " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' is_scalar_event \ ' ] , " <nl> } <nl> + member_method { <nl> + name : " kl_divergence " <nl> + argspec : " args = [ \ ' self \ ' , \ ' other \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' kl_divergence \ ' ] , " <nl> + } <nl> member_method { <nl> name : " log_cdf " <nl> argspec : " args = [ \ ' self \ ' , \ ' value \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' log_cdf \ ' ] , " <nl> mmm a / tensorflow / tools / api / golden / tensorflow . distributions . - categorical . pbtxt <nl> ppp b / tensorflow / tools / api / golden / tensorflow . distributions . - categorical . pbtxt <nl> tf_class { <nl> name : " covariance " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' covariance \ ' ] , " <nl> } <nl> + member_method { <nl> + name : " cross_entropy " <nl> + argspec : " args = [ \ ' self \ ' , \ ' other \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' cross_entropy \ ' ] , " <nl> + } <nl> member_method { <nl> name : " entropy " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' entropy \ ' ] , " <nl> tf_class { <nl> name : " is_scalar_event " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' is_scalar_event \ ' ] , " <nl> } <nl> + member_method { <nl> + name : " kl_divergence " <nl> + argspec : " args = [ \ ' self \ ' , \ ' other \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' kl_divergence \ ' ] , " <nl> + } <nl> member_method { <nl> name : " log_cdf " <nl> argspec : " args = [ \ ' self \ ' , \ ' value \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' log_cdf \ ' ] , " <nl> mmm a / tensorflow / tools / api / golden / tensorflow . distributions . - dirichlet - multinomial . pbtxt <nl> ppp b / tensorflow / tools / api / golden / tensorflow . distributions . - dirichlet - multinomial . 
pbtxt <nl> tf_class { <nl> name : " covariance " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' covariance \ ' ] , " <nl> } <nl> + member_method { <nl> + name : " cross_entropy " <nl> + argspec : " args = [ \ ' self \ ' , \ ' other \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' cross_entropy \ ' ] , " <nl> + } <nl> member_method { <nl> name : " entropy " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' entropy \ ' ] , " <nl> tf_class { <nl> name : " is_scalar_event " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' is_scalar_event \ ' ] , " <nl> } <nl> + member_method { <nl> + name : " kl_divergence " <nl> + argspec : " args = [ \ ' self \ ' , \ ' other \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' kl_divergence \ ' ] , " <nl> + } <nl> member_method { <nl> name : " log_cdf " <nl> argspec : " args = [ \ ' self \ ' , \ ' value \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' log_cdf \ ' ] , " <nl> mmm a / tensorflow / tools / api / golden / tensorflow . distributions . - dirichlet . pbtxt <nl> ppp b / tensorflow / tools / api / golden / tensorflow . distributions . - dirichlet . pbtxt <nl> tf_class { <nl> name : " covariance " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' covariance \ ' ] , " <nl> } <nl> + member_method { <nl> + name : " cross_entropy " <nl> + argspec : " args = [ \ ' self \ ' , \ ' other \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' cross_entropy \ ' ] , " <nl> + } <nl> member_method { <nl> name : " entropy " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' entropy \ ' ] , " <nl> tf_class { <nl> name : " is_scalar_event " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' is_scalar_event \ ' ] , " <nl> } <nl> + member_method { <nl> + name : " kl_divergence " <nl> + argspec : " args = [ \ ' self \ ' , \ ' other \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' kl_divergence \ ' ] , " <nl> + } <nl> member_method { <nl> name : " log_cdf " <nl> argspec : " args = [ \ ' self \ ' , \ ' value \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' log_cdf \ ' ] , " <nl> mmm a / tensorflow / tools / api / golden / tensorflow . distributions . - distribution . pbtxt <nl> ppp b / tensorflow / tools / api / golden / tensorflow . distributions . - distribution . 
pbtxt <nl> tf_class { <nl> name : " covariance " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' covariance \ ' ] , " <nl> } <nl> + member_method { <nl> + name : " cross_entropy " <nl> + argspec : " args = [ \ ' self \ ' , \ ' other \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' cross_entropy \ ' ] , " <nl> + } <nl> member_method { <nl> name : " entropy " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' entropy \ ' ] , " <nl> tf_class { <nl> name : " is_scalar_event " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' is_scalar_event \ ' ] , " <nl> } <nl> + member_method { <nl> + name : " kl_divergence " <nl> + argspec : " args = [ \ ' self \ ' , \ ' other \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' kl_divergence \ ' ] , " <nl> + } <nl> member_method { <nl> name : " log_cdf " <nl> argspec : " args = [ \ ' self \ ' , \ ' value \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' log_cdf \ ' ] , " <nl> mmm a / tensorflow / tools / api / golden / tensorflow . distributions . - exponential . pbtxt <nl> ppp b / tensorflow / tools / api / golden / tensorflow . distributions . - exponential . pbtxt <nl> tf_class { <nl> name : " covariance " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' covariance \ ' ] , " <nl> } <nl> + member_method { <nl> + name : " cross_entropy " <nl> + argspec : " args = [ \ ' self \ ' , \ ' other \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' cross_entropy \ ' ] , " <nl> + } <nl> member_method { <nl> name : " entropy " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' entropy \ ' ] , " <nl> tf_class { <nl> name : " is_scalar_event " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' is_scalar_event \ ' ] , " <nl> } <nl> + member_method { <nl> + name : " kl_divergence " <nl> + argspec : " args = [ \ ' self \ ' , \ ' other \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' kl_divergence \ ' ] , " <nl> + } <nl> member_method { <nl> name : " log_cdf " <nl> argspec : " args = [ \ ' self \ ' , \ ' value \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' log_cdf \ ' ] , " <nl> mmm a / tensorflow / tools / api / golden / tensorflow . distributions . - gamma . pbtxt <nl> ppp b / tensorflow / tools / api / golden / tensorflow . distributions . - gamma . 
pbtxt <nl> tf_class { <nl> name : " covariance " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' covariance \ ' ] , " <nl> } <nl> + member_method { <nl> + name : " cross_entropy " <nl> + argspec : " args = [ \ ' self \ ' , \ ' other \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' cross_entropy \ ' ] , " <nl> + } <nl> member_method { <nl> name : " entropy " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' entropy \ ' ] , " <nl> tf_class { <nl> name : " is_scalar_event " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' is_scalar_event \ ' ] , " <nl> } <nl> + member_method { <nl> + name : " kl_divergence " <nl> + argspec : " args = [ \ ' self \ ' , \ ' other \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' kl_divergence \ ' ] , " <nl> + } <nl> member_method { <nl> name : " log_cdf " <nl> argspec : " args = [ \ ' self \ ' , \ ' value \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' log_cdf \ ' ] , " <nl> mmm a / tensorflow / tools / api / golden / tensorflow . distributions . - laplace . pbtxt <nl> ppp b / tensorflow / tools / api / golden / tensorflow . distributions . - laplace . pbtxt <nl> tf_class { <nl> name : " covariance " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' covariance \ ' ] , " <nl> } <nl> + member_method { <nl> + name : " cross_entropy " <nl> + argspec : " args = [ \ ' self \ ' , \ ' other \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' cross_entropy \ ' ] , " <nl> + } <nl> member_method { <nl> name : " entropy " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' entropy \ ' ] , " <nl> tf_class { <nl> name : " is_scalar_event " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' is_scalar_event \ ' ] , " <nl> } <nl> + member_method { <nl> + name : " kl_divergence " <nl> + argspec : " args = [ \ ' self \ ' , \ ' other \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' kl_divergence \ ' ] , " <nl> + } <nl> member_method { <nl> name : " log_cdf " <nl> argspec : " args = [ \ ' self \ ' , \ ' value \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' log_cdf \ ' ] , " <nl> mmm a / tensorflow / tools / api / golden / tensorflow . distributions . - multinomial . pbtxt <nl> ppp b / tensorflow / tools / api / golden / tensorflow . distributions . - multinomial . 
pbtxt <nl> tf_class { <nl> name : " covariance " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' covariance \ ' ] , " <nl> } <nl> + member_method { <nl> + name : " cross_entropy " <nl> + argspec : " args = [ \ ' self \ ' , \ ' other \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' cross_entropy \ ' ] , " <nl> + } <nl> member_method { <nl> name : " entropy " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' entropy \ ' ] , " <nl> tf_class { <nl> name : " is_scalar_event " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' is_scalar_event \ ' ] , " <nl> } <nl> + member_method { <nl> + name : " kl_divergence " <nl> + argspec : " args = [ \ ' self \ ' , \ ' other \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' kl_divergence \ ' ] , " <nl> + } <nl> member_method { <nl> name : " log_cdf " <nl> argspec : " args = [ \ ' self \ ' , \ ' value \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' log_cdf \ ' ] , " <nl> mmm a / tensorflow / tools / api / golden / tensorflow . distributions . - normal . pbtxt <nl> ppp b / tensorflow / tools / api / golden / tensorflow . distributions . - normal . pbtxt <nl> tf_class { <nl> name : " covariance " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' covariance \ ' ] , " <nl> } <nl> + member_method { <nl> + name : " cross_entropy " <nl> + argspec : " args = [ \ ' self \ ' , \ ' other \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' cross_entropy \ ' ] , " <nl> + } <nl> member_method { <nl> name : " entropy " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' entropy \ ' ] , " <nl> tf_class { <nl> name : " is_scalar_event " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' is_scalar_event \ ' ] , " <nl> } <nl> + member_method { <nl> + name : " kl_divergence " <nl> + argspec : " args = [ \ ' self \ ' , \ ' other \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' kl_divergence \ ' ] , " <nl> + } <nl> member_method { <nl> name : " log_cdf " <nl> argspec : " args = [ \ ' self \ ' , \ ' value \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' log_cdf \ ' ] , " <nl> mmm a / tensorflow / tools / api / golden / tensorflow . distributions . - student - t . pbtxt <nl> ppp b / tensorflow / tools / api / golden / tensorflow . distributions . - student - t . 
pbtxt <nl> tf_class { <nl> name : " covariance " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' covariance \ ' ] , " <nl> } <nl> + member_method { <nl> + name : " cross_entropy " <nl> + argspec : " args = [ \ ' self \ ' , \ ' other \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' cross_entropy \ ' ] , " <nl> + } <nl> member_method { <nl> name : " entropy " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' entropy \ ' ] , " <nl> tf_class { <nl> name : " is_scalar_event " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' is_scalar_event \ ' ] , " <nl> } <nl> + member_method { <nl> + name : " kl_divergence " <nl> + argspec : " args = [ \ ' self \ ' , \ ' other \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' kl_divergence \ ' ] , " <nl> + } <nl> member_method { <nl> name : " log_cdf " <nl> argspec : " args = [ \ ' self \ ' , \ ' value \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' log_cdf \ ' ] , " <nl> mmm a / tensorflow / tools / api / golden / tensorflow . distributions . - uniform . pbtxt <nl> ppp b / tensorflow / tools / api / golden / tensorflow . distributions . - uniform . pbtxt <nl> tf_class { <nl> name : " covariance " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' covariance \ ' ] , " <nl> } <nl> + member_method { <nl> + name : " cross_entropy " <nl> + argspec : " args = [ \ ' self \ ' , \ ' other \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' cross_entropy \ ' ] , " <nl> + } <nl> member_method { <nl> name : " entropy " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' entropy \ ' ] , " <nl> tf_class { <nl> name : " is_scalar_event " <nl> argspec : " args = [ \ ' self \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' is_scalar_event \ ' ] , " <nl> } <nl> + member_method { <nl> + name : " kl_divergence " <nl> + argspec : " args = [ \ ' self \ ' , \ ' other \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' kl_divergence \ ' ] , " <nl> + } <nl> member_method { <nl> name : " log_cdf " <nl> argspec : " args = [ \ ' self \ ' , \ ' value \ ' , \ ' name \ ' ] , varargs = None , keywords = None , defaults = [ \ ' log_cdf \ ' ] , " <nl>
Add ` cross_entropy ` and ` kl_divergence ` to ` tf . distributions . Distribution ` .
tensorflow/tensorflow
2c78d7bfaf3158df22401c03fc4de2cb99526d4f
2017-12-01T18:16:40Z
mmm a / modules / prediction / evaluator / vehicle / cruise_mlp_evaluator . cc <nl> ppp b / modules / prediction / evaluator / vehicle / cruise_mlp_evaluator . cc <nl> void CruiseMLPEvaluator : : SetLaneFeatureValues <nl> } <nl> <nl> double heading = feature . velocity_heading ( ) ; <nl> + double speed = feature . speed ( ) ; <nl> for ( int i = 0 ; i < lane_sequence_ptr - > lane_segment_size ( ) ; + + i ) { <nl> if ( feature_values - > size ( ) > = LANE_FEATURE_SIZE ) { <nl> break ; <nl> void CruiseMLPEvaluator : : SetLaneFeatureValues <nl> double diff_y = lane_point . position ( ) . y ( ) - feature . position ( ) . y ( ) ; <nl> double angle = std : : atan2 ( diff_x , diff_y ) ; <nl> feature_values - > push_back ( lane_point . kappa ( ) ) ; <nl> + feature_values - > push_back ( speed * speed * lane_point . kappa ( ) ) ; <nl> feature_values - > push_back ( std : : sin ( angle - heading ) ) ; <nl> feature_values - > push_back ( lane_point . relative_l ( ) ) ; <nl> feature_values - > push_back ( lane_point . heading ( ) ) ; <nl> void CruiseMLPEvaluator : : SetLaneFeatureValues <nl> <nl> / / If the lane points are not sufficient , apply a linear extrapolation . <nl> std : : size_t size = feature_values - > size ( ) ; <nl> - while ( size > = 4 & & size < LANE_FEATURE_SIZE ) { <nl> - double lane_kappa = feature_values - > operator [ ] ( size - 5 ) ; <nl> + while ( size > = 6 & & size < LANE_FEATURE_SIZE ) { <nl> + double lane_kappa = feature_values - > operator [ ] ( size - 6 ) ; <nl> + double centri_acc = feature_values - > operator [ ] ( size - 5 ) ; <nl> double heading_diff = feature_values - > operator [ ] ( size - 4 ) ; <nl> double lane_l_diff = feature_values - > operator [ ] ( size - 3 ) ; <nl> double heading = feature_values - > operator [ ] ( size - 2 ) ; <nl> double angle_diff = feature_values - > operator [ ] ( size - 1 ) ; <nl> feature_values - > push_back ( lane_kappa ) ; <nl> + feature_values - > push_back ( centri_acc ) ; <nl> feature_values - > push_back ( heading_diff ) ; <nl> feature_values - > push_back ( lane_l_diff ) ; <nl> feature_values - > push_back ( heading ) ; <nl> mmm a / modules / prediction / evaluator / vehicle / cruise_mlp_evaluator . h <nl> ppp b / modules / prediction / evaluator / vehicle / cruise_mlp_evaluator . h <nl> class CruiseMLPEvaluator : public Evaluator { <nl> <nl> private : <nl> static const size_t OBSTACLE_FEATURE_SIZE = 22 ; <nl> - static const size_t LANE_FEATURE_SIZE = 50 ; <nl> + static const size_t LANE_FEATURE_SIZE = 60 ; <nl> std : : unique_ptr < FnnVehicleModel > model_ptr_ ; <nl> } ; <nl> <nl>
Prediction : add centripetal acceleration feature into cruise mlp evaluator
ApolloAuto/apollo
b7fa59476c666d99a78e7889ce965943fd34d317
2018-12-13T23:17:48Z
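Editor's note: the Apollo diff above adds a centripetal-acceleration term ( speed² · kappa ) to each lane-point feature group and widens the padding stride from 5 to 6 values ( LANE_FEATURE_SIZE 50 → 60 ). Below is a minimal, self-contained C++ sketch of that feature layout; the `LanePoint` struct, the `AppendLaneFeatures` name, and the constants are illustrative stand-ins for Apollo's proto types, not the actual evaluator code.

```cpp
#include <cmath>
#include <cstddef>
#include <vector>

// Hypothetical stand-in for Apollo's LanePoint proto, for illustration only.
struct LanePoint { double kappa, relative_l, heading, x, y; };

constexpr std::size_t kValuesPerLanePoint = 6;  // kappa, v^2*kappa, sin(angle-heading), l, heading, angle-heading
constexpr std::size_t kLaneFeatureSize = 60;    // 10 lane points * 6 values, matching the updated evaluator

void AppendLaneFeatures(const std::vector<LanePoint>& points,
                        double obstacle_x, double obstacle_y,
                        double velocity_heading, double speed,
                        std::vector<double>* feature_values) {
  for (const auto& p : points) {
    if (feature_values->size() >= kLaneFeatureSize) break;
    const double angle = std::atan2(p.x - obstacle_x, p.y - obstacle_y);
    feature_values->push_back(p.kappa);
    feature_values->push_back(speed * speed * p.kappa);  // centripetal acceleration term added by the diff
    feature_values->push_back(std::sin(angle - velocity_heading));
    feature_values->push_back(p.relative_l);
    feature_values->push_back(p.heading);
    feature_values->push_back(angle - velocity_heading);
  }
  // If lane points run out, repeat the last complete 6-value group as padding,
  // mirroring the widened extrapolation loop in the diff.
  while (feature_values->size() >= kValuesPerLanePoint &&
         feature_values->size() < kLaneFeatureSize) {
    const std::size_t start = feature_values->size() - kValuesPerLanePoint;
    for (std::size_t i = 0; i < kValuesPerLanePoint; ++i) {
      const double v = (*feature_values)[start + i];
      feature_values->push_back(v);
    }
  }
}
```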
mmm a / src / core / CMakeLists . txt <nl> ppp b / src / core / CMakeLists . txt <nl> add_library ( core STATIC <nl> hle / service / service . h <nl> hle / service / set / set . cpp <nl> hle / service / set / set . h <nl> + hle / service / set / set_cal . cpp <nl> + hle / service / set / set_cal . h <nl> + hle / service / set / set_fd . cpp <nl> + hle / service / set / set_fd . h <nl> + hle / service / set / set_sys . cpp <nl> + hle / service / set / set_sys . h <nl> + hle / service / set / settings . cpp <nl> + hle / service / set / settings . h <nl> hle / service / sm / controller . cpp <nl> hle / service / sm / controller . h <nl> hle / service / sm / sm . cpp <nl> mmm a / src / core / hle / service / service . cpp <nl> ppp b / src / core / hle / service / service . cpp <nl> <nl> # include " core / hle / service / nvdrv / nvdrv . h " <nl> # include " core / hle / service / pctl / pctl . h " <nl> # include " core / hle / service / service . h " <nl> - # include " core / hle / service / set / set . h " <nl> + # include " core / hle / service / set / settings . h " <nl> # include " core / hle / service / sm / controller . h " <nl> # include " core / hle / service / sm / sm . h " <nl> # include " core / hle / service / sockets / sockets . h " <nl> mmm a / src / core / hle / service / set / set . cpp <nl> ppp b / src / core / hle / service / set / set . cpp <nl> void SET : : GetAvailableLanguageCodes ( Kernel : : HLERequestContext & ctx ) { <nl> LOG_WARNING ( Service_SET , " ( STUBBED ) called " ) ; <nl> } <nl> <nl> - SET : : SET ( const char * name ) : ServiceFramework ( name ) { <nl> + SET : : SET ( ) : ServiceFramework ( " set " ) { <nl> static const FunctionInfo functions [ ] = { <nl> + { 0 , nullptr , " GetLanguageCode " } , <nl> { 1 , & SET : : GetAvailableLanguageCodes , " GetAvailableLanguageCodes " } , <nl> + { 2 , nullptr , " MakeLanguageCode " } , <nl> + { 3 , nullptr , " GetAvailableLanguageCodeCount " } , <nl> + { 4 , nullptr , " GetRegionCode " } , <nl> + { 5 , nullptr , " GetAvailableLanguageCodes2 " } , <nl> + { 6 , nullptr , " GetAvailableLanguageCodeCount2 " } , <nl> + { 7 , nullptr , " GetKeyCodeMap " } , <nl> } ; <nl> RegisterHandlers ( functions ) ; <nl> } <nl> <nl> - void InstallInterfaces ( SM : : ServiceManager & service_manager ) { <nl> - std : : make_shared < SET > ( " set " ) - > InstallAsService ( service_manager ) ; <nl> - } <nl> - <nl> } / / namespace Set <nl> } / / namespace Service <nl> mmm a / src / core / hle / service / set / set . h <nl> ppp b / src / core / hle / service / set / set . h <nl> namespace Set { <nl> <nl> class SET final : public ServiceFramework < SET > { <nl> public : <nl> - explicit SET ( const char * name ) ; <nl> + explicit SET ( ) ; <nl> ~ SET ( ) = default ; <nl> <nl> private : <nl> void GetAvailableLanguageCodes ( Kernel : : HLERequestContext & ctx ) ; <nl> } ; <nl> <nl> - / / / Registers all Set services with the specified service manager . <nl> - void InstallInterfaces ( SM : : ServiceManager & service_manager ) ; <nl> - <nl> } / / namespace Set <nl> } / / namespace Service <nl> new file mode 100644 <nl> index 00000000000 . . 6231acd967b <nl> mmm / dev / null <nl> ppp b / src / core / hle / service / set / set_cal . cpp <nl> <nl> + / / Copyright 2018 yuzu emulator team <nl> + / / Licensed under GPLv2 or any later version <nl> + / / Refer to the license . txt file included . <nl> + <nl> + # include " core / hle / service / set / set_cal . 
h " <nl> + <nl> + namespace Service { <nl> + namespace Set { <nl> + <nl> + SET_CAL : : SET_CAL ( ) : ServiceFramework ( " set : cal " ) { <nl> + static const FunctionInfo functions [ ] = { <nl> + { 0 , nullptr , " GetBluetoothBdAddress " } , <nl> + { 1 , nullptr , " GetConfigurationId1 " } , <nl> + { 2 , nullptr , " GetAccelerometerOffset " } , <nl> + { 3 , nullptr , " GetAccelerometerScale " } , <nl> + { 4 , nullptr , " GetGyroscopeOffset " } , <nl> + { 5 , nullptr , " GetGyroscopeScale " } , <nl> + { 6 , nullptr , " GetWirelessLanMacAddress " } , <nl> + { 7 , nullptr , " GetWirelessLanCountryCodeCount " } , <nl> + { 8 , nullptr , " GetWirelessLanCountryCodes " } , <nl> + { 9 , nullptr , " GetSerialNumber " } , <nl> + { 10 , nullptr , " SetInitialSystemAppletProgramId " } , <nl> + { 11 , nullptr , " SetOverlayDispProgramId " } , <nl> + { 12 , nullptr , " GetBatteryLot " } , <nl> + { 14 , nullptr , " GetEciDeviceCertificate " } , <nl> + { 15 , nullptr , " GetEticketDeviceCertificate " } , <nl> + { 16 , nullptr , " GetSslKey " } , <nl> + { 17 , nullptr , " GetSslCertificate " } , <nl> + { 18 , nullptr , " GetGameCardKey " } , <nl> + { 19 , nullptr , " GetGameCardCertificate " } , <nl> + { 20 , nullptr , " GetEciDeviceKey " } , <nl> + { 21 , nullptr , " GetEticketDeviceKey " } , <nl> + { 22 , nullptr , " GetSpeakerParameter " } , <nl> + { 23 , nullptr , " GetLcdVendorId " } , <nl> + } ; <nl> + RegisterHandlers ( functions ) ; <nl> + } <nl> + <nl> + } / / namespace Set <nl> + } / / namespace Service <nl> new file mode 100644 <nl> index 00000000000 . . 9c0b851d0ad <nl> mmm / dev / null <nl> ppp b / src / core / hle / service / set / set_cal . h <nl> <nl> + / / Copyright 2018 yuzu emulator team <nl> + / / Licensed under GPLv2 or any later version <nl> + / / Refer to the license . txt file included . <nl> + <nl> + # pragma once <nl> + <nl> + # include " core / hle / service / service . h " <nl> + <nl> + namespace Service { <nl> + namespace Set { <nl> + <nl> + class SET_CAL final : public ServiceFramework < SET_CAL > { <nl> + public : <nl> + explicit SET_CAL ( ) ; <nl> + ~ SET_CAL ( ) = default ; <nl> + } ; <nl> + <nl> + } / / namespace Set <nl> + } / / namespace Service <nl> new file mode 100644 <nl> index 00000000000 . . 8320d425053 <nl> mmm / dev / null <nl> ppp b / src / core / hle / service / set / set_fd . cpp <nl> <nl> + / / Copyright 2018 yuzu emulator team <nl> + / / Licensed under GPLv2 or any later version <nl> + / / Refer to the license . txt file included . <nl> + <nl> + # include " core / hle / service / set / set_fd . h " <nl> + <nl> + namespace Service { <nl> + namespace Set { <nl> + <nl> + SET_FD : : SET_FD ( ) : ServiceFramework ( " set : fd " ) { <nl> + static const FunctionInfo functions [ ] = { <nl> + { 2 , nullptr , " SetSettingsItemValue " } , <nl> + { 3 , nullptr , " ResetSettingsItemValue " } , <nl> + { 4 , nullptr , " CreateSettingsItemKeyIterator " } , <nl> + { 10 , nullptr , " ReadSettings " } , <nl> + { 11 , nullptr , " ResetSettings " } , <nl> + { 20 , nullptr , " SetWebInspectorFlag " } , <nl> + { 21 , nullptr , " SetAllowedSslHosts " } , <nl> + { 22 , nullptr , " SetHostFsMountPoint " } , <nl> + } ; <nl> + RegisterHandlers ( functions ) ; <nl> + } <nl> + <nl> + } / / namespace Set <nl> + } / / namespace Service <nl> new file mode 100644 <nl> index 00000000000 . . 65b36bcb3d6 <nl> mmm / dev / null <nl> ppp b / src / core / hle / service / set / set_fd . 
h <nl> <nl> + / / Copyright 2018 yuzu emulator team <nl> + / / Licensed under GPLv2 or any later version <nl> + / / Refer to the license . txt file included . <nl> + <nl> + # pragma once <nl> + <nl> + # include " core / hle / service / service . h " <nl> + <nl> + namespace Service { <nl> + namespace Set { <nl> + <nl> + class SET_FD final : public ServiceFramework < SET_FD > { <nl> + public : <nl> + explicit SET_FD ( ) ; <nl> + ~ SET_FD ( ) = default ; <nl> + } ; <nl> + <nl> + } / / namespace Set <nl> + } / / namespace Service <nl> new file mode 100644 <nl> index 00000000000 . . 363abd10aa2 <nl> mmm / dev / null <nl> ppp b / src / core / hle / service / set / set_sys . cpp <nl> <nl> + / / Copyright 2018 yuzu emulator team <nl> + / / Licensed under GPLv2 or any later version <nl> + / / Refer to the license . txt file included . <nl> + <nl> + # include " common / logging / log . h " <nl> + # include " core / hle / ipc_helpers . h " <nl> + # include " core / hle / kernel / client_port . h " <nl> + # include " core / hle / service / set / set_sys . h " <nl> + <nl> + namespace Service { <nl> + namespace Set { <nl> + <nl> + void SET_SYS : : GetColorSetId ( Kernel : : HLERequestContext & ctx ) { <nl> + <nl> + IPC : : ResponseBuilder rb { ctx , 3 } ; <nl> + <nl> + rb . Push ( RESULT_SUCCESS ) ; <nl> + rb . Push < u32 > ( 0 ) ; <nl> + <nl> + LOG_WARNING ( Service_SET , " ( STUBBED ) called " ) ; <nl> + } <nl> + <nl> + SET_SYS : : SET_SYS ( ) : ServiceFramework ( " set : sys " ) { <nl> + static const FunctionInfo functions [ ] = { <nl> + { 0 , nullptr , " SetLanguageCode " } , <nl> + { 1 , nullptr , " SetNetworkSettings " } , <nl> + { 2 , nullptr , " GetNetworkSettings " } , <nl> + { 3 , nullptr , " GetFirmwareVersion " } , <nl> + { 4 , nullptr , " GetFirmwareVersion2 " } , <nl> + { 7 , nullptr , " GetLockScreenFlag " } , <nl> + { 8 , nullptr , " SetLockScreenFlag " } , <nl> + { 9 , nullptr , " GetBacklightSettings " } , <nl> + { 10 , nullptr , " SetBacklightSettings " } , <nl> + { 11 , nullptr , " SetBluetoothDevicesSettings " } , <nl> + { 12 , nullptr , " GetBluetoothDevicesSettings " } , <nl> + { 13 , nullptr , " GetExternalSteadyClockSourceId " } , <nl> + { 14 , nullptr , " SetExternalSteadyClockSourceId " } , <nl> + { 15 , nullptr , " GetUserSystemClockContext " } , <nl> + { 16 , nullptr , " SetUserSystemClockContext " } , <nl> + { 17 , nullptr , " GetAccountSettings " } , <nl> + { 18 , nullptr , " SetAccountSettings " } , <nl> + { 19 , nullptr , " GetAudioVolume " } , <nl> + { 20 , nullptr , " SetAudioVolume " } , <nl> + { 21 , nullptr , " GetEulaVersions " } , <nl> + { 22 , nullptr , " SetEulaVersions " } , <nl> + { 23 , & SET_SYS : : GetColorSetId , " GetColorSetId " } , <nl> + { 24 , nullptr , " SetColorSetId " } , <nl> + { 25 , nullptr , " GetConsoleInformationUploadFlag " } , <nl> + { 26 , nullptr , " SetConsoleInformationUploadFlag " } , <nl> + { 27 , nullptr , " GetAutomaticApplicationDownloadFlag " } , <nl> + { 28 , nullptr , " SetAutomaticApplicationDownloadFlag " } , <nl> + { 29 , nullptr , " GetNotificationSettings " } , <nl> + { 30 , nullptr , " SetNotificationSettings " } , <nl> + { 31 , nullptr , " GetAccountNotificationSettings " } , <nl> + { 32 , nullptr , " SetAccountNotificationSettings " } , <nl> + { 35 , nullptr , " GetVibrationMasterVolume " } , <nl> + { 36 , nullptr , " SetVibrationMasterVolume " } , <nl> + { 37 , nullptr , " GetSettingsItemValueSize " } , <nl> + { 38 , nullptr , " GetSettingsItemValue " } , <nl> + { 39 , nullptr , " GetTvSettings " } , <nl> + { 40 , 
nullptr , " SetTvSettings " } , <nl> + { 41 , nullptr , " GetEdid " } , <nl> + { 42 , nullptr , " SetEdid " } , <nl> + { 43 , nullptr , " GetAudioOutputMode " } , <nl> + { 44 , nullptr , " SetAudioOutputMode " } , <nl> + { 45 , nullptr , " IsForceMuteOnHeadphoneRemoved " } , <nl> + { 46 , nullptr , " SetForceMuteOnHeadphoneRemoved " } , <nl> + { 47 , nullptr , " GetQuestFlag " } , <nl> + { 48 , nullptr , " SetQuestFlag " } , <nl> + { 49 , nullptr , " GetDataDeletionSettings " } , <nl> + { 50 , nullptr , " SetDataDeletionSettings " } , <nl> + { 51 , nullptr , " GetInitialSystemAppletProgramId " } , <nl> + { 52 , nullptr , " GetOverlayDispProgramId " } , <nl> + { 53 , nullptr , " GetDeviceTimeZoneLocationName " } , <nl> + { 54 , nullptr , " SetDeviceTimeZoneLocationName " } , <nl> + { 55 , nullptr , " GetWirelessCertificationFileSize " } , <nl> + { 56 , nullptr , " GetWirelessCertificationFile " } , <nl> + { 57 , nullptr , " SetRegionCode " } , <nl> + { 58 , nullptr , " GetNetworkSystemClockContext " } , <nl> + { 59 , nullptr , " SetNetworkSystemClockContext " } , <nl> + { 60 , nullptr , " IsUserSystemClockAutomaticCorrectionEnabled " } , <nl> + { 61 , nullptr , " SetUserSystemClockAutomaticCorrectionEnabled " } , <nl> + { 62 , nullptr , " GetDebugModeFlag " } , <nl> + { 63 , nullptr , " GetPrimaryAlbumStorage " } , <nl> + { 64 , nullptr , " SetPrimaryAlbumStorage " } , <nl> + { 65 , nullptr , " GetUsb30EnableFlag " } , <nl> + { 66 , nullptr , " SetUsb30EnableFlag " } , <nl> + { 67 , nullptr , " GetBatteryLot " } , <nl> + { 68 , nullptr , " GetSerialNumber " } , <nl> + { 69 , nullptr , " GetNfcEnableFlag " } , <nl> + { 70 , nullptr , " SetNfcEnableFlag " } , <nl> + { 71 , nullptr , " GetSleepSettings " } , <nl> + { 72 , nullptr , " SetSleepSettings " } , <nl> + { 73 , nullptr , " GetWirelessLanEnableFlag " } , <nl> + { 74 , nullptr , " SetWirelessLanEnableFlag " } , <nl> + { 75 , nullptr , " GetInitialLaunchSettings " } , <nl> + { 76 , nullptr , " SetInitialLaunchSettings " } , <nl> + { 77 , nullptr , " GetDeviceNickName " } , <nl> + { 78 , nullptr , " SetDeviceNickName " } , <nl> + { 79 , nullptr , " GetProductModel " } , <nl> + { 80 , nullptr , " GetLdnChannel " } , <nl> + { 81 , nullptr , " SetLdnChannel " } , <nl> + { 82 , nullptr , " AcquireTelemetryDirtyFlagEventHandle " } , <nl> + { 83 , nullptr , " GetTelemetryDirtyFlags " } , <nl> + { 84 , nullptr , " GetPtmBatteryLot " } , <nl> + { 85 , nullptr , " SetPtmBatteryLot " } , <nl> + { 86 , nullptr , " GetPtmFuelGaugeParameter " } , <nl> + { 87 , nullptr , " SetPtmFuelGaugeParameter " } , <nl> + { 88 , nullptr , " GetBluetoothEnableFlag " } , <nl> + { 89 , nullptr , " SetBluetoothEnableFlag " } , <nl> + { 90 , nullptr , " GetMiiAuthorId " } , <nl> + { 91 , nullptr , " SetShutdownRtcValue " } , <nl> + { 92 , nullptr , " GetShutdownRtcValue " } , <nl> + { 93 , nullptr , " AcquireFatalDirtyFlagEventHandle " } , <nl> + { 94 , nullptr , " GetFatalDirtyFlags " } , <nl> + { 95 , nullptr , " GetAutoUpdateEnableFlag " } , <nl> + { 96 , nullptr , " SetAutoUpdateEnableFlag " } , <nl> + { 97 , nullptr , " GetNxControllerSettings " } , <nl> + { 98 , nullptr , " SetNxControllerSettings " } , <nl> + { 99 , nullptr , " GetBatteryPercentageFlag " } , <nl> + { 100 , nullptr , " SetBatteryPercentageFlag " } , <nl> + { 101 , nullptr , " GetExternalRtcResetFlag " } , <nl> + { 102 , nullptr , " SetExternalRtcResetFlag " } , <nl> + { 103 , nullptr , " GetUsbFullKeyEnableFlag " } , <nl> + { 104 , nullptr , " SetUsbFullKeyEnableFlag " } , <nl> + { 105 , nullptr 
, " SetExternalSteadyClockInternalOffset " } , <nl> + { 106 , nullptr , " GetExternalSteadyClockInternalOffset " } , <nl> + { 107 , nullptr , " GetBacklightSettingsEx " } , <nl> + { 108 , nullptr , " SetBacklightSettingsEx " } , <nl> + { 109 , nullptr , " GetHeadphoneVolumeWarningCount " } , <nl> + { 110 , nullptr , " SetHeadphoneVolumeWarningCount " } , <nl> + { 111 , nullptr , " GetBluetoothAfhEnableFlag " } , <nl> + { 112 , nullptr , " SetBluetoothAfhEnableFlag " } , <nl> + { 113 , nullptr , " GetBluetoothBoostEnableFlag " } , <nl> + { 114 , nullptr , " SetBluetoothBoostEnableFlag " } , <nl> + { 115 , nullptr , " GetInRepairProcessEnableFlag " } , <nl> + { 116 , nullptr , " SetInRepairProcessEnableFlag " } , <nl> + { 117 , nullptr , " GetHeadphoneVolumeUpdateFlag " } , <nl> + { 118 , nullptr , " SetHeadphoneVolumeUpdateFlag " } , <nl> + { 119 , nullptr , " NeedsToUpdateHeadphoneVolume " } , <nl> + { 120 , nullptr , " GetPushNotificationActivityModeOnSleep " } , <nl> + { 121 , nullptr , " SetPushNotificationActivityModeOnSleep " } , <nl> + { 122 , nullptr , " GetServiceDiscoveryControlSettings " } , <nl> + { 123 , nullptr , " SetServiceDiscoveryControlSettings " } , <nl> + { 124 , nullptr , " GetErrorReportSharePermission " } , <nl> + { 125 , nullptr , " SetErrorReportSharePermission " } , <nl> + { 126 , nullptr , " GetAppletLaunchFlags " } , <nl> + { 127 , nullptr , " SetAppletLaunchFlags " } , <nl> + { 128 , nullptr , " GetConsoleSixAxisSensorAccelerationBias " } , <nl> + { 129 , nullptr , " SetConsoleSixAxisSensorAccelerationBias " } , <nl> + { 130 , nullptr , " GetConsoleSixAxisSensorAngularVelocityBias " } , <nl> + { 131 , nullptr , " SetConsoleSixAxisSensorAngularVelocityBias " } , <nl> + { 132 , nullptr , " GetConsoleSixAxisSensorAccelerationGain " } , <nl> + { 133 , nullptr , " SetConsoleSixAxisSensorAccelerationGain " } , <nl> + { 134 , nullptr , " GetConsoleSixAxisSensorAngularVelocityGain " } , <nl> + { 135 , nullptr , " SetConsoleSixAxisSensorAngularVelocityGain " } , <nl> + { 136 , nullptr , " GetKeyboardLayout " } , <nl> + { 137 , nullptr , " SetKeyboardLayout " } , <nl> + { 138 , nullptr , " GetWebInspectorFlag " } , <nl> + { 139 , nullptr , " GetAllowedSslHosts " } , <nl> + { 140 , nullptr , " GetHostFsMountPoint " } , <nl> + } ; <nl> + RegisterHandlers ( functions ) ; <nl> + } <nl> + <nl> + } / / namespace Set <nl> + } / / namespace Service <nl> new file mode 100644 <nl> index 00000000000 . . 105f1a3c7ce <nl> mmm / dev / null <nl> ppp b / src / core / hle / service / set / set_sys . h <nl> <nl> + / / Copyright 2018 yuzu emulator team <nl> + / / Licensed under GPLv2 or any later version <nl> + / / Refer to the license . txt file included . <nl> + <nl> + # pragma once <nl> + <nl> + # include " core / hle / service / service . h " <nl> + <nl> + namespace Service { <nl> + namespace Set { <nl> + <nl> + class SET_SYS final : public ServiceFramework < SET_SYS > { <nl> + public : <nl> + explicit SET_SYS ( ) ; <nl> + ~ SET_SYS ( ) = default ; <nl> + <nl> + private : <nl> + void GetColorSetId ( Kernel : : HLERequestContext & ctx ) ; <nl> + } ; <nl> + <nl> + } / / namespace Set <nl> + } / / namespace Service <nl> new file mode 100644 <nl> index 00000000000 . . c6bc9e24047 <nl> mmm / dev / null <nl> ppp b / src / core / hle / service / set / settings . cpp <nl> <nl> + / / Copyright 2018 yuzu emulator team <nl> + / / Licensed under GPLv2 or any later version <nl> + / / Refer to the license . txt file included . <nl> + <nl> + # include " core / hle / service / set / set . 
h " <nl> + # include " core / hle / service / set / set_cal . h " <nl> + # include " core / hle / service / set / set_fd . h " <nl> + # include " core / hle / service / set / set_sys . h " <nl> + # include " core / hle / service / set / settings . h " <nl> + <nl> + namespace Service { <nl> + namespace Set { <nl> + <nl> + void InstallInterfaces ( SM : : ServiceManager & service_manager ) { <nl> + std : : make_shared < SET > ( ) - > InstallAsService ( service_manager ) ; <nl> + std : : make_shared < SET_CAL > ( ) - > InstallAsService ( service_manager ) ; <nl> + std : : make_shared < SET_FD > ( ) - > InstallAsService ( service_manager ) ; <nl> + std : : make_shared < SET_SYS > ( ) - > InstallAsService ( service_manager ) ; <nl> + } <nl> + <nl> + } / / namespace Set <nl> + } / / namespace Service <nl> new file mode 100644 <nl> index 00000000000 . . 6c8d5a58cf2 <nl> mmm / dev / null <nl> ppp b / src / core / hle / service / set / settings . h <nl> <nl> + / / Copyright 2018 yuzu emulator team <nl> + / / Licensed under GPLv2 or any later version <nl> + / / Refer to the license . txt file included . <nl> + <nl> + # pragma once <nl> + <nl> + # include " core / hle / service / service . h " <nl> + <nl> + namespace Service { <nl> + namespace Set { <nl> + <nl> + / / / Registers all Settings services with the specified service manager . <nl> + void InstallInterfaces ( SM : : ServiceManager & service_manager ) ; <nl> + <nl> + } / / namespace Set <nl> + } / / namespace Service <nl>
Service / Set : add more services
yuzu-emu/yuzu
28669872d93ae726c3f97f6e9ac8da984e313646
2018-03-03T06:03:49Z
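Editor's note: the yuzu diff above registers several settings services, each built from a table of { command id , handler , name } entries where a null handler marks a known-but-unimplemented command. The sketch below is a generic, runnable analogue of that dispatch-table pattern, not yuzu's actual ServiceFramework API; the `Service`, `FunctionInfo`, and `Call` names are illustrative.

```cpp
#include <cstdint>
#include <functional>
#include <iostream>
#include <string>
#include <utility>
#include <vector>

// Generic analogue of the ServiceFramework pattern: a service owns a table of
// {command id, handler, name}; a null handler means the command is a stub.
struct FunctionInfo {
    uint32_t id;
    std::function<void()> handler;  // nullptr => stubbed
    std::string name;
};

class Service {
public:
    Service(std::string name, std::vector<FunctionInfo> functions)
        : name_(std::move(name)), functions_(std::move(functions)) {}

    void Call(uint32_t id) const {
        for (const auto& f : functions_) {
            if (f.id != id) continue;
            if (f.handler) {
                f.handler();
            } else {
                std::cout << name_ << ": " << f.name << " is stubbed\n";
            }
            return;
        }
        std::cout << name_ << ": unknown command " << id << "\n";
    }

private:
    std::string name_;
    std::vector<FunctionInfo> functions_;
};

int main() {
    // Mirrors set:sys, where only GetColorSetId (command 23) has a real handler.
    Service set_sys{"set:sys", {
        {3, nullptr, "GetFirmwareVersion"},
        {23, [] { std::cout << "color set id = 0\n"; }, "GetColorSetId"},
    }};
    set_sys.Call(23);
    set_sys.Call(3);
}
```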
mmm a / Marlin / language_es . h <nl> ppp b / Marlin / language_es . h <nl> <nl> # define MSG_EXTRUDE " Extruir " <nl> # define MSG_RETRACT " Retraer " <nl> # define MSG_MOVE_AXIS " Mover ejes " <nl> + # define MSG_LEVEL_BED " Nivelar cama " <nl> # define MSG_MOVE_X " Mover X " <nl> # define MSG_MOVE_Y " Mover Y " <nl> # define MSG_MOVE_Z " Mover Z " <nl> mmm a / Marlin / language_it . h <nl> ppp b / Marlin / language_it . h <nl> <nl> # define MSG_EXTRUDE " Estrudi " <nl> # define MSG_RETRACT " Ritrai " <nl> # define MSG_MOVE_AXIS " Muovi Asse " <nl> + # define MSG_LEVEL_BED " Livellamento piano " <nl> # define MSG_MOVE_X " Muovi X " <nl> # define MSG_MOVE_Y " Muovi Y " <nl> # define MSG_MOVE_Z " Muovi Z " <nl>
Merge pull request from tnw513 / fix_lang
MarlinFirmware/Marlin
db8ab50c66913f5a4c74a3d8e4f352f05192d354
2016-03-21T00:07:00Z
mmm a / CMake / FollyConfigChecks . cmake <nl> ppp b / CMake / FollyConfigChecks . cmake <nl> check_symbol_exists ( preadv sys / uio . h FOLLY_HAVE_PREADV ) <nl> check_symbol_exists ( pwritev sys / uio . h FOLLY_HAVE_PWRITEV ) <nl> check_symbol_exists ( clock_gettime time . h FOLLY_HAVE_CLOCK_GETTIME ) <nl> <nl> - <nl> - check_function_exists ( <nl> - cplus_demangle_v3_callback <nl> - FOLLY_HAVE_CPLUS_DEMANGLE_V3_CALLBACK <nl> - ) <nl> check_function_exists ( malloc_usable_size FOLLY_HAVE_MALLOC_USABLE_SIZE ) <nl> <nl> set ( CMAKE_REQUIRED_FLAGS " $ { FOLLY_ORIGINAL_CMAKE_REQUIRED_FLAGS } " ) <nl> mmm a / CMake / folly - config . h . cmake <nl> ppp b / CMake / folly - config . h . cmake <nl> <nl> # cmakedefine FOLLY_HAVE_PREADV 1 <nl> # cmakedefine FOLLY_HAVE_PWRITEV 1 <nl> # cmakedefine FOLLY_HAVE_CLOCK_GETTIME 1 <nl> - # cmakedefine FOLLY_HAVE_CPLUS_DEMANGLE_V3_CALLBACK 1 <nl> # cmakedefine FOLLY_HAVE_OPENSSL_ASN1_TIME_DIFF 1 <nl> <nl> # cmakedefine FOLLY_HAVE_IFUNC 1 <nl> mmm a / folly / Demangle . cpp <nl> ppp b / folly / Demangle . cpp <nl> <nl> # include < algorithm > <nl> # include < cstring > <nl> <nl> + # include < folly / detail / Demangle . h > <nl> # include < folly / portability / Config . h > <nl> <nl> - # if FOLLY_HAVE_CPLUS_DEMANGLE_V3_CALLBACK <nl> - # include < cxxabi . h > <nl> + # if FOLLY_DETAIL_HAVE_DEMANGLE_H <nl> <nl> - / / From libiberty <nl> - / / <nl> - / / __attribute__ ( ( __weak__ ) ) doesn ' t work , because cplus_demangle_v3_callback <nl> - / / is exported by an object file in libiberty . a , and the ELF spec says <nl> - / / " The link editor does not extract archive members to resolve undefined weak <nl> - / / symbols " ( but , interestingly enough , will resolve undefined weak symbols <nl> - / / with definitions from archive members that were extracted in order to <nl> - / / resolve an undefined global ( strong ) symbol ) <nl> - <nl> - # ifndef DMGL_NO_OPTS <nl> - # define FOLLY_DEFINED_DMGL 1 <nl> - # define DMGL_NO_OPTS 0 / * For readability . . . * / <nl> - # define DMGL_PARAMS ( 1 < < 0 ) / * Include function args * / <nl> - # define DMGL_ANSI ( 1 < < 1 ) / * Include const , volatile , etc * / <nl> - # define DMGL_JAVA ( 1 < < 2 ) / * Demangle as Java rather than C + + . * / <nl> - # define DMGL_VERBOSE ( 1 < < 3 ) / * Include implementation details . * / <nl> - # define DMGL_TYPES ( 1 < < 4 ) / * Also try to demangle type encodings . * / <nl> - # define DMGL_RET_POSTFIX ( 1 < < 5 ) / * Print function return types ( when <nl> - present ) after function signature * / <nl> - # endif <nl> - <nl> - extern " C " int cplus_demangle_v3_callback ( <nl> - const char * mangled , <nl> - int options , / / We use DMGL_PARAMS | DMGL_TYPES , aka 0x11 <nl> - void ( * callback ) ( const char * , size_t , void * ) , <nl> - void * arg ) ; <nl> + # include < cxxabi . h > <nl> <nl> # endif <nl> <nl> namespace folly { <nl> <nl> - # if FOLLY_HAVE_CPLUS_DEMANGLE_V3_CALLBACK <nl> + # if FOLLY_DETAIL_HAVE_DEMANGLE_H <nl> <nl> fbstring demangle ( const char * name ) { <nl> # ifdef FOLLY_DEMANGLE_MAX_SYMBOL_SIZE <nl> size_t demangle ( const char * name , char * out , size_t outSize ) { <nl> dbuf . 
total = 0 ; <nl> <nl> / / Unlike most library functions , this returns 1 on success and 0 on failure <nl> - int status = cplus_demangle_v3_callback ( <nl> - name , <nl> - DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES , <nl> - demangleCallback , <nl> - & dbuf ) ; <nl> + int status = <nl> + detail : : cplus_demangle_v3_callback_wrapper ( name , demangleCallback , & dbuf ) ; <nl> if ( status = = 0 ) { / / failed , return original <nl> return folly : : strlcpy ( out , name , outSize ) ; <nl> } <nl> mmm a / folly / Makefile . am <nl> ppp b / folly / Makefile . am <nl> nobase_follyinclude_HEADERS = \ <nl> detail / AtFork . h \ <nl> detail / AtomicHashUtils . h \ <nl> detail / AtomicUnorderedMapUtils . h \ <nl> + detail / Demangle . h \ <nl> detail / DiscriminatedPtrDetail . h \ <nl> detail / FileUtilDetail . h \ <nl> detail / FingerprintPolynomial . h \ <nl> libfollybasesse42_la_SOURCES = \ <nl> libfollybase_la_SOURCES = \ <nl> Conv . cpp \ <nl> Demangle . cpp \ <nl> + detail / Demangle . cpp \ <nl> detail / RangeCommon . cpp \ <nl> Format . cpp \ <nl> FormatArg . cpp \ <nl> new file mode 100644 <nl> index 00000000000 . . 419656625a1 <nl> mmm / dev / null <nl> ppp b / folly / detail / Demangle . cpp <nl> <nl> + / * <nl> + * Copyright 2018 - present Facebook , Inc . <nl> + * <nl> + * Licensed under the Apache License , Version 2 . 0 ( the " License " ) ; <nl> + * you may not use this file except in compliance with the License . <nl> + * You may obtain a copy of the License at <nl> + * <nl> + * http : / / www . apache . org / licenses / LICENSE - 2 . 0 <nl> + * <nl> + * Unless required by applicable law or agreed to in writing , software <nl> + * distributed under the License is distributed on an " AS IS " BASIS , <nl> + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . <nl> + * See the License for the specific language governing permissions and <nl> + * limitations under the License . <nl> + * / <nl> + <nl> + # include < folly / detail / Demangle . h > <nl> + <nl> + / / Do not include < libiberty . h > ( binutils ) and < string . h > ( glibc ) in the same <nl> + / / translation unit since they contain conflicting declarations for the symbol <nl> + / / ` basename ` . <nl> + / / <nl> + / / So we extract the inclusion of ` < demangle . h > ` which includes ` < libiberty . h > ` <nl> + / / to here , isolating it . <nl> + # if FOLLY_DETAIL_HAVE_DEMANGLE_H <nl> + # include < demangle . h > <nl> + # endif <nl> + <nl> + namespace folly { <nl> + namespace detail { <nl> + <nl> + int cplus_demangle_v3_callback_wrapper ( <nl> + char const * const mangled , <nl> + void ( * const cbref ) ( char const * , std : : size_t , void * ) , <nl> + void * const opaque ) { <nl> + # if FOLLY_DETAIL_HAVE_DEMANGLE_H <nl> + auto const options = DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES ; <nl> + return cplus_demangle_v3_callback ( mangled , options , cbref , opaque ) ; <nl> + # else <nl> + return 0 ; <nl> + # endif <nl> + } <nl> + <nl> + } / / namespace detail <nl> + } / / namespace folly <nl> new file mode 100644 <nl> index 00000000000 . . 1755880e7ba <nl> mmm / dev / null <nl> ppp b / folly / detail / Demangle . h <nl> <nl> + / * <nl> + * Copyright 2018 - present Facebook , Inc . <nl> + * <nl> + * Licensed under the Apache License , Version 2 . 0 ( the " License " ) ; <nl> + * you may not use this file except in compliance with the License . <nl> + * You may obtain a copy of the License at <nl> + * <nl> + * http : / / www . apache . org / licenses / LICENSE - 2 . 
0 <nl> + * <nl> + * Unless required by applicable law or agreed to in writing , software <nl> + * distributed under the License is distributed on an " AS IS " BASIS , <nl> + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . <nl> + * See the License for the specific language governing permissions and <nl> + * limitations under the License . <nl> + * / <nl> + <nl> + # pragma once <nl> + <nl> + # include < cstddef > <nl> + <nl> + # if __has_include ( < demangle . h > ) <nl> + # define FOLLY_DETAIL_HAVE_DEMANGLE_H 1 <nl> + # else <nl> + # define FOLLY_DETAIL_HAVE_DEMANGLE_H 0 <nl> + # endif <nl> + <nl> + namespace folly { <nl> + namespace detail { <nl> + <nl> + extern int cplus_demangle_v3_callback_wrapper ( <nl> + char const * mangled , <nl> + void ( * cbref ) ( char const * , std : : size_t , void * ) , <nl> + void * opaque ) ; <nl> + <nl> + } / / namespace detail <nl> + } / / namespace folly <nl> mmm a / folly / test / DemangleTest . cpp <nl> ppp b / folly / test / DemangleTest . cpp <nl> <nl> <nl> # include < folly / Demangle . h > <nl> <nl> + # include < folly / detail / Demangle . h > <nl> # include < folly / portability / GTest . h > <nl> <nl> using folly : : demangle ; <nl> struct ThisIsAVeryLongStructureName { <nl> } ; <nl> } / / namespace folly_test <nl> <nl> - # if FOLLY_HAVE_CPLUS_DEMANGLE_V3_CALLBACK <nl> + # if FOLLY_DETAIL_HAVE_DEMANGLE_H <nl> TEST ( Demangle , demangle ) { <nl> char expected [ ] = " folly_test : : ThisIsAVeryLongStructureName " ; <nl> EXPECT_STREQ ( <nl> TEST ( Demangle , LongSymbolFallback ) { <nl> } <nl> # endif / / defined ( FOLLY_DEMANGLE_MAX_SYMBOL_SIZE ) <nl> <nl> - # endif / / FOLLY_HAVE_CPLUS_DEMANGLE_V3_CALLBACK <nl> + # endif / / FOLLY_DETAIL_HAVE_DEMANGLE_H <nl> <nl> TEST ( Demangle , strlcpy ) { <nl> char buf [ 6 ] ; <nl>
Cut config - time detection of cplus_demangle_v3_callback
facebook/folly
8f45e92cae6718b662430b21b593d941019918df
2018-08-15T00:23:29Z
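Editor's note: the folly diff above replaces a configure-time probe with a compile-time `__has_include` check and isolates the binutils header ( which conflicts with glibc's declaration of `basename` ) in a single translation unit, falling back to a "did nothing" result when the header is absent. A minimal single-file sketch of that pattern follows; the signatures are simplified relative to folly's, and when `<demangle.h>` is actually present you would also need to link against libiberty.

```cpp
// demangle_wrapper.cpp — sketch of the __has_include-guarded wrapper pattern.
#include <cstddef>
#include <cstdio>

#if __has_include(<demangle.h>)
#define HAVE_DEMANGLE_H 1
#include <demangle.h>  // declares cplus_demangle_v3_callback (libiberty/binutils)
#else
#define HAVE_DEMANGLE_H 0
#endif

namespace detail {

// Returns 1 on success, 0 on failure, following the libiberty convention.
int cplus_demangle_v3_callback_wrapper(
    const char* mangled,
    void (*callback)(const char*, std::size_t, void*),
    void* opaque) {
#if HAVE_DEMANGLE_H
  return cplus_demangle_v3_callback(mangled, DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES,
                                    callback, opaque);
#else
  (void)mangled; (void)callback; (void)opaque;
  return 0;  // header unavailable: report failure so callers keep the mangled name
#endif
}

}  // namespace detail

int main() {
  auto print = [](const char* s, std::size_t n, void*) { std::fwrite(s, 1, n, stdout); };
  if (!detail::cplus_demangle_v3_callback_wrapper("_ZN5folly8demangleEPKc", print, nullptr)) {
    std::puts("demangler unavailable, keeping mangled name");
  }
}
```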
mmm a / src / intertyper . js <nl> ppp b / src / intertyper . js <nl> function intertyper ( data ) { <nl> } , <nl> } ) ; <nl> <nl> + MATHOPS = set ( [ ' add ' , ' sub ' , ' sdiv ' , ' udiv ' , ' mul ' , ' icmp ' , ' zext ' , ' urem ' , ' srem ' , ' fadd ' , ' fsub ' , ' fmul ' , ' fdiv ' , ' fcmp ' , ' uitofp ' , ' sitofp ' , ' fpext ' , ' fptrunc ' , ' fptoui ' , ' fptosi ' , ' trunc ' , ' sext ' , ' select ' , ' shl ' , ' shr ' , ' ashl ' , ' ashr ' , ' lshr ' , ' lshl ' , ' xor ' , ' or ' , ' and ' , ' ptrtoint ' , ' inttoptr ' ] ) ; <nl> + <nl> substrate . addZyme ( ' Triager ' , { <nl> processItem : function ( item ) { <nl> function triage ( ) { <nl> if ( ! item . intertype ) { <nl> - if ( item . tokens . length > = 3 & & item . indent = = = 0 & & item . tokens [ 1 ] . text = = ' = ' ) <nl> - return ' Global ' ; <nl> - if ( item . tokens . length > = 4 & & item . indent = = = 0 & & item . tokens [ 0 ] . text = = ' define ' & & <nl> - item . tokens . slice ( - 1 ) [ 0 ] . text = = ' { ' ) <nl> - return ' FuncHeader ' ; <nl> - if ( ( item . tokens . length > = 1 & & item . indent = = = 0 & & item . tokens [ 0 ] . text . substr ( - 1 ) = = ' : ' ) | | / / XXX LLVM 2 . 7 format , or llvm - gcc in 2 . 8 <nl> - ( item . tokens . length > = 3 & & item . indent = = = 0 & & item . tokens [ 1 ] . text = = ' < label > ' ) ) <nl> - return ' Label ' ; <nl> - if ( item . tokens [ 0 ] . text in searchable ( ' ; ' , ' target ' ) ) <nl> - return ' / dev / null ' ; <nl> - if ( item . indent = = = 2 & & item . tokens & & item . tokens . length > = 3 & & findTokenText ( item , ' = ' ) > = 0 & & <nl> - ! item . intertype ) <nl> - return ' Assign ' ; <nl> - if ( ! item . intertype & & item . indent = = = - 1 & & item . tokens & & item . tokens . length > = 3 & & <nl> - ( item . tokens [ 0 ] . text = = ' load ' | | item . tokens [ 1 ] . text = = ' load ' ) ) <nl> - return ' Load ' ; <nl> - if ( ! item . intertype & & item . indent = = = - 1 & & item . tokens . length > = 3 & & item . tokens [ 0 ] . text = = ' extractvalue ' ) <nl> - return ' ExtractValue ' ; <nl> - if ( ! item . intertype & & item . indent = = = - 1 & & item . tokens & & item . tokens . length > = 3 & & item . tokens [ 0 ] . text = = ' bitcast ' ) <nl> - return ' Bitcast ' ; <nl> - if ( ! item . intertype & & item . indent = = = - 1 & & item . tokens & & item . tokens . length > = 3 & & item . tokens [ 0 ] . text = = ' getelementptr ' ) <nl> - return ' GEP ' ; <nl> - if ( item . tokens & & item . tokens . length > = 3 & & ( item . tokens [ 0 ] . text = = ' call ' | | item . tokens [ 1 ] . text = = ' call ' ) & & ! item . intertype ) <nl> + var token0Text = item . tokens [ 0 ] . text ; <nl> + var token1Text = item . tokens [ 1 ] ? item . tokens [ 1 ] . text : null ; <nl> + var tokensLength = item . tokens . length ; <nl> + if ( item . indent = = = 2 ) { <nl> + if ( tokensLength > = 5 & & <nl> + ( token0Text = = ' store ' | | token1Text = = ' store ' ) ) <nl> + return ' Store ' ; <nl> + if ( tokensLength > = 3 & & findTokenText ( item , ' = ' ) > = 0 ) <nl> + return ' Assign ' ; <nl> + if ( tokensLength > = 3 & & token0Text = = ' br ' ) <nl> + return ' Branch ' ; <nl> + if ( tokensLength > = 2 & & token0Text = = ' ret ' ) <nl> + return ' Return ' ; <nl> + if ( tokensLength > = 2 & & token0Text = = ' switch ' ) <nl> + return ' Switch ' ; <nl> + if ( token0Text = = ' unreachable ' ) <nl> + return ' Unreachable ' ; <nl> + } else if ( item . 
indent = = = - 1 ) { <nl> + if ( tokensLength > = 3 & & <nl> + ( token0Text = = ' load ' | | token1Text = = ' load ' ) ) <nl> + return ' Load ' ; <nl> + if ( tokensLength > = 3 & & <nl> + token0Text in MATHOPS ) <nl> + return ' Mathops ' ; <nl> + if ( tokensLength > = 3 & & token0Text = = ' bitcast ' ) <nl> + return ' Bitcast ' ; <nl> + if ( tokensLength > = 3 & & token0Text = = ' getelementptr ' ) <nl> + return ' GEP ' ; <nl> + if ( tokensLength > = 3 & & token0Text = = ' alloca ' ) <nl> + return ' Alloca ' ; <nl> + if ( tokensLength > = 3 & & token0Text = = ' extractvalue ' ) <nl> + return ' ExtractValue ' ; <nl> + if ( tokensLength > = 3 & & token0Text = = ' phi ' ) <nl> + return ' Phi ' ; <nl> + } else if ( item . indent = = = 0 ) { <nl> + if ( ( tokensLength > = 1 & & token0Text . substr ( - 1 ) = = ' : ' ) | | / / XXX LLVM 2 . 7 format , or llvm - gcc in 2 . 8 <nl> + ( tokensLength > = 3 & & token1Text = = ' < label > ' ) ) <nl> + return ' Label ' ; <nl> + if ( tokensLength > = 4 & & token0Text = = ' declare ' ) <nl> + return ' External ' ; <nl> + if ( tokensLength > = 3 & & token1Text = = ' = ' ) <nl> + return ' Global ' ; <nl> + if ( tokensLength > = 4 & & token0Text = = ' define ' & & <nl> + item . tokens . slice ( - 1 ) [ 0 ] . text = = ' { ' ) <nl> + return ' FuncHeader ' ; <nl> + if ( tokensLength > = 1 & & token0Text = = ' } ' ) <nl> + return ' FuncEnd ' ; <nl> + } <nl> + if ( tokensLength > = 3 & & ( token0Text = = ' call ' | | token1Text = = ' call ' ) ) <nl> return ' Call ' ; <nl> - if ( item . tokens & & item . tokens . length > = 3 & & item . tokens [ 0 ] . text = = ' invoke ' & & ! item . intertype ) <nl> + if ( token0Text in searchable ( ' ; ' , ' target ' ) ) <nl> + return ' / dev / null ' ; <nl> + if ( tokensLength > = 3 & & token0Text = = ' invoke ' ) <nl> return ' Invoke ' ; <nl> - if ( ! item . intertype & & item . indent = = = - 1 & & item . tokens & & item . tokens . length > = 3 & & item . tokens [ 0 ] . text = = ' alloca ' ) <nl> - return ' Alloca ' ; <nl> - if ( ! item . intertype & & item . indent = = = - 1 & & item . tokens & & item . tokens . length > = 3 & & item . tokens [ 0 ] . text = = ' phi ' ) <nl> - return ' Phi ' ; <nl> - if ( item . indent = = = - 1 & & item . tokens & & item . tokens . length > = 3 & & <nl> - [ ' add ' , ' sub ' , ' sdiv ' , ' udiv ' , ' mul ' , ' icmp ' , ' zext ' , ' urem ' , ' srem ' , ' fadd ' , ' fsub ' , ' fmul ' , ' fdiv ' , ' fcmp ' , ' uitofp ' , ' sitofp ' , ' fpext ' , ' fptrunc ' , ' fptoui ' , ' fptosi ' , ' trunc ' , ' sext ' , ' select ' , ' shl ' , ' shr ' , ' ashl ' , ' ashr ' , ' lshr ' , ' lshl ' , ' xor ' , ' or ' , ' and ' , ' ptrtoint ' , ' inttoptr ' ] . indexOf ( item . tokens [ 0 ] . text ) ! = - 1 & & ! item . intertype ) <nl> - return ' Mathops ' ; <nl> - if ( item . indent = = = 2 & & item . tokens & & item . tokens . length > = 5 & & <nl> - ( item . tokens [ 0 ] . text = = ' store ' | | item . tokens [ 1 ] . text = = ' store ' ) ) <nl> - return ' Store ' ; <nl> - if ( item . indent = = = 2 & & item . tokens & & item . tokens . length > = 3 & & item . tokens [ 0 ] . text = = ' br ' & & <nl> - ! item . intertype ) <nl> - return ' Branch ' ; <nl> - if ( item . indent = = = 2 & & item . tokens & & item . tokens . length > = 2 & & item . tokens [ 0 ] . text = = ' ret ' & & <nl> - ! item . intertype ) <nl> - return ' Return ' ; <nl> - if ( item . indent = = = 2 & & item . tokens & & item . tokens . length > = 2 & & item . tokens [ 0 ] . text = = ' switch ' & & <nl> - ! item . 
intertype ) <nl> - return ' Switch ' ; <nl> - if ( item . indent = = = 0 & & item . tokens & & item . tokens . length > = 1 & & item . tokens [ 0 ] . text = = ' } ' & & ! item . intertype ) <nl> - return ' FuncEnd ' ; <nl> - if ( item . indent = = = 0 & & item . tokens & & item . tokens . length > = 4 & & item . tokens [ 0 ] . text = = ' declare ' & & <nl> - ! item . intertype ) <nl> - return ' External ' ; <nl> - if ( item . indent = = = 2 & & item . tokens & & item . tokens [ 0 ] . text = = ' unreachable ' & & <nl> - ! item . intertype ) <nl> - return ' Unreachable ' ; <nl> } else { <nl> / / Already intertyped <nl> if ( item . parentSlot ) <nl>
optimizer intertyper triager
emscripten-core/emscripten
baa2806fdd6c01df82058bf523a767e88d7049ae
2010-10-19T03:22:02Z
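Editor's note: part of the emscripten change above is replacing a per-line `indexOf` scan over the opcode array with membership in a prebuilt MATHOPS set, after first branching on indent. The original is JavaScript; below is a hedged C++ analogue of that one micro-optimization ( linear scan vs. hashed set lookup ), with the opcode list copied from the MATHOPS set in the diff.

```cpp
#include <iostream>
#include <string>
#include <unordered_set>
#include <vector>

// Opcode list copied from the MATHOPS set in the diff above.
static const std::vector<std::string> kMathOpsList = {
    "add", "sub", "sdiv", "udiv", "mul", "icmp", "zext", "urem", "srem",
    "fadd", "fsub", "fmul", "fdiv", "fcmp", "uitofp", "sitofp", "fpext",
    "fptrunc", "fptoui", "fptosi", "trunc", "sext", "select", "shl", "shr",
    "ashl", "ashr", "lshr", "lshl", "xor", "or", "and", "ptrtoint", "inttoptr"};

static const std::unordered_set<std::string> kMathOpsSet(kMathOpsList.begin(),
                                                         kMathOpsList.end());

// Linear scan per lookup, analogous to `array.indexOf(op) != -1`.
bool IsMathOpLinear(const std::string& tok) {
  for (const auto& op : kMathOpsList)
    if (op == tok) return true;
  return false;
}

// Average O(1) hashed membership, analogous to `op in MATHOPS`.
bool IsMathOpSet(const std::string& tok) { return kMathOpsSet.count(tok) != 0; }

int main() {
  const std::vector<std::string> tokens = {"inttoptr", "load", "call", "xor", "store"};
  int hits = 0;
  for (int i = 0; i < 1000000; ++i)
    for (const auto& t : tokens) hits += IsMathOpSet(t);  // swap in IsMathOpLinear to compare
  std::cout << hits << "\n";
}
```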
mmm a / RELEASE . md <nl> ppp b / RELEASE . md <nl> <nl> * Math and Linear Algebra : <nl> * < ADD RELEASE NOTES HERE > <nl> * TPU Enhancements : <nl> + * Added support for the ` beta ` parameter of the FTRL optimizer for TPU <nl> + embeddings . Users of other TensorFlow platforms can implement equivalent <nl> + behavior by adjusting the ` l2 ` parameter . <nl> * < ADD RELEASE NOTES HERE > <nl> * XLA Support : <nl> * < ADD RELEASE NOTES HERE > <nl> mmm a / tensorflow / core / protobuf / tpu / optimization_parameters . proto <nl> ppp b / tensorflow / core / protobuf / tpu / optimization_parameters . proto <nl> message StochasticGradientDescentParameters { } <nl> / / https : / / github . com / tensorflow / tensorflow / blob / 6b6471f3ffb7f1fefe42d814aa5fb9ab7a535b58 / tensorflow / core / kernels / training_ops . cc # L2646 <nl> / / <nl> / / The hyperparameters for FTRL are the same as for the Keras implementation , <nl> - / / with some additions . When the multiply_linear_by_lr field is set to true , a <nl> - / / modified formula is used for FTRL that treats the " linear " accumulator as <nl> - / / being pre - multiplied by the learning rate ( i . e . , the accumulator named <nl> - / / " linear " actually stores " linear * learning_rate " ) . Other than checkpoint <nl> + / / with some additions . The " beta " parameter matches the behavior described in <nl> + / / the second link above ; " beta " / ( 2 * learning rate ) should be added to " l2 " <nl> + / / to get equivalent behavior in the other TensorFlow implementations of this <nl> + / / optimizer . When the multiply_linear_by_lr field is set to true , a modified <nl> + / / formula is used for FTRL that treats the " linear " accumulator as being <nl> + / / pre - multiplied by the learning rate ( i . e . , the accumulator named " linear " <nl> + / / actually stores " linear * learning_rate " ) . Other than checkpoint <nl> / / compatibility , this is mathematically equivalent for a static learning rate ; <nl> / / for a dynamic learning rate , it is nearly the same as long as the learning <nl> / / rate does not change quickly . The benefit of setting multiply_linear_by_lr to <nl> message FtrlParameters { <nl> float l1 = 1 ; <nl> float l2 = 2 ; <nl> float lr_power = 3 ; <nl> + float beta = 7 ; <nl> bool multiply_linear_by_lr = 6 ; <nl> <nl> / / Old initial accumulator parameters . <nl>
Added beta parameter ( as in https : / / research . google . com / pubs / archive / 41159 . pdf )
tensorflow/tensorflow
fcb71ce45b544ff8702b8faca84a763b3c2ebb57
2020-08-03T22:09:01Z
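Editor's note: the proto comment in the diff above states that, outside TPU embeddings, equivalent FTRL behavior is obtained by folding `beta` into `l2` as l2 + beta / ( 2 * learning_rate ). A one-line helper capturing that adjustment, written in C++ purely for illustration; the `EquivalentL2` name is mine, the parameter names mirror the proto fields.

```cpp
#include <iostream>

// Effective l2 that reproduces the FTRL `beta` behavior on platforms without a beta field,
// per the comment in optimization_parameters.proto: add beta / (2 * learning_rate) to l2.
double EquivalentL2(double l2, double beta, double learning_rate) {
  return l2 + beta / (2.0 * learning_rate);
}

int main() {
  // Example: l2 = 0.01, beta = 0.1, learning rate = 0.05  ->  0.01 + 0.1 / 0.1 = 1.01
  std::cout << EquivalentL2(0.01, 0.1, 0.05) << "\n";
}
```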
mmm a / src / asmjs / asm - typer . cc <nl> ppp b / src / asmjs / asm - typer . cc <nl> AsmType * AsmTyper : : ValidateCall ( AsmType * return_type , Call * call ) { <nl> DCHECK ( false ) ; <nl> FAIL ( call , " Redeclared global identifier . " ) ; <nl> } <nl> + if ( call - > GetCallType ( ) ! = Call : : OTHER_CALL ) { <nl> + FAIL ( call , " Invalid call of existing global function . " ) ; <nl> + } <nl> SetTypeOf ( call_var_proxy , reinterpret_cast < AsmType * > ( call_type ) ) ; <nl> SetTypeOf ( call , return_type ) ; <nl> return return_type ; <nl> AsmType * AsmTyper : : ValidateCall ( AsmType * return_type , Call * call ) { <nl> FAIL ( call , " Function invocation does not match function type . " ) ; <nl> } <nl> <nl> + if ( call - > GetCallType ( ) ! = Call : : OTHER_CALL ) { <nl> + FAIL ( call , " Invalid forward call of global function . " ) ; <nl> + } <nl> + <nl> SetTypeOf ( call_var_proxy , call_var_info - > type ( ) ) ; <nl> SetTypeOf ( call , return_type ) ; <nl> return return_type ; <nl> AsmType * AsmTyper : : ValidateCall ( AsmType * return_type , Call * call ) { <nl> DCHECK ( false ) ; <nl> FAIL ( call , " Redeclared global identifier . " ) ; <nl> } <nl> + if ( call - > GetCallType ( ) ! = Call : : KEYED_PROPERTY_CALL ) { <nl> + FAIL ( call , " Invalid call of existing function table . " ) ; <nl> + } <nl> SetTypeOf ( call_property , reinterpret_cast < AsmType * > ( call_type ) ) ; <nl> SetTypeOf ( call , return_type ) ; <nl> return return_type ; <nl> AsmType * AsmTyper : : ValidateCall ( AsmType * return_type , Call * call ) { <nl> " signature . " ) ; <nl> } <nl> <nl> + if ( call - > GetCallType ( ) ! = Call : : KEYED_PROPERTY_CALL ) { <nl> + FAIL ( call , " Invalid forward call of function table . " ) ; <nl> + } <nl> SetTypeOf ( call_property , previous_type - > signature ( ) ) ; <nl> SetTypeOf ( call , return_type ) ; <nl> return return_type ; <nl> mmm a / test / cctest / asmjs / test - asm - typer . cc <nl> ppp b / test / cctest / asmjs / test - asm - typer . cc <nl> class AsmTyperHarnessBuilder { <nl> CHECK ( validation_type_ = = ValidateStatement | | <nl> validation_type_ = = ValidateExpression ) ; <nl> auto * var = DeclareVariable ( var_name ) ; <nl> + if ( var - > IsUnallocated ( ) ) { <nl> + var - > AllocateTo ( VariableLocation : : LOCAL , - 1 ) ; <nl> + } <nl> auto * var_info = new ( zone_ ) AsmTyper : : VariableInfo ( type ) ; <nl> var_info - > set_mutability ( AsmTyper : : VariableInfo : : kLocal ) ; <nl> CHECK ( typer_ - > AddLocal ( var , var_info ) ) ; <nl> class AsmTyperHarnessBuilder { <nl> <nl> AsmTyperHarnessBuilder * WithGlobal ( VariableName var_name , AsmType * type ) { <nl> auto * var = DeclareVariable ( var_name ) ; <nl> - auto * var_info = new ( zone_ ) AsmTyper : : VariableInfo ( type ) ; <nl> - var_info - > set_mutability ( AsmTyper : : VariableInfo : : kMutableGlobal ) ; <nl> - CHECK ( typer_ - > AddGlobal ( var , var_info ) ) ; <nl> + if ( var - > IsUnallocated ( ) ) { <nl> + var - > AllocateTo ( VariableLocation : : MODULE , - 1 ) ; <nl> + } <nl> + if ( type ! 
= nullptr ) { <nl> + auto * var_info = new ( zone_ ) AsmTyper : : VariableInfo ( type ) ; <nl> + var_info - > set_mutability ( AsmTyper : : VariableInfo : : kMutableGlobal ) ; <nl> + CHECK ( typer_ - > AddGlobal ( var , var_info ) ) ; <nl> + } <nl> return this ; <nl> } <nl> <nl> class AsmTyperHarnessBuilder { <nl> AsmTyperHarnessBuilder * WithImport ( VariableName var_name , <nl> AsmTyper : : StandardMember standard_member ) { <nl> auto * var = DeclareVariable ( var_name ) ; <nl> + if ( var - > IsUnallocated ( ) ) { <nl> + var - > AllocateTo ( VariableLocation : : LOCAL , - 1 ) ; <nl> + } <nl> AsmTyper : : VariableInfo * var_info = nullptr ; <nl> auto * stdlib_map = & typer_ - > stdlib_math_types_ ; <nl> switch ( standard_member ) { <nl> class AsmTyperHarnessBuilder { <nl> return true ; <nl> } <nl> <nl> - std : : cerr < < " Asm validation failed : " < < typer_ - > error_message ( ) < < " \ n " ; <nl> + std : : unique_ptr < char [ ] > msg = i : : MessageHandler : : GetLocalizedMessage ( <nl> + isolate_ , typer_ - > error_message ( ) ) ; <nl> + std : : cerr < < " Asm validation failed : " < < msg . get ( ) < < " \ n " ; <nl> return false ; <nl> } <nl> <nl> class AsmTyperHarnessBuilder { <nl> CHECK ( validation_type_ = = ValidateExpression ) ; <nl> auto * validated_as = ValidateExpressionStatment ( fun_decl_ ) ; <nl> if ( validated_as = = AsmType : : None ( ) ) { <nl> - std : : cerr < < " Validation failure : " < < typer_ - > error_message ( ) < < " \ n " ; <nl> + std : : unique_ptr < char [ ] > msg = i : : MessageHandler : : GetLocalizedMessage ( <nl> + isolate_ , typer_ - > error_message ( ) ) ; <nl> + std : : cerr < < " Validation failure : " < < msg . get ( ) < < " \ n " ; <nl> return false ; <nl> } else if ( validated_as ! = type ) { <nl> std : : cerr < < " Validation succeeded with wrong type " <nl> TEST ( ErrorsInModuleExport ) { <nl> { " return { ' a ' : ffi } " , " cannot export foreign functions " } , <nl> { " return { ' a ' : f ( ) } " , " must be an asm . js function name " } , <nl> { " return { ' a ' : f } " , " Undefined identifier in asm . 
js module export " } , <nl> - { " function v ( ) { a ( ) ; } return { b : d2s } " , " Missing definition for forw " } , <nl> + { " function v ( ) { a ( ) ; } return { b : d2s } " , <nl> + " Invalid call of existing global function " } , <nl> { " function v ( ) { } return { b : v , ' a ' : d2s_tbl } " , <nl> " cannot export function tables " } , <nl> { " function v ( ) { } return { b : v , ' a ' : min } " , <nl> TEST ( ErrorsInStatement ) { <nl> - > WithImport ( DynamicGlobal ( " fround " ) , iw : : AsmTyper : : kMathFround ) <nl> - > WithLocal ( DynamicGlobal ( " flocal " ) , iw : : AsmType : : Float ( ) ) <nl> - > WithLocal ( DynamicGlobal ( " slocal " ) , iw : : AsmType : : Signed ( ) ) <nl> + - > WithGlobal ( DynamicGlobal ( " d " ) , nullptr ) <nl> - > FailsWithMessage ( test - > error_message ) ) { <nl> std : : cerr < < " Test : \ n " < < test - > statement ; <nl> CHECK ( false ) ; <nl> TEST ( ErrorsInExpression ) { <nl> - > WithGlobal ( DynamicGlobal ( " d2s_tbl " ) , d2s_tbl ) <nl> - > WithGlobal ( DynamicGlobal ( " HEAP32 " ) , iw : : AsmType : : Int32Array ( ) ) <nl> - > WithGlobal ( DynamicGlobal ( " HEAP8 " ) , iw : : AsmType : : Int8Array ( ) ) <nl> + - > WithGlobal ( DynamicGlobal ( " a " ) , nullptr ) <nl> - > FailsWithMessage ( test - > error_message ) ) { <nl> std : : cerr < < " Test : \ n " < < test - > expression ; <nl> CHECK ( false ) ; <nl> TEST ( ValidateAssignmentExpression ) { <nl> - > WithGlobal ( DynamicGlobal ( " U32 " ) , iw : : AsmType : : Uint32Array ( ) ) <nl> - > WithGlobal ( DynamicGlobal ( " F32 " ) , iw : : AsmType : : Float32Array ( ) ) <nl> - > WithGlobal ( DynamicGlobal ( " F64 " ) , iw : : AsmType : : Float64Array ( ) ) <nl> + - > WithGlobal ( DynamicGlobal ( " make_float " ) , nullptr ) <nl> + - > WithGlobal ( DynamicGlobal ( " make_double " ) , nullptr ) <nl> - > SucceedsWithExactType ( test - > load_type ) ) { <nl> std : : cerr < < " Test : \ n " < < test - > expression ; <nl> CHECK ( false ) ; <nl> TEST ( ValidateUnaryExpression ) { <nl> - > WithLocal ( DynamicGlobal ( " ulocal " ) , iw : : AsmType : : Unsigned ( ) ) <nl> - > WithLocal ( DynamicGlobal ( " ilocal " ) , iw : : AsmType : : Int ( ) ) <nl> - > WithGlobal ( DynamicGlobal ( " dglobal " ) , iw : : AsmType : : Double ( ) ) <nl> + - > WithGlobal ( DynamicGlobal ( " make_double " ) , nullptr ) <nl> - > WithGlobal ( DynamicGlobal ( " dbl " ) , v2d ) <nl> - > SucceedsWithExactType ( test - > load_type ) ) { <nl> std : : cerr < < " Test : \ n " < < test - > expression ; <nl> TEST ( ValidateBitwiseExpression ) { <nl> - > WithLocal ( DynamicGlobal ( " iish1 " ) , iw : : AsmType : : Intish ( ) ) <nl> - > WithLocal ( DynamicGlobal ( " iish0 " ) , iw : : AsmType : : Intish ( ) ) <nl> - > WithGlobal ( DynamicGlobal ( " signed " ) , v2s ) <nl> + - > WithGlobal ( DynamicGlobal ( " make_signed " ) , nullptr ) <nl> - > SucceedsWithExactType ( test - > load_type ) ) { <nl> std : : cerr < < " Test : \ n " < < test - > expression ; <nl> CHECK ( false ) ; <nl> TEST ( ValidateCall ) { <nl> / / <nl> / / ifd2_ ( & iw : : AsmType : : Float ) <nl> / / <nl> - / / returns an AsmType representing an asm . j function with the following <nl> + / / returns an AsmType representing an asm . 
js function with the following <nl> / / signature : <nl> / / <nl> / / float ( int , float , double ) <nl> TEST ( ValidateCall ) { <nl> - > WithLocal ( DynamicGlobal ( " u " ) , iw : : AsmType : : Unsigned ( ) ) <nl> - > WithLocal ( DynamicGlobal ( " iish " ) , iw : : AsmType : : Intish ( ) ) <nl> - > WithGlobal ( DynamicGlobal ( " v2f " ) , v2f ) <nl> + - > WithGlobal ( DynamicGlobal ( " ifd2f " ) , nullptr ) <nl> + - > WithGlobal ( DynamicGlobal ( " ifd2d " ) , nullptr ) <nl> + - > WithGlobal ( DynamicGlobal ( " ifd2i " ) , nullptr ) <nl> - > WithGlobal ( DynamicGlobal ( " ifd2f_tbl " ) , ifd2f_tbl ) <nl> - > WithGlobal ( DynamicGlobal ( " ifd2d_tbl " ) , ifd2d_tbl ) <nl> - > WithGlobal ( DynamicGlobal ( " ifd2i_tbl " ) , ifd2i_tbl ) <nl> mmm a / test / mjsunit / asm / asm - validation . js <nl> ppp b / test / mjsunit / asm / asm - validation . js <nl> function assertValidAsm ( func ) { <nl> Module ( ) ; <nl> assertFalse ( % IsAsmWasmCode ( Module ) ) ; <nl> } ) ( ) ; <nl> + <nl> + ( function TestUndefinedGlobalCall ( ) { <nl> + function Module ( ) { <nl> + " use asm " ; <nl> + function foo ( ) { <nl> + return bar ( ) | 0 ; <nl> + } <nl> + return foo ; <nl> + } <nl> + Module ( ) ; <nl> + assertFalse ( % IsAsmWasmCode ( Module ) ) ; <nl> + } ) ( ) ; <nl>
Forbid non - locals / keyed - property calls to allow interleaved compile .
v8/v8
be10e68f39b4697e56fcdfcd661dfdbcf19485b7
2017-01-10T04:07:16Z
mmm a / documentation / sphinx / source / downloads . rst <nl> ppp b / documentation / sphinx / source / downloads . rst <nl> macOS <nl> <nl> The macOS installation package is supported on macOS 10 . 7 + . It includes the client and ( optionally ) the server . <nl> <nl> - * ` FoundationDB - 6 . 2 . 13 . pkg < https : / / www . foundationdb . org / downloads / 6 . 2 . 13 / macOS / installers / FoundationDB - 6 . 2 . 13 . pkg > ` _ <nl> + * ` FoundationDB - 6 . 2 . 14 . pkg < https : / / www . foundationdb . org / downloads / 6 . 2 . 14 / macOS / installers / FoundationDB - 6 . 2 . 14 . pkg > ` _ <nl> <nl> Ubuntu <nl> mmmmmm <nl> <nl> The Ubuntu packages are supported on 64 - bit Ubuntu 12 . 04 + , but beware of the Linux kernel bug in Ubuntu 12 . x . <nl> <nl> - * ` foundationdb - clients - 6 . 2 . 13 - 1_amd64 . deb < https : / / www . foundationdb . org / downloads / 6 . 2 . 13 / ubuntu / installers / foundationdb - clients_6 . 2 . 13 - 1_amd64 . deb > ` _ <nl> - * ` foundationdb - server - 6 . 2 . 13 - 1_amd64 . deb < https : / / www . foundationdb . org / downloads / 6 . 2 . 13 / ubuntu / installers / foundationdb - server_6 . 2 . 13 - 1_amd64 . deb > ` _ ( depends on the clients package ) <nl> + * ` foundationdb - clients - 6 . 2 . 14 - 1_amd64 . deb < https : / / www . foundationdb . org / downloads / 6 . 2 . 14 / ubuntu / installers / foundationdb - clients_6 . 2 . 14 - 1_amd64 . deb > ` _ <nl> + * ` foundationdb - server - 6 . 2 . 14 - 1_amd64 . deb < https : / / www . foundationdb . org / downloads / 6 . 2 . 14 / ubuntu / installers / foundationdb - server_6 . 2 . 14 - 1_amd64 . deb > ` _ ( depends on the clients package ) <nl> <nl> RHEL / CentOS EL6 <nl> mmmmmmmmmmmmmmm <nl> <nl> The RHEL / CentOS EL6 packages are supported on 64 - bit RHEL / CentOS 6 . x . <nl> <nl> - * ` foundationdb - clients - 6 . 2 . 13 - 1 . el6 . x86_64 . rpm < https : / / www . foundationdb . org / downloads / 6 . 2 . 13 / rhel6 / installers / foundationdb - clients - 6 . 2 . 13 - 1 . el6 . x86_64 . rpm > ` _ <nl> - * ` foundationdb - server - 6 . 2 . 13 - 1 . el6 . x86_64 . rpm < https : / / www . foundationdb . org / downloads / 6 . 2 . 13 / rhel6 / installers / foundationdb - server - 6 . 2 . 13 - 1 . el6 . x86_64 . rpm > ` _ ( depends on the clients package ) <nl> + * ` foundationdb - clients - 6 . 2 . 14 - 1 . el6 . x86_64 . rpm < https : / / www . foundationdb . org / downloads / 6 . 2 . 14 / rhel6 / installers / foundationdb - clients - 6 . 2 . 14 - 1 . el6 . x86_64 . rpm > ` _ <nl> + * ` foundationdb - server - 6 . 2 . 14 - 1 . el6 . x86_64 . rpm < https : / / www . foundationdb . org / downloads / 6 . 2 . 14 / rhel6 / installers / foundationdb - server - 6 . 2 . 14 - 1 . el6 . x86_64 . rpm > ` _ ( depends on the clients package ) <nl> <nl> RHEL / CentOS EL7 <nl> mmmmmmmmmmmmmmm <nl> <nl> The RHEL / CentOS EL7 packages are supported on 64 - bit RHEL / CentOS 7 . x . <nl> <nl> - * ` foundationdb - clients - 6 . 2 . 13 - 1 . el7 . x86_64 . rpm < https : / / www . foundationdb . org / downloads / 6 . 2 . 13 / rhel7 / installers / foundationdb - clients - 6 . 2 . 13 - 1 . el7 . x86_64 . rpm > ` _ <nl> - * ` foundationdb - server - 6 . 2 . 13 - 1 . el7 . x86_64 . rpm < https : / / www . foundationdb . org / downloads / 6 . 2 . 13 / rhel7 / installers / foundationdb - server - 6 . 2 . 13 - 1 . el7 . x86_64 . rpm > ` _ ( depends on the clients package ) <nl> + * ` foundationdb - clients - 6 . 2 . 14 - 1 . el7 . x86_64 . rpm < https : / / www . foundationdb . org / downloads / 6 . 2 . 
14 / rhel7 / installers / foundationdb - clients - 6 . 2 . 14 - 1 . el7 . x86_64 . rpm > ` _ <nl> + * ` foundationdb - server - 6 . 2 . 14 - 1 . el7 . x86_64 . rpm < https : / / www . foundationdb . org / downloads / 6 . 2 . 14 / rhel7 / installers / foundationdb - server - 6 . 2 . 14 - 1 . el7 . x86_64 . rpm > ` _ ( depends on the clients package ) <nl> <nl> Windows <nl> mmmmmm - <nl> <nl> The Windows installer is supported on 64 - bit Windows XP and later . It includes the client and ( optionally ) the server . <nl> <nl> - * ` foundationdb - 6 . 2 . 13 - x64 . msi < https : / / www . foundationdb . org / downloads / 6 . 2 . 13 / windows / installers / foundationdb - 6 . 2 . 13 - x64 . msi > ` _ <nl> + * ` foundationdb - 6 . 2 . 14 - x64 . msi < https : / / www . foundationdb . org / downloads / 6 . 2 . 14 / windows / installers / foundationdb - 6 . 2 . 14 - x64 . msi > ` _ <nl> <nl> API Language Bindings <nl> = = = = = = = = = = = = = = = = = = = = = <nl> On macOS and Windows , the FoundationDB Python API bindings are installed as part <nl> <nl> If you need to use the FoundationDB Python API from other Python installations or paths , download the Python package : <nl> <nl> - * ` foundationdb - 6 . 2 . 13 . tar . gz < https : / / www . foundationdb . org / downloads / 6 . 2 . 13 / bindings / python / foundationdb - 6 . 2 . 13 . tar . gz > ` _ <nl> + * ` foundationdb - 6 . 2 . 14 . tar . gz < https : / / www . foundationdb . org / downloads / 6 . 2 . 14 / bindings / python / foundationdb - 6 . 2 . 14 . tar . gz > ` _ <nl> <nl> Ruby 1 . 9 . 3 / 2 . 0 . 0 + <nl> mmmmmmmmmmmmmmm - - <nl> <nl> - * ` fdb - 6 . 2 . 13 . gem < https : / / www . foundationdb . org / downloads / 6 . 2 . 13 / bindings / ruby / fdb - 6 . 2 . 13 . gem > ` _ <nl> + * ` fdb - 6 . 2 . 14 . gem < https : / / www . foundationdb . org / downloads / 6 . 2 . 14 / bindings / ruby / fdb - 6 . 2 . 14 . gem > ` _ <nl> <nl> Java 8 + <nl> mmmmmm - <nl> <nl> - * ` fdb - java - 6 . 2 . 13 . jar < https : / / www . foundationdb . org / downloads / 6 . 2 . 13 / bindings / java / fdb - java - 6 . 2 . 13 . jar > ` _ <nl> - * ` fdb - java - 6 . 2 . 13 - javadoc . jar < https : / / www . foundationdb . org / downloads / 6 . 2 . 13 / bindings / java / fdb - java - 6 . 2 . 13 - javadoc . jar > ` _ <nl> + * ` fdb - java - 6 . 2 . 14 . jar < https : / / www . foundationdb . org / downloads / 6 . 2 . 14 / bindings / java / fdb - java - 6 . 2 . 14 . jar > ` _ <nl> + * ` fdb - java - 6 . 2 . 14 - javadoc . jar < https : / / www . foundationdb . org / downloads / 6 . 2 . 14 / bindings / java / fdb - java - 6 . 2 . 14 - javadoc . jar > ` _ <nl> <nl> Go 1 . 11 + <nl> mmmmmm - - <nl> mmm a / documentation / sphinx / source / release - notes . rst <nl> ppp b / documentation / sphinx / source / release - notes . rst <nl> <nl> Release Notes <nl> # # # # # # # # # # # # # <nl> <nl> + 6 . 2 . 14 <nl> + = = = = = = <nl> + <nl> + Fixes <nl> + mmm - - <nl> + <nl> + * Data distribution was prioritizing shard merges too highly . ` ( PR # 2562 ) < https : / / github . com / apple / foundationdb / pull / 2562 > ` _ . <nl> + * Status would incorrectly mark clusters as having no fault tolerance . ` ( PR # 2562 ) < https : / / github . com / apple / foundationdb / pull / 2562 > ` _ . <nl> + * A proxy could run out of memory if disconnected from the cluster for too long . ` ( PR # 2562 ) < https : / / github . com / apple / foundationdb / pull / 2562 > ` _ . <nl> + <nl> 6 . 2 . 13 <nl> = = = = = = <nl> <nl>
updated documentation for 6.2.14
apple/foundationdb
f959ed038f9e9e2a0257712e1c8f345958392d4e
2020-01-20T22:20:22Z
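The FoundationDB change above is a mechanical version bump: every 6.2.13 download URL and package name in downloads.rst becomes 6.2.14, and a new 6.2.14 entry is added to the release notes. Below is a minimal Python sketch of how such a bump could be scripted; the file path and version strings come from the diff, while the function name and command-line wiring are assumptions added only for illustration.

import sys

def bump_version(path, old, new):
    # Replace every occurrence of the old version string (e.g. "6.2.13")
    # in download URLs and package names with the new one (e.g. "6.2.14").
    with open(path, encoding="utf-8") as f:
        text = f.read()
    with open(path, "w", encoding="utf-8") as f:
        f.write(text.replace(old, new))
    return text.count(old)  # number of substitutions made

if __name__ == "__main__":
    old, new = sys.argv[1], sys.argv[2]  # e.g. 6.2.13 6.2.14
    count = bump_version("documentation/sphinx/source/downloads.rst", old, new)
    print("replaced", count, "occurrences of", old, "with", new)

The release-notes entry still has to be written by hand, which is what the rest of the commit does.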
mmm a / bazel / repositories . bzl <nl> ppp b / bazel / repositories . bzl <nl> load ( " @ bazel_tools / / tools / build_defs / repo : http . bzl " , " http_archive " ) <nl> load ( " : dev_binding . bzl " , " envoy_dev_binding " ) <nl> load ( " : genrule_repository . bzl " , " genrule_repository " ) <nl> load ( " @ envoy_api / / bazel : envoy_http_archive . bzl " , " envoy_http_archive " ) <nl> - load ( " : repository_locations . bzl " , " DEPENDENCY_REPOSITORIES " , " USE_CATEGORIES " ) <nl> + load ( " : repository_locations . bzl " , " DEPENDENCY_ANNOTATIONS " , " DEPENDENCY_REPOSITORIES " , " USE_CATEGORIES " , " USE_CATEGORIES_WITH_CPE_OPTIONAL " ) <nl> load ( " @ com_google_googleapis / / : repository_rules . bzl " , " switched_rules_by_language " ) <nl> <nl> PPC_SKIP_TARGETS = [ " envoy . filters . http . lua " ] <nl> def _repository_locations ( ) : <nl> if " use_category " not in location : <nl> fail ( " The ' use_category ' attribute must be defined for external dependecy " + str ( location [ " urls " ] ) ) <nl> <nl> + if " cpe " not in location and not [ category for category in USE_CATEGORIES_WITH_CPE_OPTIONAL if category in location [ " use_category " ] ] : <nl> + fail ( " The ' cpe ' attribute must be defined for external dependecy " + str ( location [ " urls " ] ) ) <nl> + <nl> for category in location [ " use_category " ] : <nl> if category not in USE_CATEGORIES : <nl> fail ( " Unknown use_category value ' " + category + " ' for dependecy " + str ( location [ " urls " ] ) ) <nl> REPOSITORY_LOCATIONS = _repository_locations ( ) <nl> # See repository_locations . bzl for the list of annotation attributes . <nl> def _get_location ( dependency ) : <nl> stripped = dict ( REPOSITORY_LOCATIONS [ dependency ] ) <nl> - stripped . pop ( " use_category " , None ) <nl> + for attribute in DEPENDENCY_ANNOTATIONS : <nl> + stripped . pop ( attribute , None ) <nl> return stripped <nl> <nl> def _repository_impl ( name , * * kwargs ) : <nl> mmm a / bazel / repository_locations . bzl <nl> ppp b / bazel / repository_locations . bzl <nl> <nl> + # Validation of content in this file is done on the bazel / repositories . bzl file to make it free of bazel <nl> + # constructs . This is to allow this file to be loaded into Python based build and maintenance tools . <nl> + <nl> # Envoy dependencies may be annotated with the following attributes : <nl> - # <nl> - # use_category - list of the categories describing how the dependency is being used . This attribute is used <nl> - # for automatic tracking of security posture of Envoy ' s dependencies . <nl> - # Possible values are documented in the USE_CATEGORIES list . <nl> + DEPENDENCY_ANNOTATIONS = [ <nl> + # List of the categories describing how the dependency is being used . This attribute is used <nl> + # for automatic tracking of security posture of Envoy ' s dependencies . <nl> + # Possible values are documented in the USE_CATEGORIES list below . <nl> + # This attribute is mandatory for each dependecy . <nl> + " use_category " , <nl> + <nl> + # Attribute specifying CPE ( Common Platform Enumeration , see https : / / nvd . nist . gov / products / cpe ) ID <nl> + # of the dependency . The ID may be in v2 . 3 or v2 . 2 format , although v2 . 3 is prefferred . See <nl> + # https : / / nvd . nist . gov / products / cpe for CPE format . Use single wildcard ' * ' for version and vector elements <nl> + # i . e . ' cpe : 2 . 3 : a : nghttp2 : nghttp2 : * ' . Use " N / A " for dependencies without CPE assigned . 
<nl> + # This attribute is optional for components with use categories listed in the <nl> + # USE_CATEGORIES_WITH_CPE_OPTIONAL <nl> + " cpe " , <nl> + ] <nl> <nl> # NOTE : If a dependency use case is either dataplane or controlplane , the other uses are not needed <nl> # to be declared . <nl> USE_CATEGORIES = [ <nl> " other " , <nl> ] <nl> <nl> + # Components with these use categories are not required to specify the ' cpe ' annotation . <nl> + USE_CATEGORIES_WITH_CPE_OPTIONAL = [ " build " , " test " , " other " ] <nl> + <nl> DEPENDENCY_REPOSITORIES = dict ( <nl> bazel_compdb = dict ( <nl> sha256 = " 87e376a685eacfb27bcc0d0cdf5ded1d0b99d868390ac50f452ba6ed781caffe " , <nl> DEPENDENCY_REPOSITORIES = dict ( <nl> # chromium - 81 . 0 . 4044 . 69 <nl> urls = [ " https : / / github . com / google / boringssl / archive / 1c2769383f027befac5b75b6cedd25daf3bf4dcf . tar . gz " ] , <nl> use_category = [ " dataplane " ] , <nl> + cpe = " N / A " , <nl> ) , <nl> boringssl_fips = dict ( <nl> sha256 = " b12ad676ee533824f698741bd127f6fbc82c46344398a6d78d25e62c6c418c73 " , <nl> # fips - 20180730 <nl> urls = [ " https : / / commondatastorage . googleapis . com / chromium - boringssl - docs / fips / boringssl - 66005f41fbc3529ffe8d007708756720529da20d . tar . xz " ] , <nl> use_category = [ " dataplane " ] , <nl> + cpe = " N / A " , <nl> ) , <nl> com_google_absl = dict ( <nl> sha256 = " 14ee08e2089c2a9b6bf27e1d10abc5629c69c4d0bab4b78ec5b65a29ea1c2af7 " , <nl> DEPENDENCY_REPOSITORIES = dict ( <nl> # 2020 - 03 - 05 <nl> urls = [ " https : / / github . com / abseil / abseil - cpp / archive / cf3a1998e9d41709d4141e2f13375993cba1130e . tar . gz " ] , <nl> use_category = [ " dataplane " , " controlplane " ] , <nl> + cpe = " N / A " , <nl> ) , <nl> com_github_apache_thrift = dict ( <nl> sha256 = " 7d59ac4fdcb2c58037ebd4a9da5f9a49e3e034bf75b3f26d9fe48ba3d8806e6b " , <nl> strip_prefix = " thrift - 0 . 11 . 0 " , <nl> urls = [ " https : / / files . pythonhosted . org / packages / c6 / b4 / 510617906f8e0c5660e7d96fbc5585113f83ad547a3989b80297ac72a74c / thrift - 0 . 11 . 0 . tar . gz " ] , <nl> use_category = [ " dataplane " ] , <nl> + cpe = " cpe : 2 . 3 : a : apache : thrift : * " , <nl> ) , <nl> com_github_c_ares_c_ares = dict ( <nl> sha256 = " bbaab13d6ad399a278d476f533e4d88a7ec7d729507348bb9c2e3b207ba4c606 " , <nl> DEPENDENCY_REPOSITORIES = dict ( <nl> # TODO ( crazyxy ) : Update to release - 1 . 16 . 0 when it is released . <nl> urls = [ " https : / / github . com / c - ares / c - ares / archive / d7e070e7283f822b1d2787903cce3615536c5610 . tar . gz " ] , <nl> use_category = [ " dataplane " ] , <nl> + cpe = " cpe : 2 . 3 : a : c - ares_project : c - ares : * " , <nl> ) , <nl> com_github_circonus_labs_libcircllhist = dict ( <nl> sha256 = " 8165aa25e529d7d4b9ae849d3bf30371255a99d6db0421516abcff23214cdc2c " , <nl> DEPENDENCY_REPOSITORIES = dict ( <nl> # 2019 - 02 - 11 <nl> urls = [ " https : / / github . com / circonus - labs / libcircllhist / archive / 63a16dd6f2fc7bc841bb17ff92be8318df60e2e1 . tar . gz " ] , <nl> use_category = [ " observability " ] , <nl> + cpe = " N / A " , <nl> ) , <nl> com_github_cyan4973_xxhash = dict ( <nl> sha256 = " 952ebbf5b11fbf59ae5d760a562d1e9112278f244340ad7714e8556cbe54f7f7 " , <nl> strip_prefix = " xxHash - 0 . 7 . 3 " , <nl> urls = [ " https : / / github . com / Cyan4973 / xxHash / archive / v0 . 7 . 3 . tar . 
gz " ] , <nl> use_category = [ " dataplane " , " controlplane " ] , <nl> + cpe = " N / A " , <nl> ) , <nl> com_github_envoyproxy_sqlparser = dict ( <nl> sha256 = " b2d3882698cf85b64c87121e208ce0b24d5fe2a00a5d058cf4571f1b25b45403 " , <nl> DEPENDENCY_REPOSITORIES = dict ( <nl> # 2020 - 01 - 10 <nl> urls = [ " https : / / github . com / envoyproxy / sql - parser / archive / b14d010afd4313f2372a1cc96aa2327e674cc798 . tar . gz " ] , <nl> use_category = [ " dataplane " ] , <nl> + cpe = " N / A " , <nl> ) , <nl> com_github_mirror_tclap = dict ( <nl> sha256 = " f0ede0721dddbb5eba3a47385a6e8681b14f155e1129dd39d1a959411935098f " , <nl> DEPENDENCY_REPOSITORIES = dict ( <nl> strip_prefix = " fmt - 6 . 0 . 0 " , <nl> urls = [ " https : / / github . com / fmtlib / fmt / archive / 6 . 0 . 0 . tar . gz " ] , <nl> use_category = [ " observability " ] , <nl> + cpe = " N / A " , <nl> ) , <nl> com_github_gabime_spdlog = dict ( <nl> sha256 = " afd18f62d1bc466c60bef088e6b637b0284be88c515cedc59ad4554150af6043 " , <nl> strip_prefix = " spdlog - 1 . 4 . 0 " , <nl> urls = [ " https : / / github . com / gabime / spdlog / archive / v1 . 4 . 0 . tar . gz " ] , <nl> use_category = [ " observability " ] , <nl> + cpe = " N / A " , <nl> ) , <nl> com_github_google_libprotobuf_mutator = dict ( <nl> sha256 = " " , <nl> DEPENDENCY_REPOSITORIES = dict ( <nl> strip_prefix = " grpc - d8f4928fa779f6005a7fe55a176bdb373b0f910f " , <nl> urls = [ " https : / / github . com / grpc / grpc / archive / d8f4928fa779f6005a7fe55a176bdb373b0f910f . tar . gz " ] , <nl> use_category = [ " dataplane " , " controlplane " ] , <nl> + cpe = " cpe : 2 . 3 : a : grpc : grpc : * " , <nl> ) , <nl> com_github_luajit_luajit = dict ( <nl> sha256 = " 409f7fe570d3c16558e594421c47bdd130238323c9d6fd6c83dedd2aaeb082a8 " , <nl> strip_prefix = " LuaJIT - 2 . 1 . 0 - beta3 " , <nl> urls = [ " https : / / github . com / LuaJIT / LuaJIT / archive / v2 . 1 . 0 - beta3 . tar . gz " ] , <nl> use_category = [ " dataplane " ] , <nl> + cpe = " N / A " , <nl> ) , <nl> com_github_moonjit_moonjit = dict ( <nl> sha256 = " 83deb2c880488dfe7dd8ebf09e3b1e7613ef4b8420de53de6f712f01aabca2b6 " , <nl> strip_prefix = " moonjit - 2 . 2 . 0 " , <nl> urls = [ " https : / / github . com / moonjit / moonjit / archive / 2 . 2 . 0 . tar . gz " ] , <nl> use_category = [ " dataplane " ] , <nl> + cpe = " N / A " , <nl> ) , <nl> com_github_nghttp2_nghttp2 = dict ( <nl> sha256 = " eb9d9046495a49dd40c7ef5d6c9907b51e5a6b320ea6e2add11eb8b52c982c47 " , <nl> strip_prefix = " nghttp2 - 1 . 40 . 0 " , <nl> urls = [ " https : / / github . com / nghttp2 / nghttp2 / releases / download / v1 . 40 . 0 / nghttp2 - 1 . 40 . 0 . tar . gz " ] , <nl> use_category = [ " dataplane " ] , <nl> + cpe = " cpe : 2 . 3 : a : nghttp2 : nghttp2 : * " , <nl> ) , <nl> io_opentracing_cpp = dict ( <nl> sha256 = " 015c4187f7a6426a2b5196f0ccd982aa87f010cf61f507ae3ce5c90523f92301 " , <nl> strip_prefix = " opentracing - cpp - 1 . 5 . 1 " , <nl> urls = [ " https : / / github . com / opentracing / opentracing - cpp / archive / v1 . 5 . 1 . tar . gz " ] , <nl> use_category = [ " observability " ] , <nl> + cpe = " N / A " , <nl> ) , <nl> com_lightstep_tracer_cpp = dict ( <nl> sha256 = " 0e99716598c010e56bc427ea3482be5ad2c534be8b039d172564deec1264a213 " , <nl> DEPENDENCY_REPOSITORIES = dict ( <nl> # 2020 - 03 - 24 <nl> urls = [ " https : / / github . com / lightstep / lightstep - tracer - cpp / archive / 3efe2372ee3d7c2138d6b26e542d757494a7938d . tar . 
gz " ] , <nl> use_category = [ " observability " ] , <nl> + cpe = " N / A " , <nl> ) , <nl> com_github_datadog_dd_opentracing_cpp = dict ( <nl> sha256 = " 6dc1088ab7f788b6c849fbaa6300517c8fdf88991a70b778be79c284c36857bf " , <nl> strip_prefix = " dd - opentracing - cpp - 1 . 1 . 3 " , <nl> urls = [ " https : / / github . com / DataDog / dd - opentracing - cpp / archive / v1 . 1 . 3 . tar . gz " ] , <nl> use_category = [ " observability " ] , <nl> + cpe = " N / A " , <nl> ) , <nl> com_github_google_benchmark = dict ( <nl> sha256 = " 3c6a165b6ecc948967a1ead710d4a181d7b0fbcaa183ef7ea84604994966221a " , <nl> DEPENDENCY_REPOSITORIES = dict ( <nl> # 2019 - 07 - 02 <nl> urls = [ " https : / / github . com / libevent / libevent / archive / 0d7d85c2083f7a4c9efe01c061486f332b576d28 . tar . gz " ] , <nl> use_category = [ " dataplane " ] , <nl> + cpe = " cpe : 2 . 3 : a : libevent_project : libevent : * " , <nl> ) , <nl> net_zlib = dict ( <nl> # Use the dev branch of zlib to resolve fuzz bugs and out of bound <nl> DEPENDENCY_REPOSITORIES = dict ( <nl> # 2019 - 04 - 14 development branch <nl> urls = [ " https : / / github . com / madler / zlib / archive / 79baebe50e4d6b73ae1f8b603f0ef41300110aa3 . tar . gz " ] , <nl> use_category = [ " dataplane " ] , <nl> + cpe = " cpe : 2 . 3 : a : gnu : zlib : * " , <nl> ) , <nl> com_github_jbeder_yaml_cpp = dict ( <nl> sha256 = " 77ea1b90b3718aa0c324207cb29418f5bced2354c2e483a9523d98c3460af1ed " , <nl> strip_prefix = " yaml - cpp - yaml - cpp - 0 . 6 . 3 " , <nl> urls = [ " https : / / github . com / jbeder / yaml - cpp / archive / yaml - cpp - 0 . 6 . 3 . tar . gz " ] , <nl> use_category = [ " dataplane " ] , <nl> + cpe = " N / A " , <nl> ) , <nl> com_github_msgpack_msgpack_c = dict ( <nl> sha256 = " 433cbcd741e1813db9ae4b2e192b83ac7b1d2dd7968a3e11470eacc6f4ab58d2 " , <nl> strip_prefix = " msgpack - 3 . 2 . 1 " , <nl> urls = [ " https : / / github . com / msgpack / msgpack - c / releases / download / cpp - 3 . 2 . 1 / msgpack - 3 . 2 . 1 . tar . gz " ] , <nl> use_category = [ " observability " ] , <nl> + cpe = " N / A " , <nl> ) , <nl> com_github_google_jwt_verify = dict ( <nl> sha256 = " d422a6eadd4bcdd0f9b122cd843a4015f8b18aebea6e1deb004bd4d401a8ef92 " , <nl> DEPENDENCY_REPOSITORIES = dict ( <nl> # 2020 - 02 - 11 <nl> urls = [ " https : / / github . com / google / jwt_verify_lib / archive / 40e2cc938f4bcd059a97dc6c73f59ecfa5a71bac . tar . gz " ] , <nl> use_category = [ " dataplane " ] , <nl> + cpe = " N / A " , <nl> ) , <nl> com_github_nodejs_http_parser = dict ( <nl> sha256 = " 8fa0ab8770fd8425a9b431fdbf91623c4d7a9cdb842b9339289bd2b0b01b0d3d " , <nl> strip_prefix = " http - parser - 2 . 9 . 3 " , <nl> urls = [ " https : / / github . com / nodejs / http - parser / archive / v2 . 9 . 3 . tar . gz " ] , <nl> use_category = [ " dataplane " ] , <nl> + cpe = " N / A " , <nl> ) , <nl> com_github_pallets_jinja = dict ( <nl> sha256 = " db49236731373e4f3118af880eb91bb0aa6978bc0cf8b35760f6a026f1a9ffc4 " , <nl> DEPENDENCY_REPOSITORIES = dict ( <nl> # Changes through 2019 - 12 - 02 <nl> urls = [ " https : / / github . com / Tencent / rapidjson / archive / dfbe1db9da455552f7a9ad5d2aea17dd9d832ac1 . tar . gz " ] , <nl> use_category = [ " dataplane " ] , <nl> + cpe = " cpe : 2 . 3 : a : tencent : rapidjson : * " , <nl> ) , <nl> com_github_twitter_common_lang = dict ( <nl> sha256 = " 56d1d266fd4767941d11c27061a57bc1266a3342e551bde3780f9e9eb5ad0ed1 " , <nl> strip_prefix = " twitter . common . lang - 0 . 3 . 9 / src " , <nl> urls = [ " https : / / files . pythonhosted . 
org / packages / 08 / bc / d6409a813a9dccd4920a6262eb6e5889e90381453a5f58938ba4cf1d9420 / twitter . common . lang - 0 . 3 . 9 . tar . gz " ] , <nl> use_category = [ " dataplane " ] , <nl> + cpe = " N / A " , <nl> ) , <nl> com_github_twitter_common_rpc = dict ( <nl> sha256 = " 0792b63fb2fb32d970c2e9a409d3d00633190a22eb185145fe3d9067fdaa4514 " , <nl> strip_prefix = " twitter . common . rpc - 0 . 3 . 9 / src " , <nl> urls = [ " https : / / files . pythonhosted . org / packages / be / 97 / f5f701b703d0f25fbf148992cd58d55b4d08d3db785aad209255ee67e2d0 / twitter . common . rpc - 0 . 3 . 9 . tar . gz " ] , <nl> use_category = [ " dataplane " ] , <nl> + cpe = " N / A " , <nl> ) , <nl> com_github_twitter_common_finagle_thrift = dict ( <nl> sha256 = " 1e3a57d11f94f58745e6b83348ecd4fa74194618704f45444a15bc391fde497a " , <nl> strip_prefix = " twitter . common . finagle - thrift - 0 . 3 . 9 / src " , <nl> urls = [ " https : / / files . pythonhosted . org / packages / f9 / e7 / 4f80d582578f8489226370762d2cf6bc9381175d1929eba1754e03f70708 / twitter . common . finagle - thrift - 0 . 3 . 9 . tar . gz " ] , <nl> use_category = [ " dataplane " ] , <nl> + cpe = " N / A " , <nl> ) , <nl> com_google_googletest = dict ( <nl> sha256 = " 9dc9157a9a1551ec7a7e43daea9a694a0bb5fb8bec81235d8a1e6ef64c716dcb " , <nl> DEPENDENCY_REPOSITORIES = dict ( <nl> strip_prefix = " protobuf - 3 . 10 . 1 " , <nl> urls = [ " https : / / github . com / protocolbuffers / protobuf / releases / download / v3 . 10 . 1 / protobuf - all - 3 . 10 . 1 . tar . gz " ] , <nl> use_category = [ " dataplane " , " controlplane " ] , <nl> + cpe = " N / A " , <nl> ) , <nl> grpc_httpjson_transcoding = dict ( <nl> sha256 = " 62c8cb5ea2cca1142cde9d4a0778c52c6022345c3268c60ef81666946b958ad5 " , <nl> DEPENDENCY_REPOSITORIES = dict ( <nl> # 2020 - 03 - 02 <nl> urls = [ " https : / / github . com / grpc - ecosystem / grpc - httpjson - transcoding / archive / faf8af1e9788cd4385b94c8f85edab5ea5d4b2d6 . tar . gz " ] , <nl> use_category = [ " dataplane " ] , <nl> + cpe = " N / A " , <nl> ) , <nl> io_bazel_rules_go = dict ( <nl> sha256 = " e88471aea3a3a4f19ec1310a55ba94772d087e9ce46e41ae38ecebe17935de7b " , <nl> DEPENDENCY_REPOSITORIES = dict ( <nl> # 2020 - 03 - 24 <nl> urls = [ " https : / / github . com / census - instrumentation / opencensus - cpp / archive / 04ed0211931f12b03c1a76b3907248ca4db7bc90 . tar . gz " ] , <nl> use_category = [ " observability " ] , <nl> + cpe = " N / A " , <nl> ) , <nl> com_github_curl = dict ( <nl> sha256 = " 01ae0c123dee45b01bbaef94c0bc00ed2aec89cb2ee0fd598e0d302a6b5e0a98 " , <nl> strip_prefix = " curl - 7 . 69 . 1 " , <nl> urls = [ " https : / / github . com / curl / curl / releases / download / curl - 7_69_1 / curl - 7 . 69 . 1 . tar . gz " ] , <nl> use_category = [ " dataplane " ] , <nl> + cpe = " N / A " , <nl> ) , <nl> com_googlesource_chromium_v8 = dict ( <nl> # This archive was created using https : / / storage . googleapis . com / envoyproxy - wee8 / wee8 - archive . sh <nl> DEPENDENCY_REPOSITORIES = dict ( <nl> sha256 = " 03ff00e41cf259db473dfade9548493e4a2372c0b701a66cd7ff76215bd55a64 " , <nl> urls = [ " https : / / storage . googleapis . com / envoyproxy - wee8 / wee8 - 8 . 1 . 307 . 28 . tar . gz " ] , <nl> use_category = [ " dataplane " ] , <nl> + cpe = " N / A " , <nl> ) , <nl> com_googlesource_quiche = dict ( <nl> # Static snapshot of https : / / quiche . googlesource . com / quiche / + archive / 41c9fdead26b31deefae3c325a2cf1a873688ba3 . tar . 
gz <nl> sha256 = " 75af53154402e1654cfd32d8aaeed5fab4dbb79d3cab8c9866019d5369c1889e " , <nl> urls = [ " https : / / storage . googleapis . com / quiche - envoy - integration / 41c9fdead26b31deefae3c325a2cf1a873688ba3 . tar . gz " ] , <nl> use_category = [ " dataplane " ] , <nl> + cpe = " N / A " , <nl> ) , <nl> com_googlesource_googleurl = dict ( <nl> # Static snapshot of https : / / quiche . googlesource . com / quiche / + archive / googleurl_dbf5ad147f60afc125e99db7549402af49a5eae8 . tar . gz <nl> sha256 = " b40cd22cadba577b7281a76db66f6a66dd744edbad8cc2c861c2c976ef721e4d " , <nl> urls = [ " https : / / storage . googleapis . com / quiche - envoy - integration / googleurl_dbf5ad147f60afc125e99db7549402af49a5eae8 . tar . gz " ] , <nl> use_category = [ " dataplane " ] , <nl> + cpe = " N / A " , <nl> ) , <nl> com_google_cel_cpp = dict ( <nl> sha256 = " 326ec397b55e39f48bd5380ccded1af5b04653ee96e769cd4d694f9a3bacef50 " , <nl> DEPENDENCY_REPOSITORIES = dict ( <nl> # 2020 - 02 - 26 <nl> urls = [ " https : / / github . com / google / cel - cpp / archive / 80e1cca533190d537a780ad007e8db64164c582e . tar . gz " ] , <nl> use_category = [ " dataplane " ] , <nl> + cpe = " N / A " , <nl> ) , <nl> com_googlesource_code_re2 = dict ( <nl> sha256 = " 04ee2aaebaa5038554683329afc494e684c30f82f2a1e47eb62450e59338f84d " , <nl> strip_prefix = " re2 - 2020 - 03 - 03 " , <nl> urls = [ " https : / / github . com / google / re2 / archive / 2020 - 03 - 03 . tar . gz " ] , <nl> use_category = [ " dataplane " ] , <nl> + cpe = " N / A " , <nl> ) , <nl> # Included to access FuzzedDataProvider . h . This is compiler agnostic but <nl> # provided as part of the compiler - rt source distribution . We can ' t use the <nl> DEPENDENCY_REPOSITORIES = dict ( <nl> # 2019 - 11 - 19 <nl> urls = [ " https : / / github . com / protocolbuffers / upb / archive / 8a3ae1ef3e3e3f26b45dec735c5776737fc7247f . tar . gz " ] , <nl> use_category = [ " dataplane " , " controlplane " ] , <nl> + cpe = " N / A " , <nl> ) , <nl> kafka_source = dict ( <nl> sha256 = " e7b748a62e432b5770db6dbb3b034c68c0ea212812cb51603ee7f3a8a35f06be " , <nl> strip_prefix = " kafka - 2 . 4 . 0 / clients / src / main / resources / common / message " , <nl> urls = [ " https : / / github . com / apache / kafka / archive / 2 . 4 . 0 . zip " ] , <nl> use_category = [ " dataplane " ] , <nl> + cpe = " cpe : 2 . 3 : a : apache : kafka : * " , <nl> ) , <nl> kafka_server_binary = dict ( <nl> sha256 = " b9582bab0c3e8d131953b1afa72d6885ca1caae0061c2623071e7f396f2ccfee " , <nl>
build: Add NIST CPE IDs of envoy dependencies ()
envoyproxy/envoy
b204cdbaac95cf41e0bd8471e1d4bbb45542ffdc
2020-05-05T01:20:27Z
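The Envoy commit above introduces a machine-checkable annotation scheme for external dependencies: every entry in DEPENDENCY_REPOSITORIES must declare use_category, and must also declare a cpe identifier unless at least one of its use categories appears in USE_CATEGORIES_WITH_CPE_OPTIONAL ("build", "test", "other"). Because repository_locations.bzl is deliberately kept free of Bazel constructs, the same check can run in ordinary Python tooling. The sketch below restates the validation and the annotation stripping done by _get_location in plain Python; the helper names and the exception type are assumptions, only the rule itself comes from the diff.

USE_CATEGORIES_WITH_CPE_OPTIONAL = ["build", "test", "other"]
DEPENDENCY_ANNOTATIONS = ["use_category", "cpe"]

def check_annotations(repositories):
    # repositories is the DEPENDENCY_REPOSITORIES dict loaded from
    # repository_locations.bzl; fail fast on missing annotations.
    for name, location in repositories.items():
        categories = location.get("use_category")
        if categories is None:
            raise ValueError("'use_category' missing for dependency " + name)
        # A CPE is required unless at least one of the dependency's use
        # categories is in the optional list.
        cpe_optional = any(c in USE_CATEGORIES_WITH_CPE_OPTIONAL for c in categories)
        if "cpe" not in location and not cpe_optional:
            raise ValueError("'cpe' missing for dependency " + name)

def strip_annotations(location):
    # Mirrors _get_location: drop tracking-only attributes before the location
    # is handed to an http_archive-style repository rule.
    return {k: v for k, v in location.items() if k not in DEPENDENCY_ANNOTATIONS}

A dependency marked use_category = ["dataplane"] with no cpe would be rejected, while one marked ["test"] passes without a CPE, matching the behaviour added to _repository_locations().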
mmm a / common / params . py <nl> ppp b / common / params . py <nl> class UnknownKeyName ( Exception ) : <nl> " AthenadPid " : [ TxType . PERSISTENT ] , <nl> " CalibrationParams " : [ TxType . PERSISTENT ] , <nl> " CarParams " : [ TxType . CLEAR_ON_MANAGER_START , TxType . CLEAR_ON_PANDA_DISCONNECT ] , <nl> + " CarParamsCache " : [ TxType . CLEAR_ON_MANAGER_START , TxType . CLEAR_ON_PANDA_DISCONNECT ] , <nl> " CarVin " : [ TxType . CLEAR_ON_MANAGER_START , TxType . CLEAR_ON_PANDA_DISCONNECT ] , <nl> " CommunityFeaturesToggle " : [ TxType . PERSISTENT ] , <nl> " CompletedTrainingVersion " : [ TxType . PERSISTENT ] , <nl> mmm a / selfdrive / car / car_helpers . py <nl> ppp b / selfdrive / car / car_helpers . py <nl> <nl> from common . basedir import BASEDIR <nl> from selfdrive . car . fingerprints import eliminate_incompatible_cars , all_known_cars <nl> from selfdrive . car . vin import get_vin , VIN_UNKNOWN <nl> - from selfdrive . car . fw_versions import get_fw_versions <nl> + from selfdrive . car . fw_versions import get_fw_versions , match_fw_to_car <nl> from selfdrive . swaglog import cloudlog <nl> import cereal . messaging as messaging <nl> from selfdrive . car import gen_empty_fingerprint <nl> def fingerprint ( logcan , sendcan , has_relay ) : <nl> if has_relay : <nl> # Vin query only reliably works thorugh OBDII <nl> bus = 1 <nl> - addr , vin = get_vin ( logcan , sendcan , bus ) <nl> - fw_candidates , car_fw = get_fw_versions ( logcan , sendcan , bus ) <nl> + <nl> + cached_params = Params ( ) . get ( " CarParamsCache " ) <nl> + if cached_params is not None : <nl> + cloudlog . warning ( " Using cached CarParams " ) <nl> + CP = car . CarParams . from_bytes ( cached_params ) <nl> + vin = CP . carVin <nl> + car_fw = list ( CP . carFw ) <nl> + else : <nl> + _ , vin = get_vin ( logcan , sendcan , bus ) <nl> + car_fw = get_fw_versions ( logcan , sendcan , bus ) <nl> + <nl> + fw_candidates = match_fw_to_car ( car_fw ) <nl> else : <nl> vin = VIN_UNKNOWN <nl> fw_candidates , car_fw = set ( ) , [ ] <nl> mmm a / selfdrive / car / fw_versions . py <nl> ppp b / selfdrive / car / fw_versions . py <nl> def match_fw_to_car ( fw_versions ) : <nl> candidates = FW_VERSIONS <nl> invalid = [ ] <nl> <nl> + fw_versions_dict = { } <nl> + for fw in fw_versions : <nl> + addr = fw . address <nl> + sub_addr = fw . subAddress if fw . subAddress ! = 0 else None <nl> + fw_versions_dict [ ( addr , sub_addr ) ] = fw . fwVersion <nl> + <nl> for candidate , fws in candidates . items ( ) : <nl> for ecu , expected_versions in fws . items ( ) : <nl> ecu_type = ecu [ 0 ] <nl> addr = ecu [ 1 : ] <nl> - <nl> - found_version = fw_versions . get ( addr , None ) <nl> + found_version = fw_versions_dict . get ( addr , None ) <nl> <nl> # TODO : RAV4 , COROLLA esp sometimes doesn ' t show up <nl> if ecu_type = = Ecu . esp and candidate in [ TOYOTA . RAV4 , TOYOTA . COROLLA ] and found_version is None : <nl> def get_fw_versions ( logcan , sendcan , bus , extra = None , timeout = 0 . 1 , debug = False , <nl> <nl> car_fw . append ( f ) <nl> <nl> - candidates = match_fw_to_car ( fw_versions ) <nl> - return candidates , car_fw <nl> + return car_fw <nl> <nl> <nl> if __name__ = = " __main__ " : <nl> def get_fw_versions ( logcan , sendcan , bus , extra = None , timeout = 0 . 1 , debug = False , <nl> import cereal . messaging as messaging <nl> from selfdrive . car . vin import get_vin <nl> <nl> - <nl> parser = argparse . ArgumentParser ( description = ' Get firmware version of ECUs ' ) <nl> parser . 
add_argument ( ' - - scan ' , action = ' store_true ' ) <nl> parser . add_argument ( ' - - debug ' , action = ' store_true ' ) <nl> new file mode 100644 <nl> index 0000000000 . . e69de29bb2 <nl> deleted file mode 100755 <nl> index c0ef060ac1 . . 0000000000 <nl> mmm a / selfdrive / car / tests / test_carstates . py <nl> ppp / dev / null <nl> <nl> - # ! / usr / bin / env python3 <nl> - import os <nl> - import unittest <nl> - import requests <nl> - from cereal import car <nl> - <nl> - from tools . lib . logreader import LogReader <nl> - <nl> - from opendbc . can . parser import CANParser <nl> - <nl> - from selfdrive . car . honda . values import CAR as HONDA <nl> - from selfdrive . car . honda . interface import CarInterface as HondaCarInterface <nl> - from selfdrive . car . honda . carcontroller import CarController as HondaCarController <nl> - from selfdrive . car . honda . radar_interface import RadarInterface as HondaRadarInterface <nl> - <nl> - from selfdrive . car . toyota . values import CAR as TOYOTA <nl> - from selfdrive . car . toyota . interface import CarInterface as ToyotaCarInterface <nl> - from selfdrive . car . toyota . carcontroller import CarController as ToyotaCarController <nl> - from selfdrive . car . toyota . radar_interface import RadarInterface as ToyotaRadarInterface <nl> - <nl> - BASE_URL = " https : / / commadataci . blob . core . windows . net / openpilotci / " <nl> - <nl> - def run_route ( route , car_name , CarInterface , CarController ) : <nl> - lr = LogReader ( " / tmp / " + route + " . bz2 " ) <nl> - print ( lr ) <nl> - <nl> - cps = [ ] <nl> - def CANParserHook ( dbc_name , signals , checks = None , bus = 0 , sendcan = False , tcp_addr = " 127 . 0 . 0 . 1 " , timeout = - 1 ) : <nl> - cp = CANParser ( dbc_name , signals , checks , bus , sendcan , " " , timeout ) <nl> - cps . append ( cp ) <nl> - return cp <nl> - <nl> - params = CarInterface . get_params ( car_name ) <nl> - CI = CarInterface ( params , CarController , CANParserHook ) <nl> - print ( CI ) <nl> - <nl> - i = 0 <nl> - last_monotime = 0 <nl> - for msg in lr : <nl> - if msg . which ( ) = = ' can ' : <nl> - msg_bytes = msg . as_builder ( ) . to_bytes ( ) <nl> - monotime = msg . logMonoTime <nl> - for x in cps : <nl> - x . update_string ( monotime , msg_bytes ) <nl> - <nl> - if ( monotime - last_monotime ) > 0 . 01 : <nl> - control = car . CarControl . new_message ( ) <nl> - CS = CI . update ( control ) <nl> - if i % 100 = = 0 : <nl> - print ( ' \ 033 [ 2J \ 033 [ H ' + str ( CS ) ) <nl> - last_monotime = monotime <nl> - i + = 1 <nl> - <nl> - return True <nl> - <nl> - def run_route_radar ( route , car_name , RadarInterface , CarInterface ) : <nl> - lr = LogReader ( " / tmp / " + route + " . bz2 " ) <nl> - print ( lr ) <nl> - <nl> - cps = [ ] <nl> - def CANParserHook ( dbc_name , signals , checks = None , bus = 0 , sendcan = False , tcp_addr = " 127 . 0 . 0 . 1 " , timeout = - 1 ) : <nl> - cp = CANParser ( dbc_name , signals , checks , bus , sendcan , " " , timeout ) <nl> - print ( signals ) <nl> - cps . append ( cp ) <nl> - return cp <nl> - <nl> - params = CarInterface . get_params ( car_name ) <nl> - RI = RadarInterface ( params , CANParserHook ) <nl> - <nl> - i = 0 <nl> - updated_messages = set ( ) <nl> - for msg in lr : <nl> - if msg . which ( ) = = ' can ' : <nl> - msg_bytes = msg . as_builder ( ) . to_bytes ( ) <nl> - _ , vls = cps [ 0 ] . update_string ( msg . logMonoTime , msg_bytes ) <nl> - updated_messages . update ( vls ) <nl> - if RI . trigger_msg in updated_messages : <nl> - ret = RI . 
_update ( updated_messages ) <nl> - if i % 10 = = 0 : <nl> - print ( ' \ 033 [ 2J \ 033 [ H ' + str ( ret ) ) <nl> - updated_messages = set ( ) <nl> - i + = 1 <nl> - <nl> - return True <nl> - <nl> - <nl> - # TODO : make this generic <nl> - class TestCarInterface ( unittest . TestCase ) : <nl> - def setUp ( self ) : <nl> - self . routes = { <nl> - HONDA . CIVIC : " b0c9d2329ad1606b | 2019 - 05 - 30 - - 20 - 23 - 57 " , <nl> - HONDA . ACCORD : " 0375fdf7b1ce594d | 2019 - 05 - 21 - - 20 - 10 - 33 " , <nl> - TOYOTA . PRIUS : " 38bfd238edecbcd7 | 2019 - 06 - 07 - - 10 - 15 - 25 " , <nl> - TOYOTA . RAV4 : " 02ec6bea180a4d36 | 2019 - 04 - 17 - - 11 - 21 - 35 " <nl> - } <nl> - <nl> - for route in self . routes . values ( ) : <nl> - route_filename = route + " . bz2 " <nl> - if not os . path . isfile ( " / tmp / " + route_filename ) : <nl> - with open ( " / tmp / " + route + " . bz2 " , " w " ) as f : <nl> - f . write ( requests . get ( BASE_URL + route_filename ) . content ) <nl> - <nl> - def test_parser_civic ( self ) : <nl> - # self . assertTrue ( run_route ( self . routes [ HONDA . CIVIC ] , HONDA . CIVIC , HondaCarInterface , HondaCarController ) ) <nl> - pass <nl> - <nl> - def test_parser_accord ( self ) : <nl> - # one honda <nl> - # self . assertTrue ( run_route ( self . routes [ HONDA . ACCORD ] , HONDA . ACCORD , HondaCarInterface , HondaCarController ) ) <nl> - pass <nl> - <nl> - def test_parser_prius ( self ) : <nl> - # self . assertTrue ( run_route ( self . routes [ TOYOTA . PRIUS ] , TOYOTA . PRIUS , ToyotaCarInterface , ToyotaCarController ) ) <nl> - pass <nl> - <nl> - def test_parser_rav4 ( self ) : <nl> - # hmm , rav4 is broken <nl> - # self . assertTrue ( run_route ( self . routes [ TOYOTA . RAV4 ] , TOYOTA . RAV4 , ToyotaCarInterface , ToyotaCarController ) ) <nl> - pass <nl> - <nl> - def test_radar_civic ( self ) : <nl> - # self . assertTrue ( run_route_radar ( self . routes [ HONDA . CIVIC ] , HONDA . CIVIC , HondaRadarInterface , HondaCarInterface ) ) <nl> - pass <nl> - <nl> - def test_radar_prius ( self ) : <nl> - self . assertTrue ( run_route_radar ( self . routes [ TOYOTA . PRIUS ] , TOYOTA . PRIUS , ToyotaRadarInterface , ToyotaCarInterface ) ) <nl> - pass <nl> - <nl> - <nl> - if __name__ = = " __main__ " : <nl> - unittest . main ( ) <nl> - <nl> new file mode 100755 <nl> index 0000000000 . . ff07f9fd7d <nl> mmm / dev / null <nl> ppp b / selfdrive / car / tests / test_fw_fingerprint . py <nl> <nl> + # ! / usr / bin / env python3 <nl> + import unittest <nl> + from cereal import car <nl> + from selfdrive . car . fw_versions import match_fw_to_car <nl> + from selfdrive . car . toyota . values import CAR as TOYOTA <nl> + <nl> + CarFw = car . CarParams . CarFw <nl> + Ecu = car . CarParams . Ecu <nl> + <nl> + <nl> + class TestFwFingerprint ( unittest . TestCase ) : <nl> + def assertFingerprints ( self , candidates , expected ) : <nl> + candidates = list ( candidates ) <nl> + self . assertEqual ( len ( candidates ) , 1 ) <nl> + self . assertEqual ( candidates [ 0 ] , TOYOTA . RAV4_TSS2 ) <nl> + <nl> + def test_rav4_tss2 ( self ) : <nl> + CP = car . CarParams . new_message ( ) <nl> + CP . carFw = [ <nl> + { " ecu " : Ecu . esp , <nl> + " fwVersion " : b " \ x01F15260R210 \ x00 \ x00 \ x00 \ x00 \ x00 \ x00 " , <nl> + " address " : 1968 , <nl> + " subAddress " : 0 } , <nl> + { " ecu " : Ecu . 
engine , <nl> + " fwVersion " : b " \ x028966342Y8000 \ x00 \ x00 \ x00 \ x00897CF1201001 \ x00 \ x00 \ x00 \ x00 " , <nl> + " address " : 1792 , <nl> + " subAddress " : 0 } , <nl> + { " ecu " : Ecu . eps , <nl> + " fwVersion " : b " \ x028965B0R01200 \ x00 \ x00 \ x00 \ x008965B0R02200 \ x00 \ x00 \ x00 \ x00 " , <nl> + " address " : 1953 , <nl> + " subAddress " : 0 } , <nl> + { " ecu " : Ecu . fwdRadar , <nl> + " fwVersion " : b " \ x018821F3301200 \ x00 \ x00 \ x00 \ x00 " , <nl> + " address " : 1872 , <nl> + " subAddress " : 15 } , <nl> + { " ecu " : Ecu . fwdCamera , <nl> + " fwVersion " : b " \ x028646F4203300 \ x00 \ x00 \ x00 \ x008646G26011A0 \ x00 \ x00 \ x00 \ x00 " , <nl> + " address " : 1872 , <nl> + " subAddress " : 109 } <nl> + ] <nl> + <nl> + self . assertFingerprints ( match_fw_to_car ( CP . carFw ) , TOYOTA . RAV4_TSS2 ) <nl> + <nl> + <nl> + if __name__ = = " __main__ " : <nl> + unittest . main ( ) <nl> deleted file mode 100755 <nl> index fb912ab5b4 . . 0000000000 <nl> mmm a / selfdrive / car / tests / test_honda_carcontroller . py <nl> ppp / dev / null <nl> <nl> - # ! / usr / bin / env python3 <nl> - <nl> - import unittest <nl> - from cereal import car , log <nl> - from selfdrive . car . honda . values import CAR as HONDA <nl> - from selfdrive . car . honda . carcontroller import CarController <nl> - from selfdrive . car . honda . interface import CarInterface <nl> - from common . realtime import sec_since_boot <nl> - <nl> - from selfdrive . boardd . boardd import can_list_to_can_capnp <nl> - from selfdrive . config import Conversions as CV <nl> - import cereal . messaging as messaging <nl> - from cereal . services import service_list <nl> - from opendbc . can . parser import CANParser <nl> - <nl> - import zmq <nl> - import time <nl> - import numpy as np <nl> - <nl> - <nl> - class TestHondaCarcontroller ( unittest . TestCase ) : <nl> - def test_honda_lkas_hud ( self ) : <nl> - self . longMessage = True <nl> - <nl> - sendcan = messaging . pub_sock ( ' sendcan ' ) <nl> - <nl> - car_name = HONDA . CIVIC <nl> - params = CarInterface . get_params ( car_name ) <nl> - CI = CarInterface ( params , CarController ) <nl> - <nl> - # Get parser <nl> - parser_signals = [ <nl> - ( ' SET_ME_X41 ' , ' LKAS_HUD ' , 0 ) , <nl> - ( ' SET_ME_X48 ' , ' LKAS_HUD ' , 0 ) , <nl> - ( ' STEERING_REQUIRED ' , ' LKAS_HUD ' , 0 ) , <nl> - ( ' SOLID_LANES ' , ' LKAS_HUD ' , 0 ) , <nl> - ( ' LEAD_SPEED ' , ' RADAR_HUD ' , 0 ) , <nl> - ( ' LEAD_STATE ' , ' RADAR_HUD ' , 0 ) , <nl> - ( ' LEAD_DISTANCE ' , ' RADAR_HUD ' , 0 ) , <nl> - ( ' ACC_ALERTS ' , ' RADAR_HUD ' , 0 ) , <nl> - ] <nl> - <nl> - VA = car . CarControl . HUDControl . VisualAlert <nl> - <nl> - parser = CANParser ( CI . cp . dbc_name , parser_signals , [ ] , 0 , sendcan = True , tcp_addr = " 127 . 0 . 0 . 1 " ) <nl> - time . sleep ( 0 . 2 ) # Slow joiner syndrome <nl> - <nl> - <nl> - alerts = { <nl> - VA . none : 0 , <nl> - VA . brakePressed : 10 , <nl> - VA . wrongGear : 6 , <nl> - VA . seatbeltUnbuckled : 5 , <nl> - VA . speedTooHigh : 8 , <nl> - } <nl> - <nl> - for steer_required in [ True , False ] : <nl> - for lanes in [ True , False ] : <nl> - for alert in alerts . keys ( ) : <nl> - control = car . CarControl . new_message ( ) <nl> - hud = car . CarControl . HUDControl . new_message ( ) <nl> - <nl> - control . enabled = True <nl> - <nl> - if steer_required : <nl> - hud . visualAlert = VA . steerRequired <nl> - else : <nl> - hud . visualAlert = alert <nl> - <nl> - hud . lanesVisible = lanes <nl> - control . 
hudControl = hud <nl> - <nl> - CI . update ( control ) <nl> - <nl> - for _ in range ( 25 ) : <nl> - can_sends = CI . apply ( control ) <nl> - sendcan . send ( can_list_to_can_capnp ( can_sends , msgtype = ' sendcan ' ) ) <nl> - <nl> - for _ in range ( 5 ) : <nl> - parser . update ( int ( sec_since_boot ( ) * 1e9 ) , False ) <nl> - time . sleep ( 0 . 01 ) <nl> - <nl> - self . assertEqual ( 0x41 , parser . vl [ ' LKAS_HUD ' ] [ ' SET_ME_X41 ' ] ) <nl> - self . assertEqual ( 0x48 , parser . vl [ ' LKAS_HUD ' ] [ ' SET_ME_X48 ' ] ) <nl> - self . assertEqual ( steer_required , parser . vl [ ' LKAS_HUD ' ] [ ' STEERING_REQUIRED ' ] ) <nl> - self . assertEqual ( lanes , parser . vl [ ' LKAS_HUD ' ] [ ' SOLID_LANES ' ] ) <nl> - <nl> - self . assertEqual ( 0x1fe , parser . vl [ ' RADAR_HUD ' ] [ ' LEAD_SPEED ' ] ) <nl> - self . assertEqual ( 0x7 , parser . vl [ ' RADAR_HUD ' ] [ ' LEAD_STATE ' ] ) <nl> - self . assertEqual ( 0x1e , parser . vl [ ' RADAR_HUD ' ] [ ' LEAD_DISTANCE ' ] ) <nl> - self . assertEqual ( alerts [ alert ] if not steer_required else 0 , parser . vl [ ' RADAR_HUD ' ] [ ' ACC_ALERTS ' ] ) <nl> - <nl> - def test_honda_ui_cruise_speed ( self ) : <nl> - self . longMessage = True <nl> - <nl> - sendcan = messaging . pub_sock ( ' sendcan ' ) <nl> - <nl> - car_name = HONDA . CIVIC <nl> - params = CarInterface . get_params ( car_name ) <nl> - CI = CarInterface ( params , CarController ) <nl> - <nl> - # Get parser <nl> - parser_signals = [ <nl> - # 780 - 0x30c <nl> - ( ' CRUISE_SPEED ' , ' ACC_HUD ' , 0 ) , <nl> - ] <nl> - parser = CANParser ( CI . cp . dbc_name , parser_signals , [ ] , 0 , sendcan = True , tcp_addr = " 127 . 0 . 0 . 1 " ) <nl> - time . sleep ( 0 . 2 ) # Slow joiner syndrome <nl> - <nl> - for cruise_speed in np . linspace ( 0 , 50 , 20 ) : <nl> - for visible in [ False , True ] : <nl> - control = car . CarControl . new_message ( ) <nl> - hud = car . CarControl . HUDControl . new_message ( ) <nl> - hud . setSpeed = float ( cruise_speed ) <nl> - hud . speedVisible = visible <nl> - control . enabled = True <nl> - control . hudControl = hud <nl> - <nl> - CI . update ( control ) <nl> - <nl> - for _ in range ( 25 ) : <nl> - can_sends = CI . apply ( control ) <nl> - sendcan . send ( can_list_to_can_capnp ( can_sends , msgtype = ' sendcan ' ) ) <nl> - <nl> - for _ in range ( 5 ) : <nl> - parser . update ( int ( sec_since_boot ( ) * 1e9 ) , False ) <nl> - time . sleep ( 0 . 01 ) <nl> - <nl> - expected_cruise_speed = round ( cruise_speed * CV . MS_TO_KPH ) <nl> - if not visible : <nl> - expected_cruise_speed = 255 <nl> - <nl> - self . assertAlmostEqual ( parser . vl [ ' ACC_HUD ' ] [ ' CRUISE_SPEED ' ] , expected_cruise_speed , msg = " Car : % s , speed : % . 2f " % ( car_name , cruise_speed ) ) <nl> - <nl> - def test_honda_ui_pcm_accel ( self ) : <nl> - self . longMessage = True <nl> - <nl> - sendcan = messaging . pub_sock ( ' sendcan ' ) <nl> - <nl> - car_name = HONDA . CIVIC <nl> - params = CarInterface . get_params ( car_name ) <nl> - CI = CarInterface ( params , CarController ) <nl> - <nl> - # Get parser <nl> - parser_signals = [ <nl> - # 780 - 0x30c <nl> - ( ' PCM_GAS ' , ' ACC_HUD ' , 0 ) , <nl> - <nl> - ] <nl> - parser = CANParser ( CI . cp . dbc_name , parser_signals , [ ] , 0 , sendcan = True , tcp_addr = " 127 . 0 . 0 . 1 " ) <nl> - time . sleep ( 0 . 2 ) # Slow joiner syndrome <nl> - <nl> - for pcm_accel in np . linspace ( 0 , 1 , 25 ) : <nl> - cc = car . CarControl . CruiseControl . new_message ( ) <nl> - cc . 
accelOverride = float ( pcm_accel ) <nl> - control = car . CarControl . new_message ( ) <nl> - control . enabled = True <nl> - control . cruiseControl = cc <nl> - <nl> - CI . update ( control ) <nl> - <nl> - for _ in range ( 25 ) : <nl> - can_sends = CI . apply ( control ) <nl> - sendcan . send ( can_list_to_can_capnp ( can_sends , msgtype = ' sendcan ' ) ) <nl> - <nl> - for _ in range ( 5 ) : <nl> - parser . update ( int ( sec_since_boot ( ) * 1e9 ) , False ) <nl> - time . sleep ( 0 . 01 ) <nl> - <nl> - self . assertAlmostEqual ( parser . vl [ ' ACC_HUD ' ] [ ' PCM_GAS ' ] , int ( 0xc6 * pcm_accel ) , msg = " Car : % s , accel : % . 2f " % ( car_name , pcm_accel ) ) <nl> - <nl> - def test_honda_ui_pcm_speed ( self ) : <nl> - self . longMessage = True <nl> - <nl> - sendcan = messaging . pub_sock ( ' sendcan ' ) <nl> - <nl> - car_name = HONDA . CIVIC <nl> - params = CarInterface . get_params ( car_name ) <nl> - CI = CarInterface ( params , CarController ) <nl> - <nl> - # Get parser <nl> - parser_signals = [ <nl> - # 780 - 0x30c <nl> - ( ' PCM_SPEED ' , ' ACC_HUD ' , 99 ) , <nl> - ] <nl> - parser = CANParser ( CI . cp . dbc_name , parser_signals , [ ] , 0 , sendcan = True , tcp_addr = " 127 . 0 . 0 . 1 " ) <nl> - time . sleep ( 0 . 2 ) # Slow joiner syndrome <nl> - <nl> - for pcm_speed in np . linspace ( 0 , 100 , 20 ) : <nl> - cc = car . CarControl . CruiseControl . new_message ( ) <nl> - cc . speedOverride = float ( pcm_speed * CV . KPH_TO_MS ) <nl> - control = car . CarControl . new_message ( ) <nl> - control . enabled = True <nl> - control . cruiseControl = cc <nl> - <nl> - CI . update ( control ) <nl> - <nl> - for _ in range ( 25 ) : <nl> - can_sends = CI . apply ( control ) <nl> - sendcan . send ( can_list_to_can_capnp ( can_sends , msgtype = ' sendcan ' ) ) <nl> - <nl> - for _ in range ( 5 ) : <nl> - parser . update ( int ( sec_since_boot ( ) * 1e9 ) , False ) <nl> - time . sleep ( 0 . 01 ) <nl> - <nl> - self . assertAlmostEqual ( parser . vl [ ' ACC_HUD ' ] [ ' PCM_SPEED ' ] , round ( pcm_speed , 2 ) , msg = " Car : % s , speed : % . 2f " % ( car_name , pcm_speed ) ) <nl> - <nl> - def test_honda_ui_hud_lead ( self ) : <nl> - self . longMessage = True <nl> - <nl> - sendcan = messaging . pub_sock ( ' sendcan ' ) <nl> - <nl> - for car_name in [ HONDA . CIVIC ] : <nl> - params = CarInterface . get_params ( car_name ) <nl> - CI = CarInterface ( params , CarController ) <nl> - <nl> - # Get parser <nl> - parser_signals = [ <nl> - # 780 - 0x30c <nl> - # 3 : acc off , 2 : solid car ( hud_show_car ) , 1 : dashed car ( enabled , not hud show car ) , 0 : no car ( not enabled ) <nl> - ( ' HUD_LEAD ' , ' ACC_HUD ' , 99 ) , <nl> - ( ' SET_ME_X03 ' , ' ACC_HUD ' , 99 ) , <nl> - ( ' SET_ME_X03_2 ' , ' ACC_HUD ' , 99 ) , <nl> - ( ' SET_ME_X01 ' , ' ACC_HUD ' , 99 ) , <nl> - ( ' ENABLE_MINI_CAR ' , ' ACC_HUD ' , 99 ) , <nl> - ] <nl> - parser = CANParser ( CI . cp . dbc_name , parser_signals , [ ] , 0 , sendcan = True , tcp_addr = " 127 . 0 . 0 . 1 " ) <nl> - time . sleep ( 0 . 2 ) # Slow joiner syndrome <nl> - <nl> - for enabled in [ True , False ] : <nl> - for leadVisible in [ True , False ] : <nl> - <nl> - control = car . CarControl . new_message ( ) <nl> - hud = car . CarControl . HUDControl . new_message ( ) <nl> - hud . leadVisible = leadVisible <nl> - control . enabled = enabled <nl> - control . hudControl = hud <nl> - CI . update ( control ) <nl> - <nl> - for _ in range ( 25 ) : <nl> - can_sends = CI . apply ( control ) <nl> - sendcan . 
send ( can_list_to_can_capnp ( can_sends , msgtype = ' sendcan ' ) ) <nl> - <nl> - for _ in range ( 5 ) : <nl> - parser . update ( int ( sec_since_boot ( ) * 1e9 ) , False ) <nl> - time . sleep ( 0 . 01 ) <nl> - <nl> - if not enabled : <nl> - hud_lead = 0 <nl> - else : <nl> - hud_lead = 2 if leadVisible else 1 <nl> - self . assertEqual ( int ( parser . vl [ ' ACC_HUD ' ] [ ' HUD_LEAD ' ] ) , hud_lead , msg = " Car : % s , lead : % s , enabled % s " % ( car_name , leadVisible , enabled ) ) <nl> - self . assertTrue ( parser . vl [ ' ACC_HUD ' ] [ ' ENABLE_MINI_CAR ' ] ) <nl> - self . assertEqual ( 0x3 , parser . vl [ ' ACC_HUD ' ] [ ' SET_ME_X03 ' ] ) <nl> - self . assertEqual ( 0x3 , parser . vl [ ' ACC_HUD ' ] [ ' SET_ME_X03_2 ' ] ) <nl> - self . assertEqual ( 0x1 , parser . vl [ ' ACC_HUD ' ] [ ' SET_ME_X01 ' ] ) <nl> - <nl> - <nl> - def test_honda_steering ( self ) : <nl> - self . longMessage = True <nl> - limits = { <nl> - HONDA . CIVIC : 0x1000 , <nl> - HONDA . ODYSSEY : 0x1000 , <nl> - HONDA . PILOT : 0x1000 , <nl> - HONDA . CRV : 0x3e8 , <nl> - HONDA . ACURA_ILX : 0xF00 , <nl> - HONDA . ACURA_RDX : 0x3e8 , <nl> - } <nl> - <nl> - sendcan = messaging . pub_sock ( ' sendcan ' ) <nl> - <nl> - for car_name in limits . keys ( ) : <nl> - params = CarInterface . get_params ( car_name ) <nl> - CI = CarInterface ( params , CarController ) <nl> - <nl> - # Get parser <nl> - parser_signals = [ <nl> - ( ' STEER_TORQUE ' , ' STEERING_CONTROL ' , 0 ) , <nl> - ] <nl> - parser = CANParser ( CI . cp . dbc_name , parser_signals , [ ] , 0 , sendcan = True , tcp_addr = " 127 . 0 . 0 . 1 " ) <nl> - time . sleep ( 0 . 2 ) # Slow joiner syndrome <nl> - <nl> - for steer in np . linspace ( - 1 . , 1 . , 25 ) : <nl> - control = car . CarControl . new_message ( ) <nl> - actuators = car . CarControl . Actuators . new_message ( ) <nl> - actuators . steer = float ( steer ) <nl> - control . enabled = True <nl> - control . actuators = actuators <nl> - CI . update ( control ) <nl> - <nl> - CI . CS . steer_not_allowed = False <nl> - <nl> - for _ in range ( 25 ) : <nl> - can_sends = CI . apply ( control ) <nl> - sendcan . send ( can_list_to_can_capnp ( can_sends , msgtype = ' sendcan ' ) ) <nl> - <nl> - for _ in range ( 5 ) : <nl> - parser . update ( int ( sec_since_boot ( ) * 1e9 ) , False ) <nl> - time . sleep ( 0 . 01 ) <nl> - <nl> - torque = parser . vl [ ' STEERING_CONTROL ' ] [ ' STEER_TORQUE ' ] <nl> - self . assertAlmostEqual ( int ( limits [ car_name ] * - actuators . steer ) , torque , msg = " Car : % s , steer % . 2f " % ( car_name , steer ) ) <nl> - <nl> - sendcan . close ( ) <nl> - <nl> - def test_honda_gas ( self ) : <nl> - self . longMessage = True <nl> - <nl> - sendcan = messaging . pub_sock ( ' sendcan ' ) <nl> - <nl> - car_name = HONDA . ACURA_ILX <nl> - <nl> - params = CarInterface . get_params ( car_name , { 0 : { 0x201 : 6 } , 1 : { } , 2 : { } } ) # Add interceptor to fingerprint <nl> - CI = CarInterface ( params , CarController ) <nl> - <nl> - # Get parser <nl> - parser_signals = [ <nl> - ( ' GAS_COMMAND ' , ' GAS_COMMAND ' , - 1 ) , <nl> - ( ' GAS_COMMAND2 ' , ' GAS_COMMAND ' , - 1 ) , <nl> - ( ' ENABLE ' , ' GAS_COMMAND ' , - 1 ) , <nl> - ] <nl> - parser = CANParser ( CI . cp . dbc_name , parser_signals , [ ] , 0 , sendcan = True , tcp_addr = " 127 . 0 . 0 . 1 " ) <nl> - time . sleep ( 0 . 2 ) # Slow joiner syndrome <nl> - <nl> - for gas in np . linspace ( 0 . , 0 . 95 , 25 ) : <nl> - control = car . CarControl . new_message ( ) <nl> - actuators = car . CarControl . Actuators . 
new_message ( ) <nl> - actuators . gas = float ( gas ) <nl> - control . enabled = True <nl> - control . actuators = actuators <nl> - CI . update ( control ) <nl> - <nl> - CI . CS . steer_not_allowed = False <nl> - <nl> - for _ in range ( 25 ) : <nl> - can_sends = CI . apply ( control ) <nl> - sendcan . send ( can_list_to_can_capnp ( can_sends , msgtype = ' sendcan ' ) ) <nl> - <nl> - for _ in range ( 5 ) : <nl> - parser . update ( int ( sec_since_boot ( ) * 1e9 ) , False ) <nl> - time . sleep ( 0 . 01 ) <nl> - <nl> - gas_command = parser . vl [ ' GAS_COMMAND ' ] [ ' GAS_COMMAND ' ] / 255 . 0 <nl> - gas_command2 = parser . vl [ ' GAS_COMMAND ' ] [ ' GAS_COMMAND2 ' ] / 255 . 0 <nl> - enabled = gas > 0 . 001 <nl> - self . assertEqual ( enabled , parser . vl [ ' GAS_COMMAND ' ] [ ' ENABLE ' ] , msg = " Car : % s , gas % . 2f " % ( car_name , gas ) ) <nl> - if enabled : <nl> - self . assertAlmostEqual ( gas , gas_command , places = 2 , msg = " Car : % s , gas % . 2f " % ( car_name , gas ) ) <nl> - self . assertAlmostEqual ( gas , gas_command2 , places = 2 , msg = " Car : % s , gas % . 2f " % ( car_name , gas ) ) <nl> - <nl> - sendcan . close ( ) <nl> - <nl> - def test_honda_brake ( self ) : <nl> - self . longMessage = True <nl> - <nl> - sendcan = messaging . pub_sock ( ' sendcan ' ) <nl> - <nl> - car_name = HONDA . CIVIC <nl> - <nl> - params = CarInterface . get_params ( car_name ) <nl> - CI = CarInterface ( params , CarController ) <nl> - <nl> - # Get parser <nl> - parser_signals = [ <nl> - ( ' COMPUTER_BRAKE ' , ' BRAKE_COMMAND ' , 0 ) , <nl> - ( ' BRAKE_PUMP_REQUEST ' , ' BRAKE_COMMAND ' , 0 ) , # pump_on <nl> - ( ' CRUISE_OVERRIDE ' , ' BRAKE_COMMAND ' , 0 ) , # pcm_override <nl> - ( ' CRUISE_FAULT_CMD ' , ' BRAKE_COMMAND ' , 0 ) , # pcm_fault_cmd <nl> - ( ' CRUISE_CANCEL_CMD ' , ' BRAKE_COMMAND ' , 0 ) , # pcm_cancel_cmd <nl> - ( ' COMPUTER_BRAKE_REQUEST ' , ' BRAKE_COMMAND ' , 0 ) , # brake_rq <nl> - ( ' SET_ME_0X80 ' , ' BRAKE_COMMAND ' , 0 ) , <nl> - ( ' BRAKE_LIGHTS ' , ' BRAKE_COMMAND ' , 0 ) , # brakelights <nl> - ( ' FCW ' , ' BRAKE_COMMAND ' , 0 ) , <nl> - ] <nl> - parser = CANParser ( CI . cp . dbc_name , parser_signals , [ ] , 0 , sendcan = True , tcp_addr = " 127 . 0 . 0 . 1 " ) <nl> - time . sleep ( 0 . 2 ) # Slow joiner syndrome <nl> - <nl> - VA = car . CarControl . HUDControl . VisualAlert <nl> - <nl> - for override in [ True , False ] : <nl> - for cancel in [ True , False ] : <nl> - for fcw in [ True , False ] : <nl> - steps = 25 if not override and not cancel else 2 <nl> - for brake in np . linspace ( 0 . , 0 . 95 , steps ) : <nl> - control = car . CarControl . new_message ( ) <nl> - <nl> - hud = car . CarControl . HUDControl . new_message ( ) <nl> - if fcw : <nl> - hud . visualAlert = VA . fcw <nl> - <nl> - cruise = car . CarControl . CruiseControl . new_message ( ) <nl> - cruise . cancel = cancel <nl> - cruise . override = override <nl> - <nl> - actuators = car . CarControl . Actuators . new_message ( ) <nl> - actuators . brake = float ( brake ) <nl> - <nl> - control . enabled = True <nl> - control . actuators = actuators <nl> - control . hudControl = hud <nl> - control . cruiseControl = cruise <nl> - <nl> - CI . update ( control ) <nl> - <nl> - CI . CS . steer_not_allowed = False <nl> - <nl> - for _ in range ( 20 ) : <nl> - can_sends = CI . apply ( control ) <nl> - sendcan . send ( can_list_to_can_capnp ( can_sends , msgtype = ' sendcan ' ) ) <nl> - <nl> - for _ in range ( 5 ) : <nl> - parser . update ( int ( sec_since_boot ( ) * 1e9 ) , False ) <nl> - time . sleep ( 0 . 
01 ) <nl> - <nl> - brake_command = parser . vl [ ' BRAKE_COMMAND ' ] [ ' COMPUTER_BRAKE ' ] <nl> - min_expected_brake = int ( 1024 / 4 * ( actuators . brake - 0 . 02 ) ) <nl> - max_expected_brake = int ( 1024 / 4 * ( actuators . brake + 0 . 02 ) ) <nl> - braking = actuators . brake > 0 <nl> - <nl> - braking_ok = min_expected_brake < = brake_command < = max_expected_brake <nl> - if steps = = 2 : <nl> - braking_ok = True <nl> - <nl> - self . assertTrue ( braking_ok , msg = " Car : % s , brake % . 2f " % ( car_name , brake ) ) <nl> - self . assertEqual ( 0x80 , parser . vl [ ' BRAKE_COMMAND ' ] [ ' SET_ME_0X80 ' ] ) <nl> - self . assertEqual ( braking , parser . vl [ ' BRAKE_COMMAND ' ] [ ' BRAKE_PUMP_REQUEST ' ] ) <nl> - self . assertEqual ( braking , parser . vl [ ' BRAKE_COMMAND ' ] [ ' COMPUTER_BRAKE_REQUEST ' ] ) <nl> - self . assertEqual ( braking , parser . vl [ ' BRAKE_COMMAND ' ] [ ' BRAKE_LIGHTS ' ] ) <nl> - self . assertFalse ( parser . vl [ ' BRAKE_COMMAND ' ] [ ' CRUISE_FAULT_CMD ' ] ) <nl> - self . assertEqual ( override , parser . vl [ ' BRAKE_COMMAND ' ] [ ' CRUISE_OVERRIDE ' ] ) <nl> - self . assertEqual ( cancel , parser . vl [ ' BRAKE_COMMAND ' ] [ ' CRUISE_CANCEL_CMD ' ] ) <nl> - self . assertEqual ( fcw , bool ( parser . vl [ ' BRAKE_COMMAND ' ] [ ' FCW ' ] ) ) <nl> - <nl> - if __name__ = = ' __main__ ' : <nl> - unittest . main ( ) <nl> deleted file mode 100755 <nl> index 432684de00 . . 0000000000 <nl> mmm a / selfdrive / car / tests / test_toyota_carcontroller . py <nl> ppp / dev / null <nl> <nl> - # ! / usr / bin / env python3 <nl> - <nl> - import unittest <nl> - from cereal import car , log <nl> - from selfdrive . car . toyota . values import CAR as TOYOTA <nl> - from selfdrive . car . toyota . carcontroller import CarController <nl> - from selfdrive . car . toyota . interface import CarInterface <nl> - from common . realtime import sec_since_boot <nl> - <nl> - from selfdrive . boardd . boardd import can_list_to_can_capnp <nl> - from selfdrive . config import Conversions as CV <nl> - import cereal . messaging as messaging <nl> - from cereal . services import service_list <nl> - from opendbc . can . parser import CANParser <nl> - import zmq <nl> - import time <nl> - import numpy as np <nl> - <nl> - <nl> - class TestToyotaCarcontroller ( unittest . TestCase ) : <nl> - def test_fcw ( self ) : <nl> - # TODO : This message has a 0xc1 setme which is not yet checked or in the dbc file <nl> - self . longMessage = True <nl> - car_name = TOYOTA . RAV4 <nl> - <nl> - sendcan = messaging . pub_sock ( ' sendcan ' ) <nl> - <nl> - params = CarInterface . get_params ( car_name ) <nl> - CI = CarInterface ( params , CarController ) <nl> - <nl> - # Get parser <nl> - parser_signals = [ <nl> - ( ' FCW ' , ' ACC_HUD ' , 0 ) , <nl> - ( ' SET_ME_X20 ' , ' ACC_HUD ' , 0 ) , <nl> - ( ' SET_ME_X10 ' , ' ACC_HUD ' , 0 ) , <nl> - ( ' SET_ME_X80 ' , ' ACC_HUD ' , 0 ) , <nl> - ] <nl> - parser = CANParser ( CI . cp . dbc_name , parser_signals , [ ] , 0 , sendcan = True , tcp_addr = " 127 . 0 . 0 . 1 " ) <nl> - time . sleep ( 0 . 2 ) # Slow joiner syndrome <nl> - <nl> - VA = car . CarControl . HUDControl . VisualAlert <nl> - for fcw in [ True , False ] : <nl> - control = car . CarControl . new_message ( ) <nl> - control . enabled = True <nl> - <nl> - hud = car . CarControl . HUDControl . new_message ( ) <nl> - if fcw : <nl> - hud . visualAlert = VA . fcw <nl> - control . hudControl = hud <nl> - <nl> - CI . update ( control ) <nl> - <nl> - for _ in range ( 200 ) : <nl> - can_sends = CI . 
apply ( control ) <nl> - sendcan . send ( can_list_to_can_capnp ( can_sends , msgtype = ' sendcan ' ) ) <nl> - <nl> - for _ in range ( 5 ) : <nl> - parser . update ( int ( sec_since_boot ( ) * 1e9 ) , False ) <nl> - time . sleep ( 0 . 01 ) <nl> - <nl> - self . assertEqual ( fcw , parser . vl [ ' ACC_HUD ' ] [ ' FCW ' ] ) <nl> - self . assertEqual ( 0x20 , parser . vl [ ' ACC_HUD ' ] [ ' SET_ME_X20 ' ] ) <nl> - self . assertEqual ( 0x10 , parser . vl [ ' ACC_HUD ' ] [ ' SET_ME_X10 ' ] ) <nl> - self . assertEqual ( 0x80 , parser . vl [ ' ACC_HUD ' ] [ ' SET_ME_X80 ' ] ) <nl> - <nl> - def test_ui ( self ) : <nl> - self . longMessage = True <nl> - car_name = TOYOTA . RAV4 <nl> - <nl> - sendcan = messaging . pub_sock ( ' sendcan ' ) <nl> - <nl> - params = CarInterface . get_params ( car_name ) <nl> - CI = CarInterface ( params , CarController ) <nl> - <nl> - # Get parser <nl> - parser_signals = [ <nl> - ( ' BARRIERS ' , ' LKAS_HUD ' , - 1 ) , <nl> - ( ' RIGHT_LINE ' , ' LKAS_HUD ' , 0 ) , <nl> - ( ' LEFT_LINE ' , ' LKAS_HUD ' , 0 ) , <nl> - ( ' SET_ME_X01 ' , ' LKAS_HUD ' , 0 ) , <nl> - ( ' SET_ME_X01_2 ' , ' LKAS_HUD ' , 0 ) , <nl> - ( ' LDA_ALERT ' , ' LKAS_HUD ' , - 1 ) , <nl> - ( ' SET_ME_X0C ' , ' LKAS_HUD ' , 0 ) , <nl> - ( ' SET_ME_X2C ' , ' LKAS_HUD ' , 0 ) , <nl> - ( ' SET_ME_X38 ' , ' LKAS_HUD ' , 0 ) , <nl> - ( ' SET_ME_X02 ' , ' LKAS_HUD ' , 0 ) , <nl> - ] <nl> - parser = CANParser ( CI . cp . dbc_name , parser_signals , [ ] , 0 , sendcan = True , tcp_addr = " 127 . 0 . 0 . 1 " ) <nl> - time . sleep ( 0 . 2 ) # Slow joiner syndrome <nl> - <nl> - VA = car . CarControl . HUDControl . VisualAlert <nl> - <nl> - for left_lane in [ True , False ] : <nl> - for right_lane in [ True , False ] : <nl> - for steer in [ True , False ] : <nl> - control = car . CarControl . new_message ( ) <nl> - control . enabled = True <nl> - <nl> - hud = car . CarControl . HUDControl . new_message ( ) <nl> - if steer : <nl> - hud . visualAlert = VA . steerRequired <nl> - <nl> - hud . leftLaneVisible = left_lane <nl> - hud . rightLaneVisible = right_lane <nl> - <nl> - control . hudControl = hud <nl> - CI . update ( control ) <nl> - <nl> - for _ in range ( 200 ) : # UI is only sent at 1Hz <nl> - can_sends = CI . apply ( control ) <nl> - sendcan . send ( can_list_to_can_capnp ( can_sends , msgtype = ' sendcan ' ) ) <nl> - <nl> - for _ in range ( 5 ) : <nl> - parser . update ( int ( sec_since_boot ( ) * 1e9 ) , False ) <nl> - time . sleep ( 0 . 01 ) <nl> - <nl> - self . assertEqual ( 0x0c , parser . vl [ ' LKAS_HUD ' ] [ ' SET_ME_X0C ' ] ) <nl> - self . assertEqual ( 0x2c , parser . vl [ ' LKAS_HUD ' ] [ ' SET_ME_X2C ' ] ) <nl> - self . assertEqual ( 0x38 , parser . vl [ ' LKAS_HUD ' ] [ ' SET_ME_X38 ' ] ) <nl> - self . assertEqual ( 0x02 , parser . vl [ ' LKAS_HUD ' ] [ ' SET_ME_X02 ' ] ) <nl> - self . assertEqual ( 0 , parser . vl [ ' LKAS_HUD ' ] [ ' BARRIERS ' ] ) <nl> - self . assertEqual ( 1 if right_lane else 2 , parser . vl [ ' LKAS_HUD ' ] [ ' RIGHT_LINE ' ] ) <nl> - self . assertEqual ( 1 if left_lane else 2 , parser . vl [ ' LKAS_HUD ' ] [ ' LEFT_LINE ' ] ) <nl> - self . assertEqual ( 1 , parser . vl [ ' LKAS_HUD ' ] [ ' SET_ME_X01 ' ] ) <nl> - self . assertEqual ( 1 , parser . vl [ ' LKAS_HUD ' ] [ ' SET_ME_X01_2 ' ] ) <nl> - self . assertEqual ( steer , parser . vl [ ' LKAS_HUD ' ] [ ' LDA_ALERT ' ] ) <nl> - <nl> - def test_standstill_and_cancel ( self ) : <nl> - self . longMessage = True <nl> - car_name = TOYOTA . RAV4 <nl> - <nl> - sendcan = messaging . 
pub_sock ( ' sendcan ' ) <nl> - <nl> - params = CarInterface . get_params ( car_name ) <nl> - CI = CarInterface ( params , CarController ) <nl> - <nl> - # Get parser <nl> - parser_signals = [ <nl> - ( ' RELEASE_STANDSTILL ' , ' ACC_CONTROL ' , 0 ) , <nl> - ( ' CANCEL_REQ ' , ' ACC_CONTROL ' , 0 ) , <nl> - ( ' SET_ME_X3 ' , ' ACC_CONTROL ' , 0 ) , <nl> - ( ' SET_ME_1 ' , ' ACC_CONTROL ' , 0 ) , <nl> - ] <nl> - parser = CANParser ( CI . cp . dbc_name , parser_signals , [ ] , 0 , sendcan = True , tcp_addr = " 127 . 0 . 0 . 1 " ) <nl> - time . sleep ( 0 . 2 ) # Slow joiner syndrome <nl> - <nl> - control = car . CarControl . new_message ( ) <nl> - control . enabled = True <nl> - <nl> - CI . update ( control ) <nl> - <nl> - CI . CS . pcm_acc_status = 8 # Active <nl> - CI . CS . standstill = True <nl> - can_sends = CI . apply ( control ) <nl> - <nl> - sendcan . send ( can_list_to_can_capnp ( can_sends , msgtype = ' sendcan ' ) ) <nl> - <nl> - for _ in range ( 5 ) : <nl> - parser . update ( int ( sec_since_boot ( ) * 1e9 ) , False ) <nl> - time . sleep ( 0 . 01 ) <nl> - <nl> - self . assertEqual ( 0x3 , parser . vl [ ' ACC_CONTROL ' ] [ ' SET_ME_X3 ' ] ) <nl> - self . assertEqual ( 1 , parser . vl [ ' ACC_CONTROL ' ] [ ' SET_ME_1 ' ] ) <nl> - self . assertFalse ( parser . vl [ ' ACC_CONTROL ' ] [ ' RELEASE_STANDSTILL ' ] ) <nl> - self . assertFalse ( parser . vl [ ' ACC_CONTROL ' ] [ ' CANCEL_REQ ' ] ) <nl> - <nl> - CI . CS . pcm_acc_status = 7 # Standstill <nl> - <nl> - for _ in range ( 10 ) : <nl> - can_sends = CI . apply ( control ) <nl> - sendcan . send ( can_list_to_can_capnp ( can_sends , msgtype = ' sendcan ' ) ) <nl> - <nl> - for _ in range ( 5 ) : <nl> - parser . update ( int ( sec_since_boot ( ) * 1e9 ) , False ) <nl> - time . sleep ( 0 . 01 ) <nl> - <nl> - self . assertTrue ( parser . vl [ ' ACC_CONTROL ' ] [ ' RELEASE_STANDSTILL ' ] ) <nl> - <nl> - cruise = car . CarControl . CruiseControl . new_message ( ) <nl> - cruise . cancel = True <nl> - control . cruiseControl = cruise <nl> - <nl> - for _ in range ( 10 ) : <nl> - can_sends = CI . apply ( control ) <nl> - <nl> - sendcan . send ( can_list_to_can_capnp ( can_sends , msgtype = ' sendcan ' ) ) <nl> - <nl> - for _ in range ( 5 ) : <nl> - parser . update ( int ( sec_since_boot ( ) * 1e9 ) , False ) <nl> - time . sleep ( 0 . 01 ) <nl> - <nl> - self . assertTrue ( parser . vl [ ' ACC_CONTROL ' ] [ ' CANCEL_REQ ' ] ) <nl> - <nl> - @ unittest . skip ( " IPAS logic changed , fix test " ) <nl> - def test_steering_ipas ( self ) : <nl> - self . longMessage = True <nl> - car_name = TOYOTA . RAV4 <nl> - <nl> - sendcan = messaging . pub_sock ( ' sendcan ' ) <nl> - <nl> - params = CarInterface . get_params ( car_name ) <nl> - params . enableApgs = True <nl> - CI = CarInterface ( params , CarController ) <nl> - CI . CC . angle_control = True <nl> - <nl> - # Get parser <nl> - parser_signals = [ <nl> - ( ' SET_ME_X10 ' , ' STEERING_IPAS ' , 0 ) , <nl> - ( ' SET_ME_X40 ' , ' STEERING_IPAS ' , 0 ) , <nl> - ( ' ANGLE ' , ' STEERING_IPAS ' , 0 ) , <nl> - ( ' STATE ' , ' STEERING_IPAS ' , 0 ) , <nl> - ( ' DIRECTION_CMD ' , ' STEERING_IPAS ' , 0 ) , <nl> - ] <nl> - parser = CANParser ( CI . cp . dbc_name , parser_signals , [ ] , 0 , sendcan = True , tcp_addr = " 127 . 0 . 0 . 1 " ) <nl> - time . sleep ( 0 . 2 ) # Slow joiner syndrome <nl> - <nl> - for enabled in [ True , False ] : <nl> - for steer in np . linspace ( - 510 . , 510 . , 25 ) : <nl> - control = car . CarControl . new_message ( ) <nl> - actuators = car . CarControl . Actuators . 
new_message ( ) <nl> - actuators . steerAngle = float ( steer ) <nl> - control . enabled = enabled <nl> - control . actuators = actuators <nl> - CI . update ( control ) <nl> - <nl> - CI . CS . steer_not_allowed = False <nl> - <nl> - for _ in range ( 1000 if steer < - 505 else 25 ) : <nl> - can_sends = CI . apply ( control ) <nl> - sendcan . send ( can_list_to_can_capnp ( can_sends , msgtype = ' sendcan ' ) ) <nl> - parser . update ( int ( sec_since_boot ( ) * 1e9 ) , False ) <nl> - <nl> - self . assertEqual ( 0x10 , parser . vl [ ' STEERING_IPAS ' ] [ ' SET_ME_X10 ' ] ) <nl> - self . assertEqual ( 0x40 , parser . vl [ ' STEERING_IPAS ' ] [ ' SET_ME_X40 ' ] ) <nl> - <nl> - expected_state = 3 if enabled else 1 <nl> - self . assertEqual ( expected_state , parser . vl [ ' STEERING_IPAS ' ] [ ' STATE ' ] ) <nl> - <nl> - if steer < 0 : <nl> - direction = 3 <nl> - elif steer > 0 : <nl> - direction = 1 <nl> - else : <nl> - direction = 2 <nl> - <nl> - if not enabled : <nl> - direction = 2 <nl> - self . assertEqual ( direction , parser . vl [ ' STEERING_IPAS ' ] [ ' DIRECTION_CMD ' ] ) <nl> - <nl> - expected_steer = int ( round ( steer / 1 . 5 ) ) * 1 . 5 if enabled else 0 <nl> - self . assertAlmostEqual ( expected_steer , parser . vl [ ' STEERING_IPAS ' ] [ ' ANGLE ' ] ) <nl> - <nl> - sendcan . close ( ) <nl> - <nl> - def test_steering ( self ) : <nl> - self . longMessage = True <nl> - car_name = TOYOTA . RAV4 <nl> - <nl> - sendcan = messaging . pub_sock ( ' sendcan ' ) <nl> - <nl> - params = CarInterface . get_params ( car_name ) <nl> - CI = CarInterface ( params , CarController ) <nl> - <nl> - limit = 1500 <nl> - <nl> - # Get parser <nl> - parser_signals = [ <nl> - ( ' STEER_REQUEST ' , ' STEERING_LKA ' , 0 ) , <nl> - ( ' SET_ME_1 ' , ' STEERING_LKA ' , 0 ) , <nl> - ( ' STEER_TORQUE_CMD ' , ' STEERING_LKA ' , - 1 ) , <nl> - ( ' LKA_STATE ' , ' STEERING_LKA ' , - 1 ) , <nl> - ] <nl> - parser = CANParser ( CI . cp . dbc_name , parser_signals , [ ] , 0 , sendcan = True , tcp_addr = " 127 . 0 . 0 . 1 " ) <nl> - time . sleep ( 0 . 2 ) # Slow joiner syndrome <nl> - <nl> - for steer in np . linspace ( - 1 . , 1 . , 25 ) : <nl> - control = car . CarControl . new_message ( ) <nl> - actuators = car . CarControl . Actuators . new_message ( ) <nl> - actuators . steer = float ( steer ) <nl> - control . enabled = True <nl> - control . actuators = actuators <nl> - CI . update ( control ) <nl> - <nl> - CI . CS . steer_not_allowed = False <nl> - CI . CS . steer_torque_motor = limit * steer <nl> - <nl> - # More control applies for the first one because of rate limits <nl> - for _ in range ( 1000 if steer < - 0 . 99 else 25 ) : <nl> - can_sends = CI . apply ( control ) <nl> - sendcan . send ( can_list_to_can_capnp ( can_sends , msgtype = ' sendcan ' ) ) <nl> - <nl> - parser . update ( int ( sec_since_boot ( ) * 1e9 ) , False ) <nl> - <nl> - self . assertEqual ( 1 , parser . vl [ ' STEERING_LKA ' ] [ ' SET_ME_1 ' ] ) <nl> - self . assertEqual ( True , parser . vl [ ' STEERING_LKA ' ] [ ' STEER_REQUEST ' ] ) <nl> - self . assertAlmostEqual ( round ( steer * limit ) , parser . vl [ ' STEERING_LKA ' ] [ ' STEER_TORQUE_CMD ' ] ) <nl> - self . assertEqual ( 0 , parser . vl [ ' STEERING_LKA ' ] [ ' LKA_STATE ' ] ) <nl> - <nl> - sendcan . close ( ) <nl> - <nl> - def test_accel ( self ) : <nl> - self . longMessage = True <nl> - car_name = TOYOTA . RAV4 <nl> - <nl> - sendcan = messaging . pub_sock ( ' sendcan ' ) <nl> - <nl> - params = CarInterface . 
get_params ( car_name ) <nl> - CI = CarInterface ( params , CarController ) <nl> - <nl> - # Get parser <nl> - parser_signals = [ <nl> - ( ' ACCEL_CMD ' , ' ACC_CONTROL ' , 0 ) , <nl> - ] <nl> - parser = CANParser ( CI . cp . dbc_name , parser_signals , [ ] , 0 , sendcan = True , tcp_addr = " 127 . 0 . 0 . 1 " ) <nl> - time . sleep ( 0 . 2 ) # Slow joiner syndrome <nl> - <nl> - for accel in np . linspace ( - 3 . , 1 . 5 , 25 ) : <nl> - control = car . CarControl . new_message ( ) <nl> - actuators = car . CarControl . Actuators . new_message ( ) <nl> - <nl> - gas = accel / 3 . if accel > 0 . else 0 . <nl> - brake = - accel / 3 . if accel < 0 . else 0 . <nl> - <nl> - actuators . gas = float ( gas ) <nl> - actuators . brake = float ( brake ) <nl> - control . enabled = True <nl> - control . actuators = actuators <nl> - CI . update ( control ) <nl> - <nl> - # More control applies for the first one because of rate limits <nl> - for _ in range ( 25 ) : <nl> - can_sends = CI . apply ( control ) <nl> - sendcan . send ( can_list_to_can_capnp ( can_sends , msgtype = ' sendcan ' ) ) <nl> - <nl> - for _ in range ( 5 ) : <nl> - parser . update ( int ( sec_since_boot ( ) * 1e9 ) , False ) <nl> - time . sleep ( 0 . 01 ) <nl> - <nl> - min_accel = accel - 0 . 061 <nl> - max_accel = accel + 0 . 061 <nl> - sent_accel = parser . vl [ ' ACC_CONTROL ' ] [ ' ACCEL_CMD ' ] <nl> - accel_ok = min_accel < = sent_accel < = max_accel <nl> - self . assertTrue ( accel_ok , msg = " % . 2f < = % . 2f < = % . 2f " % ( min_accel , sent_accel , max_accel ) ) <nl> - sendcan . close ( ) <nl> - <nl> - <nl> - if __name__ = = ' __main__ ' : <nl> - unittest . main ( ) <nl> mmm a / selfdrive / controls / controlsd . py <nl> ppp b / selfdrive / controls / controlsd . py <nl> def controlsd_thread ( sm = None , pm = None , can_sock = None ) : <nl> CP . safetyModel = car . CarParams . SafetyModel . noOutput <nl> <nl> # Write CarParams for radard and boardd safety mode <nl> - params . put ( " CarParams " , CP . to_bytes ( ) ) <nl> + cp_bytes = CP . to_bytes ( ) <nl> + params . put ( " CarParams " , cp_bytes ) <nl> + params . put ( " CarParamsCache " , cp_bytes ) <nl> params . put ( " LongitudinalControl " , " 1 " if CP . openpilotLongitudinalControl else " 0 " ) <nl> <nl> CC = car . CarControl . new_message ( ) <nl> mmm a / selfdrive / debug / fw_query / test_fw_query_on_routes . py <nl> ppp b / selfdrive / debug / fw_query / test_fw_query_on_routes . py <nl> <nl> from selfdrive . car . honda . values import FW_VERSIONS as HONDA_FW_VERSIONS <nl> <nl> <nl> - def fw_versions_to_dict ( car_fw ) : <nl> - fw_versions = { } <nl> - for f in car_fw : <nl> - addr = f . address <nl> - subaddr = f . subAddress <nl> - if subaddr = = 0 : <nl> - subaddr = None <nl> - fw_versions [ ( addr , subaddr ) ] = f . fwVersion <nl> - <nl> - return fw_versions <nl> - <nl> - <nl> if __name__ = = " __main__ " : <nl> if len ( sys . argv ) < 2 : <nl> print ( " Usage : . / test_fw_query_on_routes . py < route_list > " ) <nl> def fw_versions_to_dict ( car_fw ) : <nl> if live_fingerprint not in list ( TOYOTA_FW_VERSIONS . keys ( ) ) + list ( HONDA_FW_VERSIONS . keys ( ) ) : <nl> continue <nl> <nl> - fw_versions = fw_versions_to_dict ( car_fw ) <nl> - candidates = match_fw_to_car ( fw_versions ) <nl> + candidates = match_fw_to_car ( car_fw ) <nl> if ( len ( candidates ) = = 1 ) and ( list ( candidates ) [ 0 ] = = live_fingerprint ) : <nl> print ( " Correct " , live_fingerprint , dongle_id ) <nl> break <nl>
Cache FW query ( )
commaai/openpilot
b7aeb5d64dcbd2a3b9e065fd00ee1039852ab0b2
2020-01-31T01:57:20Z
mmm a / cocos / 2d / CCSprite . cpp <nl> ppp b / cocos / 2d / CCSprite . cpp <nl> void Sprite : : updateTransform ( void ) <nl> float x2 = x1 + size . width ; <nl> float y2 = y1 + size . height ; <nl> <nl> - if ( _flippedX ) <nl> - { <nl> - std : : swap ( x1 , x2 ) ; <nl> - } <nl> - if ( _flippedY ) <nl> - { <nl> - std : : swap ( y1 , y2 ) ; <nl> - } <nl> - <nl> float x = _transformToBatch . m [ 12 ] ; <nl> float y = _transformToBatch . m [ 13 ] ; <nl> <nl> void Sprite : : updateTransform ( void ) <nl> _quad . br . vertices . set ( SPRITE_RENDER_IN_SUBPIXEL ( bx ) , SPRITE_RENDER_IN_SUBPIXEL ( by ) , _positionZ ) ; <nl> _quad . tl . vertices . set ( SPRITE_RENDER_IN_SUBPIXEL ( dx ) , SPRITE_RENDER_IN_SUBPIXEL ( dy ) , _positionZ ) ; <nl> _quad . tr . vertices . set ( SPRITE_RENDER_IN_SUBPIXEL ( cx ) , SPRITE_RENDER_IN_SUBPIXEL ( cy ) , _positionZ ) ; <nl> + setTextureCoords ( _rect ) ; <nl> } <nl> <nl> / / MARMALADE CHANGE : ADDED CHECK FOR nullptr , TO PERMIT SPRITES WITH NO BATCH NODE / TEXTURE ATLAS <nl>
fix setflip for batch sprite
cocos2d/cocos2d-x
eee7726f5f7e8972c3d6c916f153935cd3fba7d4
2015-12-14T07:35:26Z
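The cocos2d-x diff above stops swapping the vertex x/y coordinates of flipped sprites in updateTransform and instead re-runs setTextureCoords(_rect), letting the texture coordinates express the flip. As a rough illustration of that idea (the QuadUV layout and function names below are assumptions for the sketch, not cocos2d-x types), a horizontal flip can be done by mirroring the U coordinates of the left and right corners, and a vertical flip by mirroring V:

#include <cstdio>
#include <utility>

// Illustrative sketch only, not cocos2d-x code: one UV pair per quad corner.
struct QuadUV {
    float tl_u, tl_v;   // top-left
    float tr_u, tr_v;   // top-right
    float bl_u, bl_v;   // bottom-left
    float br_u, br_v;   // bottom-right
};

// Horizontal flip: mirror the texture by swapping left/right U values,
// leaving the vertex positions untouched.
void flipX(QuadUV& q) {
    std::swap(q.tl_u, q.tr_u);
    std::swap(q.bl_u, q.br_u);
}

// Vertical flip: swap top/bottom V values.
void flipY(QuadUV& q) {
    std::swap(q.tl_v, q.bl_v);
    std::swap(q.tr_v, q.br_v);
}

int main() {
    QuadUV q{0, 0, 1, 0, 0, 1, 1, 1};
    flipX(q);
    std::printf("tl.u=%.0f tr.u=%.0f\n", q.tl_u, q.tr_u);   // tl.u=1 tr.u=0
}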
mmm a / Documentation / current_iteration . md <nl> ppp b / Documentation / current_iteration . md <nl> The comparison numbers for this single node are as follows : <nl> | New implementation | 6 . 581 | 9 . 963 | 5 | <nl> | Speedup / savings Approx . | 30 % Approx . | 65 - 75 % Approx . | 87 % | <nl> <nl> + # # Sequential Convolution <nl> + The implementation of sequential convolution in CNTK has been updated . The updated implementation creates a separate sequential convolution layer . Different from regular convolution layer , this operation convolves also on the dynamic axis ( sequence ) , and filter_shape [ 0 ] is applied to that axis . The updated implementation supports broader cases , such as where stride > 1 for the sequence axis . <nl> + <nl> + For example , a sequential convolution over a batch of one - channel black - and - white images . The images have the same fixed height of 640 , but each with width of variable lengths . The width is then represented by sequential axis . Padding is enabled , and strides for both width and height are 2 . <nl> + <nl> + > > > f = SequentialConvolution ( ( 3 , 3 ) , reduction_rank = 0 , pad = True , strides = ( 2 , 2 ) , activation = C . relu ) <nl> + > > > x = C . input_variable ( * * Sequence [ Tensor [ 640 ] ] ) <nl> + > > > x . shape <nl> + ( 640 , ) <nl> + > > > h = f ( x ) <nl> + > > > h . shape <nl> + ( 320 , ) <nl> + > > > f . W . shape <nl> + ( 1 , 1 , 3 , 3 ) <nl> + <nl> # # Operators <nl> # # # depth_to_space and space_to_depth <nl> There is a breaking change in the * * depth_to_space * * and * * space_to_depth * * operators . These have been updated to match ONNX specification , specifically <nl> Added support for trigonometric ops ` Tan ` and ` Atan ` . <nl> # # # ELU <nl> Added support for ` alpha ` attribute in ELU op . <nl> <nl> + # # # Convolution <nl> + Updated auto padding algorithms of ` Convolution ` to produce symmetric padding at best effort on CPU , without affecting the final convolution output values . This update increases the range of cases that could be covered by MKL API and improves the performance , E . g . ResNet50 . <nl> + <nl> # # Default arguments order <nl> There is a breaking change in the * * arguments * * property in CNTK python API . The default behavior has been updated to return arguments in python order instead of in C + + order . This way it will return arguments in the same order as they are fed into ops . If you wish to still get arguments in C + + order , you can simply override the global option . This change should only affect the following ops : Times , TransposeTimes , and Gemm ( internal ) . <nl> <nl> There is a breaking change in the * * arguments * * property in CNTK python API . The <nl> - Updated doc for Convolution layer to include group and dilation arguments . <nl> - Added improved input validation for group convolution . <nl> - Updated ` LogSoftMax ` to use more numerically stable implementation . <nl> - <nl> + - Fixed Gather op ' s incorrect gradient value . <nl> + - Added validation for ' None ' node in python clone substitution . <nl> + - Added validation for padding channel axis in convolution . <nl> + - Added CNTK native default lotusIR logger to fix the " Attempt to use DefaultLogger " error when loading some ONNX models . <nl> + - Added proper initialization for ONNX TypeStrToProtoMap . <nl> + - Updated python doctest to handle different print format for newer version numpy ( version > = 1 . 14 ) . 
<nl> + - Fixed Pooling ( CPU ) to produce correct output values when kernel center is on padded input cells . <nl> <nl> # # ONNX <nl> # # # Updates <nl> There is a breaking change in the * * arguments * * property in CNTK python API . The <nl> - Fixed ` Hardmax ` / ` Softmax ` / ` LogSoftmax ` import / export . <nl> - Added support for ` Select ` op export . <nl> - Added import / export support for several trigonometric ops . <nl> - <nl> + - Updated CNTK support for ONNX ` MatMul ` op . <nl> + - Updated CNTK support for ONNX ` Gemm ` op . <nl> + - Updated CNTK ' s ONNX ` MeanVarianceNormalization ` op export / import to latest spec . <nl> + - Updated CNTK ' s ONNX ` LayerNormalization ` op export / import to latest spec . <nl> + - Updated CNTK ' s ONNX ` PRelu ` op export / import to latest spec . <nl> + - Updated CNTK ' s ONNX ` Gather ` op export / import to latest spec . <nl> + - Updated CNTK ' s ONNX ` ImageScaler ` op export / import to latest spec . <nl> + - Updated CNTK ' s ONNX ` Reduce ` ops export / import to latest spec . <nl> + - Updated CNTK ' s ONNX ` Flatten ` op export / import to latest spec . <nl> + - Added CNTK support for ONNX ` Unsqueeze ` op . <nl> <nl> # # # Bug or minor fixes : <nl> - - Updated LRN op to match ONNX 1 . 2 spec where the ` size ` attribute has the semantics of diameter , not radius . <nl> + - Updated LRN op to match ONNX 1 . 2 spec where the ` size ` attribute has the semantics of diameter , not radius . Added validation if LRN kernel size is larger than channel size . <nl> - Updated ` Min ` / ` Max ` import implementation to handle variadic inputs . <nl> - Fixed possible file corruption when resaving on top of existing ONNX model file . <nl> <nl>
Update current_iteration . md
microsoft/CNTK
d264a2603493fecda607c1d1cda87fedba77d36b
2018-09-13T20:44:42Z
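The CNTK notes above state that a (3,3) sequential convolution with pad=True and strides (2,2) turns a fixed height of 640 into 320. That follows from the usual "same"-padding size rule, output = ceil(input / stride); the snippet below is just that arithmetic, not CNTK code:

#include <cstdio>

// Illustrative arithmetic only, not CNTK code.
// "Same"-padded convolution output length: ceil(input / stride).
// Without padding it would be (input - kernel) / stride + 1 instead.
long same_pad_output_len(long input, long stride) {
    return (input + stride - 1) / stride;
}

int main() {
    std::printf("%ld\n", same_pad_output_len(640, 2));   // 320
}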
new file mode 100644 <nl> index 0000000000 . . b6562ece15 <nl> mmm / dev / null <nl> ppp b / code / data_structures / Heap / MinHeap . cpp <nl> <nl> + / * <nl> + Part of Cosmos by OpenGenus Foundation <nl> + * / <nl> + <nl> + # include < iostream > <nl> + # include < vector > <nl> + using namespace std ; <nl> + <nl> + class Heap { <nl> + vector < int > v ; <nl> + <nl> + bool myCompare ( int a , int b ) { / / / returns a < b for min heap <nl> + return a < b ; <nl> + } <nl> + <nl> + void heapify ( int i ) { <nl> + int left = i < < 1 ; <nl> + int right = left + 1 ; <nl> + int min_i = i ; <nl> + if ( left < v . size ( ) & & myCompare ( v [ left ] , v [ min_i ] ) ) { <nl> + min_i = left ; <nl> + } <nl> + if ( right < v . size ( ) & & myCompare ( v [ right ] , v [ min_i ] ) ) { <nl> + min_i = right ; <nl> + } <nl> + if ( min_i ! = i ) { <nl> + swap ( v [ i ] , v [ min_i ] ) ; <nl> + heapify ( min_i ) ; <nl> + } <nl> + } <nl> + <nl> + public : <nl> + <nl> + Heap ( ) { <nl> + v . push_back ( - 1 ) ; / / / Blocks position 0 , we will start from 1 <nl> + } <nl> + <nl> + void push ( int data ) { <nl> + v . push_back ( data ) ; <nl> + int index = v . size ( ) - 1 ; <nl> + int parent = index / 2 ; <nl> + while ( index > 1 & & myCompare ( v [ index ] , v [ parent ] ) ) { <nl> + swap ( v [ index ] , v [ parent ] ) ; <nl> + index = parent ; <nl> + parent = index / 2 ; <nl> + } <nl> + } <nl> + <nl> + int getTop ( ) { <nl> + return v [ 1 ] ; <nl> + } <nl> + void pop ( ) { <nl> + int last_index = v . size ( ) - 1 ; <nl> + swap ( v [ 1 ] , v [ last_index ] ) ; <nl> + v . pop_back ( ) ; <nl> + heapify ( 1 ) ; <nl> + } <nl> + bool isEmpty ( ) { <nl> + return v . size ( ) = = 1 ; <nl> + } <nl> + } ; <nl> + <nl> + <nl> + int main ( ) { <nl> + <nl> + int nos [ ] = { 5 , 4 , 1 , 3 , 2 , 6 , 7 , 8 } ; <nl> + <nl> + Heap h ; <nl> + <nl> + for ( int i = 0 ; i < 8 ; i + + ) { <nl> + h . push ( nos [ i ] ) ; <nl> + } <nl> + while ( ! h . isEmpty ( ) ) { <nl> + cout < < h . getTop ( ) < < " " ; <nl> + h . pop ( ) ; <nl> + } <nl> + <nl> + return 0 ; <nl> + } <nl>
Added code for Min Heap in C + +
OpenGenus/cosmos
4c20026059c8e9f496e0622ac22b5d0da271db76
2017-10-05T10:55:19Z
mmm a / cmake / CompileBoost . cmake <nl> ppp b / cmake / CompileBoost . cmake <nl> else ( ) <nl> include ( ExternalProject ) <nl> ExternalProject_add ( boostProject <nl> URL " https : / / dl . bintray . com / boostorg / release / 1 . 72 . 0 / source / boost_1_72_0 . tar . bz2 " <nl> - URL_HASH SHA256 = 2684c972994ee57fc5632e03bf044746f6eb45d4920c343937a465fd67a5adba <nl> + URL_HASH SHA256 = 59c9b274bc451cf91a9ba1dd2c7fdcaf5d60b1b3aa83f2c9fa143417cc660722 <nl> CONFIGURE_COMMAND " " <nl> BUILD_COMMAND " " <nl> BUILD_IN_SOURCE ON <nl>
Update cmake / CompileBoost . cmake
apple/foundationdb
6e87770b272ccb3e5a9634a62d6509412624d94a
2020-03-31T23:52:55Z
mmm a / hphp / runtime / ext / gd / ext_gd . cpp <nl> ppp b / hphp / runtime / ext / gd / ext_gd . cpp <nl> const StaticString <nl> s_mime ( " mime " ) , <nl> s_linespacing ( " linespacing " ) ; <nl> <nl> - Variant HHVM_FUNCTION ( getimagesize , const String & filename , <nl> - VRefParam imageinfo / * = null * / ) { <nl> + Variant getImageSize ( Resource stream , VRefParam imageinfo ) { <nl> int itype = 0 ; <nl> struct gfxinfo * result = NULL ; <nl> if ( imageinfo . isReferenced ( ) ) { <nl> imageinfo = Array : : Create ( ) ; <nl> } <nl> <nl> - Variant stream = f_fopen ( filename , " rb " ) ; <nl> - if ( same ( stream , false ) ) { <nl> - return false ; <nl> - } <nl> - itype = php_getimagetype ( stream . toResource ( ) ) ; <nl> + itype = php_getimagetype ( stream ) ; <nl> switch ( itype ) { <nl> case IMAGE_FILETYPE_GIF : <nl> - result = php_handle_gif ( stream . toResource ( ) ) ; <nl> + result = php_handle_gif ( stream ) ; <nl> break ; <nl> case IMAGE_FILETYPE_JPEG : <nl> { <nl> Variant HHVM_FUNCTION ( getimagesize , const String & filename , <nl> if ( imageinfo . isReferenced ( ) ) { <nl> infoArr = Array : : Create ( ) ; <nl> } <nl> - result = php_handle_jpeg ( stream . toResource ( ) , infoArr ) ; <nl> + result = php_handle_jpeg ( stream , infoArr ) ; <nl> if ( ! infoArr . empty ( ) ) { <nl> imageinfo = infoArr ; <nl> } <nl> } <nl> break ; <nl> case IMAGE_FILETYPE_PNG : <nl> - result = php_handle_png ( stream . toResource ( ) ) ; <nl> + result = php_handle_png ( stream ) ; <nl> break ; <nl> case IMAGE_FILETYPE_SWF : <nl> - result = php_handle_swf ( stream . toResource ( ) ) ; <nl> + result = php_handle_swf ( stream ) ; <nl> break ; <nl> case IMAGE_FILETYPE_SWC : <nl> # if HAVE_ZLIB & & ! defined ( COMPILE_DL_ZLIB ) <nl> - result = php_handle_swc ( stream . toResource ( ) ) ; <nl> + result = php_handle_swc ( stream ) ; <nl> # else <nl> raise_notice ( " The image is a compressed SWF file , but you do not " <nl> " have a static version of the zlib extension enabled " ) ; <nl> # endif <nl> break ; <nl> case IMAGE_FILETYPE_PSD : <nl> - result = php_handle_psd ( stream . toResource ( ) ) ; <nl> + result = php_handle_psd ( stream ) ; <nl> break ; <nl> case IMAGE_FILETYPE_BMP : <nl> - result = php_handle_bmp ( stream . toResource ( ) ) ; <nl> + result = php_handle_bmp ( stream ) ; <nl> break ; <nl> case IMAGE_FILETYPE_TIFF_II : <nl> - result = php_handle_tiff ( stream . toResource ( ) , 0 ) ; <nl> + result = php_handle_tiff ( stream , 0 ) ; <nl> break ; <nl> case IMAGE_FILETYPE_TIFF_MM : <nl> - result = php_handle_tiff ( stream . toResource ( ) , 1 ) ; <nl> + result = php_handle_tiff ( stream , 1 ) ; <nl> break ; <nl> case IMAGE_FILETYPE_JPC : <nl> - result = php_handle_jpc ( stream . toResource ( ) ) ; <nl> + result = php_handle_jpc ( stream ) ; <nl> break ; <nl> case IMAGE_FILETYPE_JP2 : <nl> - result = php_handle_jp2 ( stream . toResource ( ) ) ; <nl> + result = php_handle_jp2 ( stream ) ; <nl> break ; <nl> case IMAGE_FILETYPE_IFF : <nl> - result = php_handle_iff ( stream . toResource ( ) ) ; <nl> + result = php_handle_iff ( stream ) ; <nl> break ; <nl> case IMAGE_FILETYPE_WBMP : <nl> - result = php_handle_wbmp ( stream . toResource ( ) ) ; <nl> + result = php_handle_wbmp ( stream ) ; <nl> break ; <nl> case IMAGE_FILETYPE_XBM : <nl> - result = php_handle_xbm ( stream . toResource ( ) ) ; <nl> + result = php_handle_xbm ( stream ) ; <nl> break ; <nl> case IMAGE_FILETYPE_ICO : <nl> - result = php_handle_ico ( stream . 
toResource ( ) ) ; <nl> + result = php_handle_ico ( stream ) ; <nl> break ; <nl> default : <nl> case IMAGE_FILETYPE_UNKNOWN : <nl> break ; <nl> } <nl> <nl> - f_fclose ( stream . toResource ( ) ) ; <nl> - <nl> if ( result ) { <nl> ArrayInit ret ( 7 , ArrayInit : : Mixed { } ) ; <nl> ret . set ( 0 , ( int64_t ) result - > width ) ; <nl> Variant HHVM_FUNCTION ( getimagesize , const String & filename , <nl> } <nl> } <nl> <nl> + Variant HHVM_FUNCTION ( getimagesize , const String & filename , <nl> + VRefParam imageinfo / * = null * / ) { <nl> + Variant stream = f_fopen ( filename , " rb " ) ; <nl> + if ( same ( stream , false ) ) { <nl> + return false ; <nl> + } <nl> + Variant ret = getImageSize ( stream . toResource ( ) , imageinfo ) ; <nl> + f_fclose ( stream . toResource ( ) ) ; <nl> + return ret ; <nl> + } <nl> + <nl> + Variant HHVM_FUNCTION ( getimagesizefromstring , const String & imagedata , <nl> + VRefParam imageinfo / * = null * / ) { <nl> + String data = " data : / / text / plain ; base64 , " ; <nl> + data + = StringUtil : : Base64Encode ( imagedata ) ; <nl> + Variant stream = f_fopen ( data , " r " ) ; <nl> + if ( same ( stream , false ) ) { <nl> + return false ; <nl> + } <nl> + Variant ret = getImageSize ( stream . toResource ( ) , imageinfo ) ; <nl> + f_fclose ( stream . toResource ( ) ) ; <nl> + return ret ; <nl> + } <nl> + <nl> / / PHP extension gd . c <nl> # define HAVE_GDIMAGECREATEFROMPNG 1 <nl> <nl> class GdExtension : public Extension { <nl> void moduleInit ( ) override { <nl> HHVM_FE ( gd_info ) ; <nl> HHVM_FE ( getimagesize ) ; <nl> + HHVM_FE ( getimagesizefromstring ) ; <nl> HHVM_FE ( image_type_to_extension ) ; <nl> HHVM_FE ( image_type_to_mime_type ) ; <nl> # ifdef HAVE_GD_WBMP <nl> mmm a / hphp / runtime / ext / gd / ext_gd . php <nl> ppp b / hphp / runtime / ext / gd / ext_gd . php <nl> function gd_info ( ) : array ; <nl> function getimagesize ( string $ filename , <nl> mixed & $ imageinfo = null ) : mixed ; <nl> <nl> + / * Identical to getimagesize ( ) except that getimagesizefromstring ( ) accepts <nl> + * a string instead of a file name as the first parameter . <nl> + * / <nl> + < < __Native > > <nl> + function getimagesizefromstring ( string $ filename , <nl> + mixed & $ imageinfo = null ) : mixed ; <nl> + <nl> / * Returns the extension for the given IMAGETYPE_XXX constant . <nl> * / <nl> < < __Native > > <nl> similarity index 100 % <nl> rename from hphp / test / zend / bad / ext / standard / tests / image / getimagesizefromstring1 . php <nl> rename to hphp / test / zend / good / ext / standard / tests / image / getimagesizefromstring1 . php <nl> similarity index 100 % <nl> rename from hphp / test / zend / bad / ext / standard / tests / image / getimagesizefromstring1 . php . expectf <nl> rename to hphp / test / zend / good / ext / standard / tests / image / getimagesizefromstring1 . php . expectf <nl>
Implement getImageSizeFromString ( )
facebook/hhvm
43c17b25a8f7991633ceef26a668a9b74a64579b
2014-09-25T15:00:21Z
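The HHVM commit above extracts the format-sniffing logic into a helper that works on an already-open stream, so the file-based and string-based entry points only differ in how they build that stream (the string variant detours through a base64 data:// URI). The sketch below shows the same refactoring shape in plain C++; the two-byte signature checks and all names are illustrative assumptions, not HHVM's GD code:

#include <fstream>
#include <iostream>
#include <sstream>
#include <string>

// Illustrative sketch only, not HHVM's GD extension.
// Shared worker: operates on any stream, so callers differ only in setup.
std::string detect_format(std::istream& in) {
    unsigned char sig[2] = {0, 0};
    in.read(reinterpret_cast<char*>(sig), 2);
    if (sig[0] == 0x89 && sig[1] == 'P') return "png";    // \x89 'P' 'N' 'G'
    if (sig[0] == 0xFF && sig[1] == 0xD8) return "jpeg";  // JPEG SOI marker
    return "unknown";
}

std::string detect_from_file(const std::string& path) {
    std::ifstream f(path, std::ios::binary);
    return f ? detect_format(f) : "error";
}

std::string detect_from_string(const std::string& data) {
    std::istringstream s(data);
    return detect_format(s);
}

int main() {
    std::cout << detect_from_string("\xFF\xD8\xFF\xE0") << "\n";   // jpeg
}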
mmm a / src / core / CMakeLists . txt <nl> ppp b / src / core / CMakeLists . txt <nl> set ( SRCS <nl> arm / skyeye_common / vfp / vfpdouble . cpp <nl> arm / skyeye_common / vfp / vfpinstr . cpp <nl> arm / skyeye_common / vfp / vfpsingle . cpp <nl> + file_sys / archive_backend . cpp <nl> file_sys / archive_extsavedata . cpp <nl> file_sys / archive_romfs . cpp <nl> file_sys / archive_savedata . cpp <nl> new file mode 100644 <nl> index 00000000000 . . 0439868ab8d <nl> mmm / dev / null <nl> ppp b / src / core / file_sys / archive_backend . cpp <nl> <nl> + / / Copyright 2015 Citra Emulator Project <nl> + / / Licensed under GPLv2 or any later version <nl> + / / Refer to the license . txt file included . <nl> + <nl> + # include < sstream > <nl> + <nl> + # include " common / logging / log . h " <nl> + # include " common / string_util . h " <nl> + <nl> + # include " core / file_sys / archive_backend . h " <nl> + # include " core / mem_map . h " <nl> + <nl> + <nl> + namespace FileSys { <nl> + <nl> + Path : : Path ( LowPathType type , u32 size , u32 pointer ) : type ( type ) { <nl> + switch ( type ) { <nl> + case Binary : <nl> + { <nl> + u8 * data = Memory : : GetPointer ( pointer ) ; <nl> + binary = std : : vector < u8 > ( data , data + size ) ; <nl> + break ; <nl> + } <nl> + <nl> + case Char : <nl> + { <nl> + const char * data = reinterpret_cast < const char * > ( Memory : : GetPointer ( pointer ) ) ; <nl> + string = std : : string ( data , size - 1 ) ; / / Data is always null - terminated . <nl> + break ; <nl> + } <nl> + <nl> + case Wchar : <nl> + { <nl> + const char16_t * data = reinterpret_cast < const char16_t * > ( Memory : : GetPointer ( pointer ) ) ; <nl> + u16str = std : : u16string ( data , size / 2 - 1 ) ; / / Data is always null - terminated . <nl> + break ; <nl> + } <nl> + <nl> + default : <nl> + break ; <nl> + } <nl> + } <nl> + <nl> + const std : : string Path : : DebugStr ( ) const { <nl> + switch ( GetType ( ) ) { <nl> + case Invalid : <nl> + default : <nl> + return " [ Invalid ] " ; <nl> + case Empty : <nl> + return " [ Empty ] " ; <nl> + case Binary : <nl> + { <nl> + std : : stringstream res ; <nl> + res < < " [ Binary : " ; <nl> + for ( unsigned byte : binary ) <nl> + res < < std : : hex < < std : : setw ( 2 ) < < std : : setfill ( ' 0 ' ) < < byte ; <nl> + res < < ' ] ' ; <nl> + return res . str ( ) ; <nl> + } <nl> + case Char : <nl> + return " [ Char : " + AsString ( ) + ' ] ' ; <nl> + case Wchar : <nl> + return " [ Wchar : " + AsString ( ) + ' ] ' ; <nl> + } <nl> + } <nl> + <nl> + const std : : string Path : : AsString ( ) const { <nl> + switch ( GetType ( ) ) { <nl> + case Char : <nl> + return string ; <nl> + case Wchar : <nl> + return Common : : UTF16ToUTF8 ( u16str ) ; <nl> + case Empty : <nl> + return { } ; <nl> + case Invalid : <nl> + case Binary : <nl> + default : <nl> + / / TODO ( yuriks ) : Add assert <nl> + LOG_ERROR ( Service_FS , " LowPathType cannot be converted to string ! " ) ; <nl> + return { } ; <nl> + } <nl> + } <nl> + <nl> + const std : : u16string Path : : AsU16Str ( ) const { <nl> + switch ( GetType ( ) ) { <nl> + case Char : <nl> + return Common : : UTF8ToUTF16 ( string ) ; <nl> + case Wchar : <nl> + return u16str ; <nl> + case Empty : <nl> + return { } ; <nl> + case Invalid : <nl> + case Binary : <nl> + / / TODO ( yuriks ) : Add assert <nl> + LOG_ERROR ( Service_FS , " LowPathType cannot be converted to u16string ! 
" ) ; <nl> + return { } ; <nl> + } <nl> + } <nl> + <nl> + const std : : vector < u8 > Path : : AsBinary ( ) const { <nl> + switch ( GetType ( ) ) { <nl> + case Binary : <nl> + return binary ; <nl> + case Char : <nl> + return std : : vector < u8 > ( string . begin ( ) , string . end ( ) ) ; <nl> + case Wchar : <nl> + { <nl> + / / use two u8 for each character of u16str <nl> + std : : vector < u8 > to_return ( u16str . size ( ) * 2 ) ; <nl> + for ( size_t i = 0 ; i < u16str . size ( ) ; + + i ) { <nl> + u16 tmp_char = u16str . at ( i ) ; <nl> + to_return [ i * 2 ] = ( tmp_char & 0xFF00 ) > > 8 ; <nl> + to_return [ i * 2 + 1 ] = ( tmp_char & 0x00FF ) ; <nl> + } <nl> + return to_return ; <nl> + } <nl> + case Empty : <nl> + return { } ; <nl> + case Invalid : <nl> + default : <nl> + / / TODO ( yuriks ) : Add assert <nl> + LOG_ERROR ( Service_FS , " LowPathType cannot be converted to binary ! " ) ; <nl> + return { } ; <nl> + } <nl> + } <nl> + <nl> + } <nl> mmm a / src / core / file_sys / archive_backend . h <nl> ppp b / src / core / file_sys / archive_backend . h <nl> <nl> # pragma once <nl> <nl> # include < memory > <nl> - # include < sstream > <nl> # include < string > <nl> # include < utility > <nl> # include < vector > <nl> <nl> # include " common / bit_field . h " <nl> # include " common / common_types . h " <nl> - # include " common / logging / log . h " <nl> - # include " common / string_util . h " <nl> <nl> # include " core / hle / result . h " <nl> - # include " core / mem_map . h " <nl> <nl> <nl> namespace FileSys { <nl> union Mode { <nl> <nl> class Path { <nl> public : <nl> + Path ( ) : type ( Invalid ) { } <nl> + Path ( const char * path ) : type ( Char ) , string ( path ) { } <nl> + Path ( std : : vector < u8 > binary_data ) : type ( Binary ) , binary ( std : : move ( binary_data ) ) { } <nl> + Path ( LowPathType type , u32 size , u32 pointer ) ; <nl> <nl> - Path ( ) : type ( Invalid ) { <nl> - } <nl> - <nl> - Path ( const char * path ) : type ( Char ) , string ( path ) { <nl> - } <nl> - <nl> - Path ( std : : vector < u8 > binary_data ) : type ( Binary ) , binary ( std : : move ( binary_data ) ) { <nl> - } <nl> - <nl> - Path ( LowPathType type , u32 size , u32 pointer ) : type ( type ) { <nl> - switch ( type ) { <nl> - case Binary : <nl> - { <nl> - u8 * data = Memory : : GetPointer ( pointer ) ; <nl> - binary = std : : vector < u8 > ( data , data + size ) ; <nl> - break ; <nl> - } <nl> - <nl> - case Char : <nl> - { <nl> - const char * data = reinterpret_cast < const char * > ( Memory : : GetPointer ( pointer ) ) ; <nl> - string = std : : string ( data , size - 1 ) ; / / Data is always null - terminated . <nl> - break ; <nl> - } <nl> - <nl> - case Wchar : <nl> - { <nl> - const char16_t * data = reinterpret_cast < const char16_t * > ( Memory : : GetPointer ( pointer ) ) ; <nl> - u16str = std : : u16string ( data , size / 2 - 1 ) ; / / Data is always null - terminated . 
<nl> - break ; <nl> - } <nl> - <nl> - default : <nl> - break ; <nl> - } <nl> - } <nl> - <nl> - LowPathType GetType ( ) const { <nl> - return type ; <nl> - } <nl> + LowPathType GetType ( ) const { return type ; } <nl> <nl> / * * <nl> * Gets the string representation of the path for debugging <nl> * @ return String representation of the path for debugging <nl> * / <nl> - const std : : string DebugStr ( ) const { <nl> - switch ( GetType ( ) ) { <nl> - case Invalid : <nl> - default : <nl> - return " [ Invalid ] " ; <nl> - case Empty : <nl> - return " [ Empty ] " ; <nl> - case Binary : <nl> - { <nl> - std : : stringstream res ; <nl> - res < < " [ Binary : " ; <nl> - for ( unsigned byte : binary ) <nl> - res < < std : : hex < < std : : setw ( 2 ) < < std : : setfill ( ' 0 ' ) < < byte ; <nl> - res < < ' ] ' ; <nl> - return res . str ( ) ; <nl> - } <nl> - case Char : <nl> - return " [ Char : " + AsString ( ) + ' ] ' ; <nl> - case Wchar : <nl> - return " [ Wchar : " + AsString ( ) + ' ] ' ; <nl> - } <nl> - } <nl> + const std : : string DebugStr ( ) const ; <nl> <nl> - const std : : string AsString ( ) const { <nl> - switch ( GetType ( ) ) { <nl> - case Char : <nl> - return string ; <nl> - case Wchar : <nl> - return Common : : UTF16ToUTF8 ( u16str ) ; <nl> - case Empty : <nl> - return { } ; <nl> - case Invalid : <nl> - case Binary : <nl> - default : <nl> - / / TODO ( yuriks ) : Add assert <nl> - LOG_ERROR ( Service_FS , " LowPathType cannot be converted to string ! " ) ; <nl> - return { } ; <nl> - } <nl> - } <nl> - <nl> - const std : : u16string AsU16Str ( ) const { <nl> - switch ( GetType ( ) ) { <nl> - case Char : <nl> - return Common : : UTF8ToUTF16 ( string ) ; <nl> - case Wchar : <nl> - return u16str ; <nl> - case Empty : <nl> - return { } ; <nl> - case Invalid : <nl> - case Binary : <nl> - / / TODO ( yuriks ) : Add assert <nl> - LOG_ERROR ( Service_FS , " LowPathType cannot be converted to u16string ! " ) ; <nl> - return { } ; <nl> - } <nl> - } <nl> - <nl> - const std : : vector < u8 > AsBinary ( ) const { <nl> - switch ( GetType ( ) ) { <nl> - case Binary : <nl> - return binary ; <nl> - case Char : <nl> - return std : : vector < u8 > ( string . begin ( ) , string . end ( ) ) ; <nl> - case Wchar : <nl> - { <nl> - / / use two u8 for each character of u16str <nl> - std : : vector < u8 > to_return ( u16str . size ( ) * 2 ) ; <nl> - for ( size_t i = 0 ; i < u16str . size ( ) ; + + i ) { <nl> - u16 tmp_char = u16str . at ( i ) ; <nl> - to_return [ i * 2 ] = ( tmp_char & 0xFF00 ) > > 8 ; <nl> - to_return [ i * 2 + 1 ] = ( tmp_char & 0x00FF ) ; <nl> - } <nl> - return to_return ; <nl> - } <nl> - case Empty : <nl> - return { } ; <nl> - case Invalid : <nl> - default : <nl> - / / TODO ( yuriks ) : Add assert <nl> - LOG_ERROR ( Service_FS , " LowPathType cannot be converted to binary ! " ) ; <nl> - return { } ; <nl> - } <nl> - } <nl> + const std : : string AsString ( ) const ; <nl> + const std : : u16string AsU16Str ( ) const ; <nl> + const std : : vector < u8 > AsBinary ( ) const ; <nl> <nl> private : <nl> LowPathType type ; <nl> mmm a / src / core / hle / service / cfg / cfg . cpp <nl> ppp b / src / core / hle / service / cfg / cfg . cpp <nl> <nl> <nl> # include < algorithm > <nl> <nl> + # include " common / string_util . h " <nl> + <nl> # include " core / file_sys / file_backend . h " <nl> # include " core / hle / service / cfg / cfg . h " <nl> # include " core / hle / service / cfg / cfg_i . h " <nl>
FileSys : De - inline Path members
yuzu-emu/yuzu
b89f644cfef7592a501f1c0b9aae0c4ae757d854
2015-05-07T02:45:06Z
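The yuzu/Citra change above is a de-inlining refactor: Path's member bodies move from the header into a new archive_backend.cpp, so the header no longer needs <sstream>, logging, or memory-map includes. A minimal sketch of that header/source split, using a made-up class rather than the real Path:

#include <cstdio>
#include <sstream>
#include <string>
#include <utility>

// Illustrative sketch only (a made-up class, not the emulator's Path type).

// --- header part: declarations only, so includers stay lightweight ---
class Record {
public:
    explicit Record(std::string raw);
    std::string DebugStr() const;     // body lives in the .cpp
private:
    std::string raw_;
};

// --- .cpp part: definitions, and the <sstream> dependency, live here ---
Record::Record(std::string raw) : raw_(std::move(raw)) {}

std::string Record::DebugStr() const {
    std::ostringstream out;
    out << "[Record: " << raw_ << "]";
    return out.str();
}

int main() {
    Record r("save:/00000001");
    std::printf("%s\n", r.DebugStr().c_str());   // [Record: save:/00000001]
}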
mmm a / tensorflow / core / nccl / BUILD <nl> ppp b / tensorflow / core / nccl / BUILD <nl> tf_cuda_cc_test ( <nl> srcs = [ " nccl_manager_test . cc " ] , <nl> tags = tf_cuda_tests_tags ( ) + [ <nl> " no_cuda_on_cpu_tap " , # TODO ( b / 120284216 ) : re - enable multi_gpu <nl> + " no_rocm " , <nl> ] , <nl> deps = [ <nl> " / / tensorflow / core : test " , <nl>
Adding ' no_rocm ' tag to the ' / / tensorflow / core / nccl : nccl_manager_test ' . A recent change broke this test for the ROCm platform . We are looking into fixing it , but need to disable it in the meantime because it runs as part of the ROCm Community Supported Build .
tensorflow/tensorflow
189a16d0760dad6ba860d9bcf91db26e44a8f643
2019-08-29T19:45:39Z
mmm a / xbmc / network / AirPlayServer . cpp <nl> ppp b / xbmc / network / AirPlayServer . cpp <nl> <nl> # include < arpa / inet . h > <nl> # include " DllLibPlist . h " <nl> # include " utils / log . h " <nl> + # include " utils / URIUtils . h " <nl> # include " utils / StringUtils . h " <nl> # include " threads / SingleLock . h " <nl> # include " filesystem / File . h " <nl> int CAirPlayServer : : CTCPClient : : ProcessRequest ( CStdString & responseHeader , <nl> <nl> if ( status ! = AIRPLAY_STATUS_NEED_AUTH ) <nl> { <nl> - CFileItem fileToPlay ( location + " | User - Agent = AppleCoreMedia / 1 . 0 . 0 . 8F455 ( Apple † TV ; U ; CPU OS 4_3 like Mac OS X ; de_de ) " , false ) ; <nl> + CStdString userAgent = " AppleCoreMedia / 1 . 0 . 0 . 8F455 ( AppleTV ; U ; CPU OS 4_3 like Mac OS X ; de_de ) " ; <nl> + CURL : : Encode ( userAgent ) ; <nl> + location + = " | User - Agent = " + userAgent ; <nl> + <nl> + CFileItem fileToPlay ( location , false ) ; <nl> fileToPlay . SetProperty ( " StartPercent " , position * 100 . 0f ) ; <nl> g_application . getApplicationMessenger ( ) . MediaPlay ( fileToPlay ) ; <nl> ComposeReverseEvent ( reverseHeader , reverseBody , sessionId , EVENT_PLAYING ) ; <nl>
[ AirPlay ] - urlencode the user - agent used when streaming airplay stuff ( fixes issues with live transcoded AirVideoServer content )
xbmc/xbmc
470973d0e0a1ebb2e743f00c2c5330d44c6cfac3
2012-02-29T17:31:53Z
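The AirPlay fix above percent-encodes the User-Agent before appending it as a |User-Agent=... option, so spaces, parentheses, and semicolons in the agent string cannot break the option syntax. Below is a generic RFC 3986-style encoder for illustration; it is not XBMC's CURL::Encode, and the sample URL is made up:

#include <cstdio>
#include <string>

// Illustrative sketch only, not XBMC code.
// Percent-encode everything except RFC 3986 "unreserved" characters.
std::string percent_encode(const std::string& in) {
    static const char* hex = "0123456789ABCDEF";
    std::string out;
    for (unsigned char c : in) {
        bool unreserved = (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') ||
                          (c >= '0' && c <= '9') || c == '-' || c == '_' ||
                          c == '.' || c == '~';
        if (unreserved) {
            out += static_cast<char>(c);
        } else {
            out += '%';
            out += hex[c >> 4];
            out += hex[c & 0x0F];
        }
    }
    return out;
}

int main() {
    std::string ua = "AppleCoreMedia/1.0.0.8F455 (AppleTV; U; de_de)";
    std::string url = "http://host/video.mp4|User-Agent=" + percent_encode(ua);
    std::printf("%s\n", url.c_str());
}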
mmm a / tests / test . cpp <nl> ppp b / tests / test . cpp <nl> void NullableScalarsTest ( ) { <nl> <nl> / / Test if nullable scalars are allowed for each language . <nl> const int kNumLanguages = 17 ; <nl> - const auto supported = ( flatbuffers : : IDLOptions : : kRust | flatbuffers : : IDLOptions : : kSwift ) ; <nl> + const auto supported = ( flatbuffers : : IDLOptions : : kRust | <nl> + flatbuffers : : IDLOptions : : kSwift | <nl> + flatbuffers : : IDLOptions : : kLobster ) ; <nl> for ( int lang = 0 ; lang < kNumLanguages ; lang + + ) { <nl> flatbuffers : : IDLOptions opts ; <nl> opts . lang_to_generate | = 1 < < lang ; <nl>
[ Lobster ] missed a test
google/flatbuffers
f3003e08d02ca4810924a53ca463a8b3150926d8
2020-08-18T21:29:44Z
mmm a / tensorflow / compiler / mlir / tensorflow / ir / tf_generated_ops . td <nl> ppp b / tensorflow / compiler / mlir / tensorflow / ir / tf_generated_ops . td <nl> If ` x ` and ` y ` are reals , this will return the floating - point division . <nl> TF_DerivedOperandTypeAttr T = TF_DerivedOperandTypeAttr < 0 > ; <nl> <nl> let hasCanonicalizer = 1 ; <nl> + <nl> + let hasFolder = 1 ; <nl> } <nl> <nl> def TF_ReciprocalOp : TF_Op < " Reciprocal " , [ NoSideEffect , SameOperandsAndResultType ] > { <nl> mmm a / tensorflow / compiler / mlir / tensorflow / ir / tf_ops . cc <nl> ppp b / tensorflow / compiler / mlir / tensorflow / ir / tf_ops . cc <nl> static inline bool HasRankAtMost ( Value value , int64_t rank ) { <nl> return ! type | | type . getRank ( ) < = rank ; <nl> } <nl> <nl> - <nl> static bool IsUnknownDimOrRank ( int64_t dim_or_rank ) { <nl> return dim_or_rank = = - 1 ; <nl> } <nl> LogicalResult FoldOperandsPermutation ( <nl> namespace { <nl> / / Folder that returns LHS of an Arithmetic Op if the RHS is a constant <nl> / / known to be Identity ( e . g X + 0 ) <nl> - template < typename OpT , <nl> - typename std : : enable_if < llvm : : is_one_of < <nl> - OpT , AddV2Op , SubOp , MulOp , DivOp > : : value > : : type * = nullptr > <nl> + template < <nl> + typename OpT , <nl> + typename std : : enable_if < llvm : : is_one_of < <nl> + OpT , AddV2Op , SubOp , MulOp , DivOp , RealDivOp > : : value > : : type * = nullptr > <nl> OpFoldResult IdentityArithmeticOpFolder ( OpT arithmetic_op , <nl> ArrayRef < Attribute > operands ) { <nl> auto result_op_type = arithmetic_op . getResult ( ) . getType ( ) ; <nl> OpFoldResult IdentityArithmeticOpFolder ( OpT arithmetic_op , <nl> / / Mul and Div ops have identity value one while AddV2 and SubOp have identity <nl> / / value zero . <nl> int identity = <nl> - ( std : : is_same < OpT , MulOp > : : value | | std : : is_same < OpT , DivOp > : : value ) ; <nl> + ( std : : is_same < OpT , MulOp > : : value | | std : : is_same < OpT , DivOp > : : value | | <nl> + std : : is_same < OpT , RealDivOp > : : value ) ; <nl> <nl> Type element_ty = lhs_type . getElementType ( ) ; <nl> Attribute identity_attr ; <nl> void RealDivOp : : getCanonicalizationPatterns ( OwningRewritePatternList & results , <nl> results . insert < RealDivWithSqrtDivisor > ( context ) ; <nl> } <nl> <nl> + OpFoldResult RealDivOp : : fold ( ArrayRef < Attribute > operands ) { <nl> + return IdentityArithmeticOpFolder < RealDivOp > ( * this , operands ) ; <nl> + } <nl> + <nl> / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> / / ReshapeOp <nl> / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> mmm a / tensorflow / compiler / mlir / tensorflow / tests / constant - fold . mlir <nl> ppp b / tensorflow / compiler / mlir / tensorflow / tests / constant - fold . mlir <nl> func @ RemoveTrivialDiv ( % arg0 : tensor < 2x2xf32 > , % arg1 : tensor < 2x2xf32 > ) - > tensor <nl> / / CHECK - NEXT : return % [ [ RESULT ] ] : tensor < 2x2xf32 > <nl> } <nl> <nl> + func @ RemoveTrivialRealDiv ( % arg0 : tensor < 2x2xf32 > , % arg1 : tensor < 2x2xf32 > ) - > tensor < 2x2xf32 > { <nl> + % cst = constant dense < 1 . 0 > : tensor < 2x2xf32 > <nl> + % 0 = " tf . 
RealDiv " ( % arg0 , % cst ) : ( tensor < 2x2xf32 > , tensor < 2x2xf32 > ) - > tensor < 2x2xf32 > <nl> + return % 0 : tensor < 2x2xf32 > <nl> + <nl> + / / CHECK - LABEL : RemoveTrivialRealDiv <nl> + / / CHECK - NEXT : return % arg0 : tensor < 2x2xf32 > <nl> + } <nl> + <nl> func @ RemoveTrivialDivBf16RHS ( % arg0 : tensor < 2x2xbf16 > ) - > tensor < 2x2xbf16 > { <nl> % cst = constant dense < 1 . 0 > : tensor < 2x2xbf16 > <nl> % 0 = " tf . Div " ( % arg0 , % cst ) : ( tensor < 2x2xbf16 > , tensor < 2x2xbf16 > ) - > tensor < 2x2xbf16 > <nl>
Optimize trivial RealDiv ops
tensorflow/tensorflow
d70dc548b58c56a6a510b8d676cbc08ffdad3189
2020-05-16T10:54:33Z
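The MLIR change above reuses IdentityArithmeticOpFolder so that tf.RealDiv(x, 1) folds to x, with identity 1 for Mul/Div/RealDiv and identity 0 for AddV2/Sub. The toy folder below shows the same rule on a plain expression type; it has no MLIR dependencies and the representation is invented for the sketch:

#include <cstdio>
#include <optional>

// Illustrative sketch only, not TensorFlow/MLIR code.
enum class Op { Add, Sub, Mul, Div };

// Fold "x <op> c" when c is the operation's identity element.
// Returns the folded operand, or nothing if no folding applies.
std::optional<double> fold_identity(Op op, double lhs, double rhs_const) {
    double identity = (op == Op::Mul || op == Op::Div) ? 1.0 : 0.0;
    if (rhs_const == identity) return lhs;   // x+0, x-0, x*1, x/1 -> x
    return std::nullopt;
}

int main() {
    if (auto v = fold_identity(Op::Div, 42.0, 1.0))
        std::printf("folded to %.1f\n", *v);     // folded to 42.0
    if (!fold_identity(Op::Div, 42.0, 2.0))
        std::printf("no fold for x/2\n");
}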
mmm a / dbms / tests / queries / 0_stateless / 00719_format_datetime_rand . sql <nl> ppp b / dbms / tests / queries / 0_stateless / 00719_format_datetime_rand . sql <nl> <nl> - WITH toDateTime ( rand ( ) ) AS t SELECT count ( ) FROM numbers ( 1000000 ) WHERE formatDateTime ( t , ' % F % T ' ) ! = toString ( t ) ; <nl> - WITH toDateTime ( rand ( ) ) AS t SELECT count ( ) FROM numbers ( 1000000 ) WHERE formatDateTime ( t , ' % Y - % m - % d % H : % M : % S ' ) ! = toString ( t ) ; <nl> - WITH toDateTime ( rand ( ) ) AS t SELECT count ( ) FROM numbers ( 1000000 ) WHERE formatDateTime ( t , ' % Y - % m - % d % R : % S ' ) ! = toString ( t ) ; <nl> - WITH toDateTime ( rand ( ) ) AS t SELECT count ( ) FROM numbers ( 1000000 ) WHERE formatDateTime ( t , ' % F % R : % S ' ) ! = toString ( t ) ; <nl> + - - We add 1 , because function toString has special behaviour for zero datetime <nl> + WITH toDateTime ( 1 + rand ( ) % 0xFFFFFFFF ) AS t SELECT count ( ) FROM numbers ( 1000000 ) WHERE formatDateTime ( t , ' % F % T ' ) ! = toString ( t ) ; <nl> + WITH toDateTime ( 1 + rand ( ) % 0xFFFFFFFF ) AS t SELECT count ( ) FROM numbers ( 1000000 ) WHERE formatDateTime ( t , ' % Y - % m - % d % H : % M : % S ' ) ! = toString ( t ) ; <nl> + WITH toDateTime ( 1 + rand ( ) % 0xFFFFFFFF ) AS t SELECT count ( ) FROM numbers ( 1000000 ) WHERE formatDateTime ( t , ' % Y - % m - % d % R : % S ' ) ! = toString ( t ) ; <nl> + WITH toDateTime ( 1 + rand ( ) % 0xFFFFFFFF ) AS t SELECT count ( ) FROM numbers ( 1000000 ) WHERE formatDateTime ( t , ' % F % R : % S ' ) ! = toString ( t ) ; <nl> <nl> WITH toDate ( today ( ) + rand ( ) % 4096 ) AS t SELECT count ( ) FROM numbers ( 1000000 ) WHERE formatDateTime ( t , ' % F ' ) ! = toString ( t ) ; <nl> WITH toDate ( today ( ) + rand ( ) % 4096 ) AS t SELECT count ( ) FROM numbers ( 1000000 ) WHERE formatDateTime ( t , ' % F % T ' ) ! = toString ( toDateTime ( t ) ) ; <nl>
Fixed test [ # CLICKHOUSE - 4191 ]
ClickHouse/ClickHouse
aec8c149dce563eddfa08ab6a48c6e871c4e0445
2018-12-10T15:59:56Z
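The ClickHouse test fix above replaces toDateTime(rand()) with toDateTime(1 + rand() % 0xFFFFFFFF): toString() treats the zero DateTime specially, and the modulo keeps the +1 from wrapping a UInt32. The small check below only demonstrates the resulting range [1, 0xFFFFFFFF]; it is not ClickHouse code:

#include <cstdint>
#include <cstdio>

// Illustrative only, not ClickHouse code.
// Maps any UInt32 into [1, 0xFFFFFFFF], so the zero DateTime never appears.
uint32_t nonzero_datetime(uint32_t r) {
    return 1u + r % 0xFFFFFFFFu;     // r % 0xFFFFFFFF <= 0xFFFFFFFE, so no wrap
}

int main() {
    std::printf("%u %u %u\n",
                nonzero_datetime(0u),            // 1
                nonzero_datetime(0xFFFFFFFEu),   // 4294967295
                nonzero_datetime(0xFFFFFFFFu));  // 1
}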
mmm a / include / spdlog / details / async_log_helper . h <nl> ppp b / include / spdlog / details / async_log_helper . h <nl> inline bool spdlog : : details : : async_log_helper : : process_next_msg ( log_clock : : time_ <nl> } <nl> } <nl> <nl> + / / flush all sinks if _flush_interval_ms has expired <nl> inline void spdlog : : details : : async_log_helper : : handle_flush_interval ( log_clock : : time_point & now , log_clock : : time_point & last_flush ) <nl> { <nl> auto should_flush = _flush_requested | | ( _flush_interval_ms ! = std : : chrono : : milliseconds : : zero ( ) & & now - last_flush > = _flush_interval_ms ) ; <nl> inline void spdlog : : details : : async_log_helper : : handle_flush_interval ( log_clock : : <nl> _flush_requested = false ; <nl> } <nl> } <nl> + <nl> inline void spdlog : : details : : async_log_helper : : set_formatter ( formatter_ptr msg_formatter ) <nl> { <nl> _formatter = msg_formatter ; <nl> } <nl> <nl> <nl> - / / sleep , yield or return immediatly using the time passed since last message as a hint <nl> + / / spin , yield or sleep . use the time passed since last message as a hint <nl> inline void spdlog : : details : : async_log_helper : : sleep_or_yield ( const spdlog : : log_clock : : time_point & now , const spdlog : : log_clock : : time_point & last_op_time ) <nl> { <nl> - using std : : chrono : : milliseconds ; <nl> using namespace std : : this_thread ; <nl> - <nl> + using std : : chrono : : milliseconds ; <nl> + using std : : chrono : : microseconds ; <nl> + <nl> auto time_since_op = now - last_op_time ; <nl> - <nl> - / / spin upto 1 ms <nl> - if ( time_since_op < = milliseconds ( 1 ) ) <nl> + <nl> + / / spin upto 50 micros <nl> + if ( time_since_op < = microseconds ( 50 ) ) <nl> return ; <nl> - <nl> - / / yield upto 10ms <nl> - if ( time_since_op < = milliseconds ( 10 ) ) <nl> + <nl> + / / yield upto 150 micros <nl> + if ( time_since_op < = microseconds ( 100 ) ) <nl> return yield ( ) ; <nl> <nl> <nl> - / / sleep for half of duration since last op <nl> - if ( time_since_op < = milliseconds ( 100 ) ) <nl> - return sleep_for ( time_since_op / 2 ) ; <nl> + / / sleep for 20 ms upto 200 ms <nl> + if ( time_since_op < = milliseconds ( 200 ) ) <nl> + return sleep_for ( milliseconds ( 20 ) ) ; <nl> <nl> - return sleep_for ( milliseconds ( 100 ) ) ; <nl> + / / sleep for 200 ms <nl> + return sleep_for ( milliseconds ( 200 ) ) ; <nl> } <nl> <nl> / / throw if the worker thread threw an exception or not active <nl>
reduced spinning duration in async_log_helper
gabime/spdlog
ea611f2d792b3c492451c8965eda183501cfb175
2016-05-13T15:01:49Z
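The spdlog diff above retunes the consumer's backoff: spin while the queue has been idle for at most ~50 µs, yield up to ~100 µs, nap 20 ms at a time up to 200 ms of idleness, then sleep 200 ms. The standalone sketch below copies those thresholds but is otherwise illustrative, not the library's async_log_helper:

#include <chrono>
#include <thread>

// Illustrative sketch only, not spdlog code.
using Clock = std::chrono::steady_clock;

// Escalating backoff: the longer the queue has been idle, the cheaper
// (for the CPU) and the coarser (for latency) the wait becomes.
void sleep_or_yield(Clock::time_point now, Clock::time_point last_op) {
    using namespace std::chrono;
    auto idle = now - last_op;

    if (idle <= microseconds(50)) return;                        // busy spin
    if (idle <= microseconds(100)) { std::this_thread::yield(); return; }
    if (idle <= milliseconds(200)) {                             // short naps
        std::this_thread::sleep_for(milliseconds(20));
        return;
    }
    std::this_thread::sleep_for(milliseconds(200));              // long idle
}

int main() {
    auto start = Clock::now();
    sleep_or_yield(Clock::now(), start);   // freshly active: returns immediately
}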
mmm a / dbms / src / Functions / FunctionsConversion . h <nl> ppp b / dbms / src / Functions / FunctionsConversion . h <nl> class FunctionBuilderCast : public FunctionBuilderImpl <nl> <nl> FunctionBuilderCast ( const Context & context ) : context ( context ) { } <nl> <nl> - String getName ( ) const { return name ; } <nl> + String getName ( ) const override { return name ; } <nl> <nl> size_t getNumberOfArguments ( ) const override { return 2 ; } <nl> <nl> mmm a / dbms / src / Functions / FunctionsTransform . h <nl> ppp b / dbms / src / Functions / FunctionsTransform . h <nl> class FunctionTransform : public IFunction <nl> if ( ! array_from | | ! array_to ) <nl> throw Exception { " Second and third arguments of function " + getName ( ) + " must be constant arrays . " , ErrorCodes : : ILLEGAL_COLUMN } ; <nl> <nl> - prepare ( array_from - > getValue < Array > ( ) , array_to - > getValue < Array > ( ) , block , arguments ) ; <nl> + initialize ( array_from - > getValue < Array > ( ) , array_to - > getValue < Array > ( ) , block , arguments ) ; <nl> <nl> const auto in = block . getByPosition ( arguments . front ( ) ) . column . get ( ) ; <nl> <nl> class FunctionTransform : public IFunction <nl> <nl> Field const_default_value ; / / / Null , if not specified . <nl> <nl> - bool prepared = false ; <nl> + bool initialized = false ; <nl> std : : mutex mutex ; <nl> <nl> / / / Can be called from different threads . It works only on the first call . <nl> - void prepare ( const Array & from , const Array & to , Block & block , const ColumnNumbers & arguments ) <nl> + void initialize ( const Array & from , const Array & to , Block & block , const ColumnNumbers & arguments ) <nl> { <nl> - if ( prepared ) <nl> + if ( initialized ) <nl> return ; <nl> <nl> const size_t size = from . size ( ) ; <nl> class FunctionTransform : public IFunction <nl> <nl> std : : lock_guard < std : : mutex > lock ( mutex ) ; <nl> <nl> - if ( prepared ) <nl> + if ( initialized ) <nl> return ; <nl> <nl> if ( from . size ( ) ! = to . size ( ) ) <nl> class FunctionTransform : public IFunction <nl> } <nl> } <nl> <nl> - prepared = true ; <nl> + initialized = true ; <nl> } <nl> } ; <nl> <nl>
Fixed build
ClickHouse/ClickHouse
c704f8b10c0b771a07ac9a8f7c0e608c5a1fee90
2018-02-09T19:32:12Z
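The first hunk above only adds the missing override on getName(), which is presumably what "Fixed build" refers to under a warnings-as-errors configuration; the rest renames prepare/prepared to initialize/initialized. The tiny example below (hypothetical classes, not ClickHouse's IFunction hierarchy) shows why override is worth adding: it turns a silently-wrong signature into a compile error:

// Illustrative example only, not ClickHouse's IFunction hierarchy.
struct IFunction {
    virtual ~IFunction() = default;
    virtual const char* getName() const { return "base"; }
};

struct Concrete : IFunction {
    // 'override' makes the compiler verify this really overrides the base
    // declaration (same name, parameters, and const-ness).
    const char* getName() const override { return "concrete"; }

    // const char* getName() override { return "oops"; }
    // ^ would not compile: without 'const' there is nothing to override.
};

int main() {
    Concrete c;
    IFunction& f = c;
    return f.getName()[0] == 'c' ? 0 : 1;   // virtual dispatch picks Concrete
}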
mmm a / doc / tutorials / introduction / crosscompilation / arm_crosscompile_with_cmake . rst <nl> ppp b / doc / tutorials / introduction / crosscompilation / arm_crosscompile_with_cmake . rst <nl> Building OpenCV <nl> Enable hardware optimizations <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> <nl> - Depending on target platfrom architecture different instruction sets can be used . By default <nl> + Depending on target platform architecture different instruction sets can be used . By default <nl> compiler generates code for armv5l without VFPv3 and NEON extensions . Add ` ` - DUSE_VFPV3 = ON ` ` <nl> to cmake command line to enable code generation for VFPv3 and ` ` - DUSE_NEON = ON ` ` for using <nl> NEON SIMD extensions . <nl> mmm a / platforms / android / service / engine / jni / NativeService / PackageInfo . cpp <nl> ppp b / platforms / android / service / engine / jni / NativeService / PackageInfo . cpp <nl> inline string JoinPlatform ( int platform ) <nl> return result ; <nl> } <nl> <nl> - inline int SplitPlatfrom ( const vector < string > & features ) <nl> + inline int SplitPlatform ( const vector < string > & features ) <nl> { <nl> int result = 0 ; <nl> <nl> InstallPath ( install_path ) <nl> return ; <nl> } <nl> <nl> - Platform = SplitPlatfrom ( features ) ; <nl> + Platform = SplitPlatform ( features ) ; <nl> if ( PLATFORM_UNKNOWN ! = Platform ) <nl> { <nl> switch ( Platform ) <nl> mmm a / platforms / android / service / engine / jni / Tests / HardwareDetectionTest . cpp <nl> ppp b / platforms / android / service / engine / jni / Tests / HardwareDetectionTest . cpp <nl> TEST ( CpuID , CheckVFPv3 ) <nl> EXPECT_TRUE ( cpu_id & FEATURES_HAS_VFPv3 ) ; <nl> } <nl> <nl> - TEST ( PlatfromDetector , CheckTegra ) <nl> + TEST ( PlatformDetector , CheckTegra ) <nl> { <nl> EXPECT_NE ( PLATFORM_UNKNOWN , DetectKnownPlatforms ( ) ) ; <nl> } <nl> mmm a / platforms / android / service / engine / src / org / opencv / engine / manager / ManagerActivity . java <nl> ppp b / platforms / android / service / engine / src / org / opencv / engine / manager / ManagerActivity . java <nl> public void onClick ( DialogInterface dialog , int which ) { <nl> mInstalledPackageView . setAdapter ( mInstalledPacksAdapter ) ; <nl> <nl> TextView HardwarePlatformView = ( TextView ) findViewById ( R . id . HardwareValue ) ; <nl> - int Platfrom = HardwareDetector . DetectKnownPlatforms ( ) ; <nl> + int Platform = HardwareDetector . DetectKnownPlatforms ( ) ; <nl> int CpuId = HardwareDetector . GetCpuID ( ) ; <nl> <nl> - if ( HardwareDetector . PLATFORM_UNKNOWN ! = Platfrom ) <nl> + if ( HardwareDetector . PLATFORM_UNKNOWN ! = Platform ) <nl> { <nl> - if ( HardwareDetector . PLATFORM_TEGRA = = Platfrom ) <nl> + if ( HardwareDetector . PLATFORM_TEGRA = = Platform ) <nl> { <nl> HardwarePlatformView . setText ( " Tegra " ) ; <nl> } <nl> - else if ( HardwareDetector . PLATFORM_TEGRA2 = = Platfrom ) <nl> + else if ( HardwareDetector . PLATFORM_TEGRA2 = = Platform ) <nl> { <nl> HardwarePlatformView . setText ( " Tegra 2 " ) ; <nl> } <nl> - else if ( HardwareDetector . PLATFORM_TEGRA3 = = Platfrom ) <nl> + else if ( HardwareDetector . PLATFORM_TEGRA3 = = Platform ) <nl> { <nl> HardwarePlatformView . setText ( " Tegra 3 " ) ; <nl> } <nl> - else if ( HardwareDetector . PLATFORM_TEGRA4i = = Platfrom ) <nl> + else if ( HardwareDetector . PLATFORM_TEGRA4i = = Platform ) <nl> { <nl> HardwarePlatformView . setText ( " Tegra 4i " ) ; <nl> } <nl> - else if ( HardwareDetector . PLATFORM_TEGRA4 = = Platfrom ) <nl> + else if ( HardwareDetector . 
PLATFORM_TEGRA4 = = Platform ) <nl> { <nl> HardwarePlatformView . setText ( " Tegra 4 " ) ; <nl> } <nl>
Fixed the " platfrom " typo everywhere .
opencv/opencv
2b3105591446cabbf13a1bc7ac26b1f6ca397af9
2013-12-03T13:33:28Z
mmm a / include / swift / AST / DiagnosticsParse . def <nl> ppp b / include / swift / AST / DiagnosticsParse . def <nl> WARNING ( lex_nul_character , lexing , none , <nl> WARNING ( lex_missing_newline_eof , lexing , none , <nl> " missing newline at end of file " , ( ) ) <nl> ERROR ( lex_utf16_bom_marker , lexing , none , <nl> - " UTF - 16 BOM marker is not valid UTF - 8 " , ( ) ) <nl> + " input files must be encoded as UTF - 8 instead of UTF - 16 " , ( ) ) <nl> <nl> ERROR ( lex_hashbang_not_allowed , lexing , none , <nl> " hashbang line is allowed only in the main file " , ( ) ) <nl> ERROR ( lex_hashbang_not_allowed , lexing , none , <nl> ERROR ( lex_unprintable_ascii_character , lexing , none , <nl> " unprintable ASCII character found in source file " , ( ) ) <nl> ERROR ( lex_invalid_utf8_character , lexing , none , <nl> - " input files must be encoded as UTF - 8 instead of UTF - 16 " , ( ) ) <nl> + " invalid UTF8 - encoded character found in source file " , ( ) ) <nl> <nl> ERROR ( lex_unterminated_block_comment , lexing , none , <nl> " unterminated ' / * ' comment " , ( ) ) <nl>
Fix r12422 ; change the text of the diagnostic about UTF - 16 , not bad UTF - 8 .
apple/swift
923a5392fe9b97c285daaa9f19a86c5bc6cfee40
2014-01-17T00:50:11Z
mmm a / src / mips64 / code - stubs - mips64 . cc <nl> ppp b / src / mips64 / code - stubs - mips64 . cc <nl> namespace internal { <nl> void ArrayNArgumentsConstructorStub : : Generate ( MacroAssembler * masm ) { <nl> __ dsll ( t9 , a0 , kPointerSizeLog2 ) ; <nl> __ Daddu ( t9 , sp , t9 ) ; <nl> - __ sw ( a1 , MemOperand ( t9 , 0 ) ) ; <nl> + __ sd ( a1 , MemOperand ( t9 , 0 ) ) ; <nl> __ Push ( a1 ) ; <nl> __ Push ( a2 ) ; <nl> __ Daddu ( a0 , a0 , 3 ) ; <nl>
MIPS64 : Fix ' [ stubs ] Remove N - argument Hydrogen - based Array constructor stub . '
v8/v8
bf51705eca0509afe435248eaf8316a15886b0b3
2016-06-14T09:16:25Z
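The V8 fix above swaps a 32-bit store (sw) for a 64-bit store (sd) when writing a pointer-sized value on MIPS64; with the narrow store, only half of the slot is updated and the upper 32 bits keep stale data. The portable illustration below uses memcpy widths to stand in for the two instructions (assumes a little-endian target; it is not V8 code):

#include <cstdint>
#include <cstdio>
#include <cstring>

// Illustrative sketch only, not V8 code.
int main() {
    uint64_t slot = 0xDEADBEEFDEADBEEFull;    // stale contents of a stack slot
    uint64_t value = 0x0000000012345678ull;   // pointer-sized value to spill

    std::memcpy(&slot, &value, 4);            // "sw": writes only 4 bytes
    std::printf("after 32-bit store: %016llx\n", (unsigned long long)slot);
    // On a little-endian target this leaves 0xDEADBEEF12345678.

    std::memcpy(&slot, &value, 8);            // "sd": writes the full 8 bytes
    std::printf("after 64-bit store: %016llx\n", (unsigned long long)slot);
}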
mmm a / tensorflow / core / ops / tpu_replication_ops . cc <nl> ppp b / tensorflow / core / ops / tpu_replication_ops . cc <nl> REGISTER_OP ( " _TPUReplicate " ) <nl> . Attr ( " padding_map : list ( string ) = [ ] " ) <nl> . Attr ( " step_marker_location : string = \ " STEP_MARK_AT_ENTRY \ " " ) <nl> . Attr ( " allow_soft_placement : bool = false " ) <nl> + . Attr ( " num_distributed_variables : int = 0 " ) <nl> . Input ( " inputs : Tinputs " ) <nl> . Input ( " broadcast_inputs : Tbroadcast_inputs " ) <nl> . Input ( " variables : NumVariables * resource " ) <nl>
Add attribute " num_distributed_variables " to _TPUReplicate op , in order to distinguish per - replica inputs and distributed variable inputs .
tensorflow/tensorflow
cacf5948c106f7a37b04a6f57f0904cf0f3576cd
2020-03-05T21:41:30Z
new file mode 100644 <nl> index 000000000 . . 27d388f97 <nl> mmm / dev / null <nl> ppp b / docs / CNAME <nl> @ @ - 0 , 0 + 1 @ @ <nl> + wkhtmltopdf . org <nl> \ No newline at end of file <nl>
Create CNAME
wkhtmltopdf/wkhtmltopdf
b27dd5d0dc05434892ca0a625056b2000937b6d1
2019-04-11T03:08:14Z
mmm a / lib / libUPnP / Neptune / Source / Core / NptZip . cpp <nl> ppp b / lib / libUPnP / Neptune / Source / Core / NptZip . cpp <nl> NPT_ZipFile : : GetInputStream ( Entry & entry , NPT_InputStreamReference & zip_stream , <nl> return NPT_ERROR_NOT_SUPPORTED ; <nl> } <nl> # else <nl> - if ( entry . m_CompressionMethod ! = NPT_ZIP_COMPRESSION_METHOD_NONE ) { <nl> + if ( entry . m_CompressionMethod ! = NPT_ZIP_FILE_COMPRESSION_METHOD_NONE ) { <nl> return NPT_ERROR_NOT_SUPPORTED ; <nl> } <nl> # endif <nl>
platinum : fix compile error in NptZip . cpp
xbmc/xbmc
9886ea3ec0cbdb198b31c8ece1b9a4100f4204b0
2014-05-17T07:53:06Z
mmm a / src / operator / cudnn_batch_norm . cu <nl> ppp b / src / operator / cudnn_batch_norm . cu <nl> class CuDNNBatchNormOp : public Operator { <nl> out_data [ cudnnbatchnorm : : kMean ] . get_with_shape < gpu , 1 , real_t > ( Shape1 ( shape_ [ 1 ] ) , s ) ; <nl> Tensor < gpu , 1 > save_inv_var = <nl> out_data [ cudnnbatchnorm : : kInvVar ] . get_with_shape < gpu , 1 , real_t > ( Shape1 ( shape_ [ 1 ] ) , s ) ; <nl> - float a = 1 . 0f , b = 0 . 0f ; <nl> + float a = 1 . 0f ; <nl> + float b = 0 . 0f ; <nl> + float b_add = 1 . 0f ; <nl> CHECK_EQ ( s - > dnn_handle_ownership_ , mshadow : : Stream < gpu > : : OwnHandle ) ; <nl> # if CUDNN_VERSION > = 4007 <nl> CHECK_EQ ( cudnnBatchNormalizationBackward ( s - > dnn_handle_ , <nl> class CuDNNBatchNormOp : public Operator { <nl> & a , <nl> & b , <nl> & a , <nl> - & b , <nl> + req [ cudnnbatchnorm : : kGamma ] = = kWriteTo ? & b : & b_add , <nl> io_desc_ , <nl> x . dptr_ , <nl> io_desc_ , <nl> mmm a / src / operator / cudnn_convolution - inl . h <nl> ppp b / src / operator / cudnn_convolution - inl . h <nl> class CuDNNConvolutionOp : public Operator { <nl> size_t expected = param_ . no_bias = = 0 ? 3 : 2 ; <nl> CHECK_EQ ( out_grad . size ( ) , 1 ) ; <nl> CHECK ( in_data . size ( ) = = expected & & in_grad . size ( ) = = expected ) ; <nl> - / / TODO ( bing ) : think about how to support add to <nl> - CHECK_EQ ( req [ conv : : kWeight ] , kWriteTo ) ; <nl> Stream < gpu > * s = ctx . get_stream < gpu > ( ) ; <nl> Tensor < gpu , 4 , DType > grad = out_grad [ conv : : kOut ] . get < gpu , 4 , DType > ( s ) ; <nl> Tensor < gpu , 4 , DType > wmat = in_data [ conv : : kWeight ] . get < gpu , 4 , DType > ( s ) ; <nl> class CuDNNConvolutionOp : public Operator { <nl> for ( uint32_t g = 0 ; g < param_ . num_group ; + + g ) { <nl> typename DataType < DType > : : ScaleType alpha = 1 . 0f ; <nl> typename DataType < DType > : : ScaleType beta = 0 . 0f ; <nl> + typename DataType < DType > : : ScaleType beta_add = 1 . 0f ; <nl> if ( ! param_ . no_bias ) { <nl> Tensor < gpu , 1 , DType > gbias = in_grad [ conv : : kBias ] . get < gpu , 1 , DType > ( s ) ; <nl> CHECK_EQ ( cudnnConvolutionBackwardBias ( s - > dnn_handle_ , <nl> & alpha , <nl> out_desc_ , <nl> grad . dptr_ + out_offset_ * g , <nl> - & beta , <nl> + req [ conv : : kBias ] = = kWriteTo ? & beta : & beta_add , <nl> bias_desc_ , <nl> gbias . dptr_ + bias_offset_ * g ) , <nl> CUDNN_STATUS_SUCCESS ) ; <nl> class CuDNNConvolutionOp : public Operator { <nl> back_algo_w_ , <nl> workspace . dptr_ , <nl> backward_workspace_byte_ , <nl> - & beta , <nl> + req [ conv : : kWeight ] = = kWriteTo ? & beta : & beta_add , <nl> filter_desc_ , <nl> gwmat . dptr_ + weight_offset_ * g ) , CUDNN_STATUS_SUCCESS ) ; <nl> # elif CUDNN_MAJOR = = 5 <nl> class CuDNNConvolutionOp : public Operator { <nl> back_algo_w_ , <nl> workspace . dptr_ , <nl> backward_workspace_byte_ , <nl> - & beta , <nl> + req [ conv : : kWeight ] = = kWriteTo ? & beta : & beta_add , <nl> filter_desc_ , <nl> gwmat . dptr_ + weight_offset_ * g ) , CUDNN_STATUS_SUCCESS ) ; <nl> # endif <nl> mmm a / src / symbol / static_graph . cc <nl> ppp b / src / symbol / static_graph . cc <nl> StaticGraph : : Node StaticGraph : : CreateGradSumNode ( <nl> if ( grad_source . size ( ) < inplace_sum_cap ) { <nl> gsource = grad_source ; <nl> } else { <nl> - LOG ( INFO ) < < " Memory efficient gradient aggregation on . . . " <nl> - < < " to disable , set MXNET_EXEC_INPLACE_GRAD_SUM_CAP to big number " ; <nl> for ( size_t i = 1 ; i < grad_source . 
size ( ) ; + + i ) { <nl> nodes [ grad_source [ i ] . source_id ] <nl> . addto_index . push_back ( grad_source [ i ] . index ) ; <nl>
AddTo support
apache/incubator-mxnet
6a78b03373afa35622eaec02974543a9dbfff61a
2016-04-18T18:19:58Z
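The commit above switches the cuDNN scaling factor from a fixed beta of 0 to 1 whenever the gradient request is kAddTo, so new gradients accumulate into the existing buffer instead of overwriting it. Below is a minimal, library-free C++ sketch of that idea; `GradReq`, `axpby`, and `write_grad` are illustrative names only, not MXNet or cuDNN APIs.

```cpp
#include <vector>
#include <cassert>

// Illustrative request types, mirroring the kWriteTo / kAddTo distinction.
enum class GradReq { kWriteTo, kAddTo };

// y = alpha * x + beta * y, the BLAS/cuDNN-style accumulation contract.
// beta = 0 overwrites y, beta = 1 adds into it.
void axpby(float alpha, const std::vector<float>& x,
           float beta, std::vector<float>& y) {
  assert(x.size() == y.size());
  for (size_t i = 0; i < x.size(); ++i)
    y[i] = alpha * x[i] + beta * y[i];
}

// Picking beta from the request is all that "AddTo support" amounts to here.
void write_grad(GradReq req, const std::vector<float>& new_grad,
                std::vector<float>& grad_buffer) {
  const float beta = (req == GradReq::kWriteTo) ? 0.0f : 1.0f;
  axpby(1.0f, new_grad, beta, grad_buffer);
}
```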
mmm a / drivers / gles3 / rasterizer_scene_gles3 . cpp <nl> ppp b / drivers / gles3 / rasterizer_scene_gles3 . cpp <nl> void RasterizerSceneGLES3 : : _setup_geometry ( RenderList : : Element * e ) { <nl> <nl> RasterizerStorageGLES3 : : Surface * s = static_cast < RasterizerStorageGLES3 : : Surface * > ( e - > geometry ) ; <nl> <nl> - if ( s - > morph_targets . size ( ) & & e - > instance - > morph_values . size ( ) ) { <nl> + if ( s - > blend_shapes . size ( ) & & e - > instance - > blend_values . size ( ) ) { <nl> / / blend shapes , use transform feedback <nl> - storage - > mesh_render_blend_shapes ( s , e - > instance - > morph_values . ptr ( ) ) ; <nl> + storage - > mesh_render_blend_shapes ( s , e - > instance - > blend_values . ptr ( ) ) ; <nl> / / rebind shader <nl> state . scene_shader . bind ( ) ; <nl> } else { <nl> mmm a / drivers / gles3 / rasterizer_storage_gles3 . cpp <nl> ppp b / drivers / gles3 / rasterizer_storage_gles3 . cpp <nl> void RasterizerStorageGLES3 : : mesh_add_surface ( RID p_mesh , uint32_t p_format , VS : : P <nl> } <nl> <nl> <nl> - bool has_morph = p_blend_shapes . size ( ) ; <nl> + / / bool has_morph = p_blend_shapes . size ( ) ; <nl> <nl> Surface : : Attrib attribs [ VS : : ARRAY_MAX ] ; <nl> <nl> void RasterizerStorageGLES3 : : mesh_add_surface ( RID p_mesh , uint32_t p_format , VS : : P <nl> <nl> ERR_FAIL_COND ( p_index_array . size ( ) ! = index_array_size ) ; <nl> <nl> - ERR_FAIL_COND ( p_blend_shapes . size ( ) ! = mesh - > morph_target_count ) ; <nl> + ERR_FAIL_COND ( p_blend_shapes . size ( ) ! = mesh - > blend_shape_count ) ; <nl> <nl> for ( int i = 0 ; i < p_blend_shapes . size ( ) ; i + + ) { <nl> ERR_FAIL_COND ( p_blend_shapes [ i ] . size ( ) ! = array_size ) ; <nl> void RasterizerStorageGLES3 : : mesh_add_surface ( RID p_mesh , uint32_t p_format , VS : : P <nl> <nl> for ( int i = 0 ; i < p_blend_shapes . size ( ) ; i + + ) { <nl> <nl> - Surface : : MorphTarget mt ; <nl> + Surface : : BlendShape mt ; <nl> <nl> PoolVector < uint8_t > : : Read vr = p_blend_shapes [ i ] . read ( ) ; <nl> <nl> void RasterizerStorageGLES3 : : mesh_add_surface ( RID p_mesh , uint32_t p_format , VS : : P <nl> glBindVertexArray ( 0 ) ; <nl> glBindBuffer ( GL_ARRAY_BUFFER , 0 ) ; / / unbind <nl> <nl> - surface - > morph_targets . push_back ( mt ) ; <nl> + surface - > blend_shapes . push_back ( mt ) ; <nl> <nl> } <nl> } <nl> void RasterizerStorageGLES3 : : mesh_add_surface ( RID p_mesh , uint32_t p_format , VS : : P <nl> mesh - > instance_change_notify ( ) ; <nl> } <nl> <nl> - void RasterizerStorageGLES3 : : mesh_set_morph_target_count ( RID p_mesh , int p_amount ) { <nl> + void RasterizerStorageGLES3 : : mesh_set_blend_shape_count ( RID p_mesh , int p_amount ) { <nl> <nl> Mesh * mesh = mesh_owner . getornull ( p_mesh ) ; <nl> ERR_FAIL_COND ( ! mesh ) ; <nl> void RasterizerStorageGLES3 : : mesh_set_morph_target_count ( RID p_mesh , int p_amount <nl> ERR_FAIL_COND ( mesh - > surfaces . size ( ) ! = 0 ) ; <nl> ERR_FAIL_COND ( p_amount < 0 ) ; <nl> <nl> - mesh - > morph_target_count = p_amount ; <nl> + mesh - > blend_shape_count = p_amount ; <nl> <nl> } <nl> - int RasterizerStorageGLES3 : : mesh_get_morph_target_count ( RID p_mesh ) const { <nl> + int RasterizerStorageGLES3 : : mesh_get_blend_shape_count ( RID p_mesh ) const { <nl> <nl> const Mesh * mesh = mesh_owner . getornull ( p_mesh ) ; <nl> ERR_FAIL_COND_V ( ! 
mesh , 0 ) ; <nl> <nl> - return mesh - > morph_target_count ; <nl> + return mesh - > blend_shape_count ; <nl> } <nl> <nl> <nl> - void RasterizerStorageGLES3 : : mesh_set_morph_target_mode ( RID p_mesh , VS : : MorphTargetMode p_mode ) { <nl> + void RasterizerStorageGLES3 : : mesh_set_blend_shape_mode ( RID p_mesh , VS : : BlendShapeMode p_mode ) { <nl> <nl> Mesh * mesh = mesh_owner . getornull ( p_mesh ) ; <nl> ERR_FAIL_COND ( ! mesh ) ; <nl> <nl> - mesh - > morph_target_mode = p_mode ; <nl> + mesh - > blend_shape_mode = p_mode ; <nl> <nl> } <nl> - VS : : MorphTargetMode RasterizerStorageGLES3 : : mesh_get_morph_target_mode ( RID p_mesh ) const { <nl> + VS : : BlendShapeMode RasterizerStorageGLES3 : : mesh_get_blend_shape_mode ( RID p_mesh ) const { <nl> <nl> const Mesh * mesh = mesh_owner . getornull ( p_mesh ) ; <nl> - ERR_FAIL_COND_V ( ! mesh , VS : : MORPH_MODE_NORMALIZED ) ; <nl> + ERR_FAIL_COND_V ( ! mesh , VS : : BLEND_SHAPE_MODE_NORMALIZED ) ; <nl> <nl> - return mesh - > morph_target_mode ; <nl> + return mesh - > blend_shape_mode ; <nl> } <nl> <nl> void RasterizerStorageGLES3 : : mesh_surface_set_material ( RID p_mesh , int p_surface , RID p_material ) { <nl> Vector < PoolVector < uint8_t > > RasterizerStorageGLES3 : : mesh_surface_get_blend_shap <nl> <nl> Vector < PoolVector < uint8_t > > bsarr ; <nl> <nl> - for ( int i = 0 ; i < mesh - > surfaces [ p_surface ] - > morph_targets . size ( ) ; i + + ) { <nl> + for ( int i = 0 ; i < mesh - > surfaces [ p_surface ] - > blend_shapes . size ( ) ; i + + ) { <nl> <nl> - glBindBuffer ( GL_ELEMENT_ARRAY_BUFFER , mesh - > surfaces [ p_surface ] - > morph_targets [ i ] . vertex_id ) ; <nl> + glBindBuffer ( GL_ELEMENT_ARRAY_BUFFER , mesh - > surfaces [ p_surface ] - > blend_shapes [ i ] . vertex_id ) ; <nl> void * data = glMapBufferRange ( GL_ELEMENT_ARRAY_BUFFER , 0 , mesh - > surfaces [ p_surface ] - > array_byte_size , GL_MAP_READ_BIT ) ; <nl> <nl> ERR_FAIL_COND_V ( ! data , Vector < PoolVector < uint8_t > > ( ) ) ; <nl> void RasterizerStorageGLES3 : : mesh_remove_surface ( RID p_mesh , int p_surface ) { <nl> <nl> glDeleteVertexArrays ( 1 , & surface - > array_id ) ; <nl> <nl> - for ( int i = 0 ; i < surface - > morph_targets . size ( ) ; i + + ) { <nl> + for ( int i = 0 ; i < surface - > blend_shapes . size ( ) ; i + + ) { <nl> <nl> - glDeleteBuffers ( 1 , & surface - > morph_targets [ i ] . vertex_id ) ; <nl> - glDeleteVertexArrays ( 1 , & surface - > morph_targets [ i ] . array_id ) ; <nl> + glDeleteBuffers ( 1 , & surface - > blend_shapes [ i ] . vertex_id ) ; <nl> + glDeleteVertexArrays ( 1 , & surface - > blend_shapes [ i ] . array_id ) ; <nl> } <nl> <nl> mesh - > instance_material_change_notify ( ) ; <nl> void RasterizerStorageGLES3 : : mesh_render_blend_shapes ( Surface * s , float * p_weigh <nl> / / copy all first <nl> float base_weight = 1 . 0 ; <nl> <nl> - int mtc = s - > morph_targets . size ( ) ; <nl> + int mtc = s - > blend_shapes . size ( ) ; <nl> <nl> - if ( s - > mesh - > morph_target_mode = = VS : : MORPH_MODE_NORMALIZED ) { <nl> + if ( s - > mesh - > blend_shape_mode = = VS : : BLEND_SHAPE_MODE_NORMALIZED ) { <nl> <nl> for ( int i = 0 ; i < mtc ; i + + ) { <nl> base_weight - = p_weights [ i ] ; <nl> void RasterizerStorageGLES3 : : mesh_render_blend_shapes ( Surface * s , float * p_weigh <nl> if ( weight < 0 . 001 ) / / not bother with this one <nl> continue ; <nl> <nl> - glBindVertexArray ( s - > morph_targets [ ti ] . array_id ) ; <nl> + glBindVertexArray ( s - > blend_shapes [ ti ] . 
array_id ) ; <nl> glBindBuffer ( GL_ARRAY_BUFFER , resources . transform_feedback_buffers [ 0 ] ) ; <nl> glBindBufferBase ( GL_TRANSFORM_FEEDBACK_BUFFER , 0 , resources . transform_feedback_buffers [ 1 ] ) ; <nl> <nl> mmm a / drivers / gles3 / rasterizer_storage_gles3 . h <nl> ppp b / drivers / gles3 / rasterizer_storage_gles3 . h <nl> class RasterizerStorageGLES3 : public RasterizerStorage { <nl> <nl> / / bool packed ; <nl> <nl> - struct MorphTarget { <nl> + struct BlendShape { <nl> GLuint vertex_id ; <nl> GLuint array_id ; <nl> } ; <nl> <nl> - Vector < MorphTarget > morph_targets ; <nl> + Vector < BlendShape > blend_shapes ; <nl> <nl> Rect3 aabb ; <nl> <nl> class RasterizerStorageGLES3 : public RasterizerStorage { <nl> <nl> bool active ; <nl> Vector < Surface * > surfaces ; <nl> - int morph_target_count ; <nl> - VS : : MorphTargetMode morph_target_mode ; <nl> + int blend_shape_count ; <nl> + VS : : BlendShapeMode blend_shape_mode ; <nl> Rect3 custom_aabb ; <nl> mutable uint64_t last_pass ; <nl> Mesh ( ) { <nl> - morph_target_mode = VS : : MORPH_MODE_NORMALIZED ; <nl> - morph_target_count = 0 ; <nl> + blend_shape_mode = VS : : BLEND_SHAPE_MODE_NORMALIZED ; <nl> + blend_shape_count = 0 ; <nl> last_pass = 0 ; <nl> active = false ; <nl> } <nl> class RasterizerStorageGLES3 : public RasterizerStorage { <nl> <nl> virtual void mesh_add_surface ( RID p_mesh , uint32_t p_format , VS : : PrimitiveType p_primitive , const PoolVector < uint8_t > & p_array , int p_vertex_count , const PoolVector < uint8_t > & p_index_array , int p_index_count , const Rect3 & p_aabb , const Vector < PoolVector < uint8_t > > & p_blend_shapes = Vector < PoolVector < uint8_t > > ( ) , const Vector < Rect3 > & p_bone_aabbs = Vector < Rect3 > ( ) ) ; <nl> <nl> - virtual void mesh_set_morph_target_count ( RID p_mesh , int p_amount ) ; <nl> - virtual int mesh_get_morph_target_count ( RID p_mesh ) const ; <nl> + virtual void mesh_set_blend_shape_count ( RID p_mesh , int p_amount ) ; <nl> + virtual int mesh_get_blend_shape_count ( RID p_mesh ) const ; <nl> <nl> <nl> - virtual void mesh_set_morph_target_mode ( RID p_mesh , VS : : MorphTargetMode p_mode ) ; <nl> - virtual VS : : MorphTargetMode mesh_get_morph_target_mode ( RID p_mesh ) const ; <nl> + virtual void mesh_set_blend_shape_mode ( RID p_mesh , VS : : BlendShapeMode p_mode ) ; <nl> + virtual VS : : BlendShapeMode mesh_get_blend_shape_mode ( RID p_mesh ) const ; <nl> <nl> virtual void mesh_surface_set_material ( RID p_mesh , int p_surface , RID p_material ) ; <nl> virtual RID mesh_surface_get_material ( RID p_mesh , int p_surface ) const ; <nl> mmm a / scene / 3d / mesh_instance . cpp <nl> ppp b / scene / 3d / mesh_instance . cpp <nl> bool MeshInstance : : _set ( const StringName & p_name , const Variant & p_value ) { <nl> return false ; <nl> <nl> <nl> - Map < StringName , MorphTrack > : : Element * E = morph_tracks . find ( p_name ) ; <nl> + Map < StringName , BlendShapeTrack > : : Element * E = blend_shape_tracks . find ( p_name ) ; <nl> if ( E ) { <nl> E - > get ( ) . value = p_value ; <nl> - VisualServer : : get_singleton ( ) - > instance_set_morph_target_weight ( get_instance ( ) , E - > get ( ) . idx , E - > get ( ) . value ) ; <nl> + VisualServer : : get_singleton ( ) - > instance_set_blend_shape_weight ( get_instance ( ) , E - > get ( ) . idx , E - > get ( ) . value ) ; <nl> return true ; <nl> } <nl> <nl> bool MeshInstance : : _get ( const StringName & p_name , Variant & r_ret ) const { <nl> if ( ! get_instance ( ) . 
is_valid ( ) ) <nl> return false ; <nl> <nl> - const Map < StringName , MorphTrack > : : Element * E = morph_tracks . find ( p_name ) ; <nl> + const Map < StringName , BlendShapeTrack > : : Element * E = blend_shape_tracks . find ( p_name ) ; <nl> if ( E ) { <nl> r_ret = E - > get ( ) . value ; <nl> return true ; <nl> bool MeshInstance : : _get ( const StringName & p_name , Variant & r_ret ) const { <nl> void MeshInstance : : _get_property_list ( List < PropertyInfo > * p_list ) const { <nl> <nl> List < String > ls ; <nl> - for ( const Map < StringName , MorphTrack > : : Element * E = morph_tracks . front ( ) ; E ; E = E - > next ( ) ) { <nl> + for ( const Map < StringName , BlendShapeTrack > : : Element * E = blend_shape_tracks . front ( ) ; E ; E = E - > next ( ) ) { <nl> <nl> ls . push_back ( E - > key ( ) ) ; <nl> } <nl> void MeshInstance : : set_mesh ( const Ref < Mesh > & p_mesh ) { <nl> <nl> mesh = p_mesh ; <nl> <nl> - morph_tracks . clear ( ) ; <nl> + blend_shape_tracks . clear ( ) ; <nl> if ( mesh . is_valid ( ) ) { <nl> <nl> <nl> - for ( int i = 0 ; i < mesh - > get_morph_target_count ( ) ; i + + ) { <nl> + for ( int i = 0 ; i < mesh - > get_blend_shape_count ( ) ; i + + ) { <nl> <nl> - MorphTrack mt ; <nl> + BlendShapeTrack mt ; <nl> mt . idx = i ; <nl> mt . value = 0 ; <nl> - morph_tracks [ " morph / " + String ( mesh - > get_morph_target_name ( i ) ) ] = mt ; <nl> + blend_shape_tracks [ " blend_shapes / " + String ( mesh - > get_blend_shape_name ( i ) ) ] = mt ; <nl> } <nl> <nl> mesh - > connect ( CoreStringNames : : get_singleton ( ) - > changed , this , SceneStringNames : : get_singleton ( ) - > _mesh_changed ) ; <nl> mmm a / scene / 3d / mesh_instance . h <nl> ppp b / scene / 3d / mesh_instance . h <nl> class MeshInstance : public GeometryInstance { <nl> Ref < Mesh > mesh ; <nl> NodePath skeleton_path ; <nl> <nl> - struct MorphTrack { <nl> + struct BlendShapeTrack { <nl> <nl> int idx ; <nl> float value ; <nl> - MorphTrack ( ) { idx = 0 ; value = 0 ; } <nl> + BlendShapeTrack ( ) { idx = 0 ; value = 0 ; } <nl> } ; <nl> <nl> - Map < StringName , MorphTrack > morph_tracks ; <nl> + Map < StringName , BlendShapeTrack > blend_shape_tracks ; <nl> Vector < Ref < Material > > materials ; <nl> <nl> void _mesh_changed ( ) ; <nl> mmm a / scene / resources / mesh . cpp <nl> ppp b / scene / resources / mesh . cpp <nl> bool Mesh : : _set ( const StringName & p_name , const Variant & p_value ) { <nl> <nl> String sname = p_name ; <nl> <nl> - if ( p_name = = " morph_target / names " ) { <nl> + if ( p_name = = " blend_shape / names " ) { <nl> <nl> PoolVector < String > sk = p_value ; <nl> int sz = sk . size ( ) ; <nl> PoolVector < String > : : Read r = sk . read ( ) ; <nl> for ( int i = 0 ; i < sz ; i + + ) <nl> - add_morph_target ( r [ i ] ) ; <nl> + add_blend_shape ( r [ i ] ) ; <nl> return true ; <nl> } <nl> <nl> - if ( p_name = = " morph_target / mode " ) { <nl> + if ( p_name = = " blend_shape / mode " ) { <nl> <nl> - set_morph_target_mode ( MorphTargetMode ( int ( p_value ) ) ) ; <nl> + set_blend_shape_mode ( BlendShapeMode ( int ( p_value ) ) ) ; <nl> return true ; <nl> } <nl> <nl> bool Mesh : : _set ( const StringName & p_name , const Variant & p_value ) { <nl> <nl> if ( d . has ( " arrays " ) ) { <nl> / / old format <nl> - ERR_FAIL_COND_V ( ! d . has ( " morph_arrays " ) , false ) ; <nl> - add_surface_from_arrays ( PrimitiveType ( int ( d [ " primitive " ] ) ) , d [ " arrays " ] , d [ " morph_arrays " ] ) ; <nl> + ERR_FAIL_COND_V ( ! d . 
has ( " blend_shape_arrays " ) , false ) ; <nl> + add_surface_from_arrays ( PrimitiveType ( int ( d [ " primitive " ] ) ) , d [ " arrays " ] , d [ " blend_shape_arrays " ] ) ; <nl> <nl> } else if ( d . has ( " array_data " ) ) { <nl> <nl> bool Mesh : : _set ( const StringName & p_name , const Variant & p_value ) { <nl> if ( d . has ( " index_count " ) ) <nl> index_count = d [ " index_count " ] ; <nl> <nl> - Vector < PoolVector < uint8_t > > morphs ; <nl> + Vector < PoolVector < uint8_t > > blend_shapes ; <nl> <nl> - if ( d . has ( " morph_data " ) ) { <nl> - Array morph_data = d [ " morph_data " ] ; <nl> - for ( int i = 0 ; i < morph_data . size ( ) ; i + + ) { <nl> - PoolVector < uint8_t > morph = morph_data [ i ] ; <nl> - morphs . push_back ( morph_data [ i ] ) ; <nl> + if ( d . has ( " blend_shape_data " ) ) { <nl> + Array blend_shape_data = d [ " blend_shape_data " ] ; <nl> + for ( int i = 0 ; i < blend_shape_data . size ( ) ; i + + ) { <nl> + PoolVector < uint8_t > shape = blend_shape_data [ i ] ; <nl> + blend_shapes . push_back ( shape ) ; <nl> } <nl> } <nl> <nl> bool Mesh : : _set ( const StringName & p_name , const Variant & p_value ) { <nl> } <nl> } <nl> <nl> - add_surface ( format , PrimitiveType ( primitive ) , array_data , vertex_count , array_index_data , index_count , aabb , morphs , bone_aabb ) ; <nl> + add_surface ( format , PrimitiveType ( primitive ) , array_data , vertex_count , array_index_data , index_count , aabb , blend_shapes , bone_aabb ) ; <nl> } else { <nl> ERR_FAIL_V ( false ) ; <nl> } <nl> bool Mesh : : _get ( const StringName & p_name , Variant & r_ret ) const { <nl> <nl> String sname = p_name ; <nl> <nl> - if ( p_name = = " morph_target / names " ) { <nl> + if ( p_name = = " blend_shape / names " ) { <nl> <nl> PoolVector < String > sk ; <nl> - for ( int i = 0 ; i < morph_targets . size ( ) ; i + + ) <nl> - sk . push_back ( morph_targets [ i ] ) ; <nl> + for ( int i = 0 ; i < blend_shapes . size ( ) ; i + + ) <nl> + sk . push_back ( blend_shapes [ i ] ) ; <nl> r_ret = sk ; <nl> return true ; <nl> - } else if ( p_name = = " morph_target / mode " ) { <nl> + } else if ( p_name = = " blend_shape / mode " ) { <nl> <nl> - r_ret = get_morph_target_mode ( ) ; <nl> + r_ret = get_blend_shape_mode ( ) ; <nl> return true ; <nl> } else if ( sname . begins_with ( " surface_ " ) ) { <nl> <nl> bool Mesh : : _get ( const StringName & p_name , Variant & r_ret ) const { <nl> } <nl> d [ " skeleton_aabb " ] = arr ; <nl> <nl> - Vector < PoolVector < uint8_t > > morph_data = VS : : get_singleton ( ) - > mesh_surface_get_blend_shapes ( mesh , idx ) ; <nl> + Vector < PoolVector < uint8_t > > blend_shape_data = VS : : get_singleton ( ) - > mesh_surface_get_blend_shapes ( mesh , idx ) ; <nl> <nl> Array md ; <nl> - for ( int i = 0 ; i < morph_data . size ( ) ; i + + ) { <nl> - md . push_back ( morph_data [ i ] ) ; <nl> + for ( int i = 0 ; i < blend_shape_data . size ( ) ; i + + ) { <nl> + md . push_back ( blend_shape_data [ i ] ) ; <nl> } <nl> <nl> - d [ " morph_data " ] = md ; <nl> + d [ " blend_shape_data " ] = md ; <nl> <nl> Ref < Material > m = surface_get_material ( idx ) ; <nl> if ( m . is_valid ( ) ) <nl> bool Mesh : : _get ( const StringName & p_name , Variant & r_ret ) const { <nl> <nl> void Mesh : : _get_property_list ( List < PropertyInfo > * p_list ) const { <nl> <nl> - if ( morph_targets . 
size ( ) ) { <nl> - p_list - > push_back ( PropertyInfo ( Variant : : POOL_STRING_ARRAY , " morph_target / names " , PROPERTY_HINT_NONE , " " , PROPERTY_USAGE_NOEDITOR ) ) ; <nl> - p_list - > push_back ( PropertyInfo ( Variant : : INT , " morph_target / mode " , PROPERTY_HINT_ENUM , " Normalized , Relative " ) ) ; <nl> + if ( blend_shapes . size ( ) ) { <nl> + p_list - > push_back ( PropertyInfo ( Variant : : POOL_STRING_ARRAY , " blend_shape / names " , PROPERTY_HINT_NONE , " " , PROPERTY_USAGE_NOEDITOR ) ) ; <nl> + p_list - > push_back ( PropertyInfo ( Variant : : INT , " blend_shape / mode " , PROPERTY_HINT_ENUM , " Normalized , Relative " ) ) ; <nl> } <nl> <nl> for ( int i = 0 ; i < surfaces . size ( ) ; i + + ) { <nl> Array Mesh : : surface_get_arrays ( int p_surface ) const { <nl> return VisualServer : : get_singleton ( ) - > mesh_surface_get_arrays ( mesh , p_surface ) ; <nl> <nl> } <nl> - Array Mesh : : surface_get_morph_arrays ( int p_surface ) const { <nl> + Array Mesh : : surface_get_blend_shape_arrays ( int p_surface ) const { <nl> <nl> ERR_FAIL_INDEX_V ( p_surface , surfaces . size ( ) , Array ( ) ) ; <nl> return Array ( ) ; <nl> int Mesh : : get_surface_count ( ) const { <nl> return surfaces . size ( ) ; <nl> } <nl> <nl> - void Mesh : : add_morph_target ( const StringName & p_name ) { <nl> + void Mesh : : add_blend_shape ( const StringName & p_name ) { <nl> <nl> if ( surfaces . size ( ) ) { <nl> ERR_EXPLAIN ( " Can ' t add a shape key count if surfaces are already created . " ) ; <nl> void Mesh : : add_morph_target ( const StringName & p_name ) { <nl> <nl> StringName name = p_name ; <nl> <nl> - if ( morph_targets . find ( name ) ! = - 1 ) { <nl> + if ( blend_shapes . find ( name ) ! = - 1 ) { <nl> <nl> int count = 2 ; <nl> do { <nl> <nl> name = String ( p_name ) + " " + itos ( count ) ; <nl> count + + ; <nl> - } while ( morph_targets . find ( name ) ! = - 1 ) ; <nl> + } while ( blend_shapes . find ( name ) ! = - 1 ) ; <nl> } <nl> <nl> - morph_targets . push_back ( name ) ; <nl> - VS : : get_singleton ( ) - > mesh_set_morph_target_count ( mesh , morph_targets . size ( ) ) ; <nl> + blend_shapes . push_back ( name ) ; <nl> + VS : : get_singleton ( ) - > mesh_set_blend_shape_count ( mesh , blend_shapes . size ( ) ) ; <nl> <nl> } <nl> <nl> <nl> - int Mesh : : get_morph_target_count ( ) const { <nl> + int Mesh : : get_blend_shape_count ( ) const { <nl> <nl> - return morph_targets . size ( ) ; <nl> + return blend_shapes . size ( ) ; <nl> } <nl> - StringName Mesh : : get_morph_target_name ( int p_index ) const { <nl> - ERR_FAIL_INDEX_V ( p_index , morph_targets . size ( ) , StringName ( ) ) ; <nl> - return morph_targets [ p_index ] ; <nl> + StringName Mesh : : get_blend_shape_name ( int p_index ) const { <nl> + ERR_FAIL_INDEX_V ( p_index , blend_shapes . size ( ) , StringName ( ) ) ; <nl> + return blend_shapes [ p_index ] ; <nl> } <nl> - void Mesh : : clear_morph_targets ( ) { <nl> + void Mesh : : clear_blend_shapes ( ) { <nl> <nl> if ( surfaces . size ( ) ) { <nl> ERR_EXPLAIN ( " Can ' t set shape key count if surfaces are already created . " ) ; <nl> ERR_FAIL_COND ( surfaces . size ( ) ) ; <nl> } <nl> <nl> - morph_targets . clear ( ) ; <nl> + blend_shapes . 
clear ( ) ; <nl> } <nl> <nl> - void Mesh : : set_morph_target_mode ( MorphTargetMode p_mode ) { <nl> + void Mesh : : set_blend_shape_mode ( BlendShapeMode p_mode ) { <nl> <nl> - morph_target_mode = p_mode ; <nl> - VS : : get_singleton ( ) - > mesh_set_morph_target_mode ( mesh , ( VS : : MorphTargetMode ) p_mode ) ; <nl> + blend_shape_mode = p_mode ; <nl> + VS : : get_singleton ( ) - > mesh_set_blend_shape_mode ( mesh , ( VS : : BlendShapeMode ) p_mode ) ; <nl> } <nl> <nl> - Mesh : : MorphTargetMode Mesh : : get_morph_target_mode ( ) const { <nl> + Mesh : : BlendShapeMode Mesh : : get_blend_shape_mode ( ) const { <nl> <nl> - return morph_target_mode ; <nl> + return blend_shape_mode ; <nl> } <nl> <nl> <nl> Ref < Mesh > Mesh : : create_outline ( float p_margin ) const { <nl> <nl> void Mesh : : _bind_methods ( ) { <nl> <nl> - ClassDB : : bind_method ( _MD ( " add_morph_target " , " name " ) , & Mesh : : add_morph_target ) ; <nl> - ClassDB : : bind_method ( _MD ( " get_morph_target_count " ) , & Mesh : : get_morph_target_count ) ; <nl> - ClassDB : : bind_method ( _MD ( " get_morph_target_name " , " index " ) , & Mesh : : get_morph_target_name ) ; <nl> - ClassDB : : bind_method ( _MD ( " clear_morph_targets " ) , & Mesh : : clear_morph_targets ) ; <nl> - ClassDB : : bind_method ( _MD ( " set_morph_target_mode " , " mode " ) , & Mesh : : set_morph_target_mode ) ; <nl> - ClassDB : : bind_method ( _MD ( " get_morph_target_mode " ) , & Mesh : : get_morph_target_mode ) ; <nl> + ClassDB : : bind_method ( _MD ( " add_blend_shape " , " name " ) , & Mesh : : add_blend_shape ) ; <nl> + ClassDB : : bind_method ( _MD ( " get_blend_shape_count " ) , & Mesh : : get_blend_shape_count ) ; <nl> + ClassDB : : bind_method ( _MD ( " get_blend_shape_name " , " index " ) , & Mesh : : get_blend_shape_name ) ; <nl> + ClassDB : : bind_method ( _MD ( " clear_blend_shapes " ) , & Mesh : : clear_blend_shapes ) ; <nl> + ClassDB : : bind_method ( _MD ( " set_blend_shape_mode " , " mode " ) , & Mesh : : set_blend_shape_mode ) ; <nl> + ClassDB : : bind_method ( _MD ( " get_blend_shape_mode " ) , & Mesh : : get_blend_shape_mode ) ; <nl> <nl> ClassDB : : bind_method ( _MD ( " add_surface_from_arrays " , " primitive " , " arrays " , " blend_shapes " , " compress_flags " ) , & Mesh : : add_surface_from_arrays , DEFVAL ( Array ( ) ) , DEFVAL ( ARRAY_COMPRESS_DEFAULT ) ) ; <nl> ClassDB : : bind_method ( _MD ( " get_surface_count " ) , & Mesh : : get_surface_count ) ; <nl> void Mesh : : _bind_methods ( ) { <nl> Mesh : : Mesh ( ) { <nl> <nl> mesh = VisualServer : : get_singleton ( ) - > mesh_create ( ) ; <nl> - morph_target_mode = MORPH_MODE_RELATIVE ; <nl> + blend_shape_mode = BLEND_SHAPE_MODE_RELATIVE ; <nl> <nl> } <nl> <nl> mmm a / scene / resources / mesh . h <nl> ppp b / scene / resources / mesh . 
h <nl> class Mesh : public Resource { <nl> PRIMITIVE_TRIANGLE_FAN = VisualServer : : PRIMITIVE_TRIANGLE_FAN , <nl> } ; <nl> <nl> - enum MorphTargetMode { <nl> + enum BlendShapeMode { <nl> <nl> - MORPH_MODE_NORMALIZED = VS : : MORPH_MODE_NORMALIZED , <nl> - MORPH_MODE_RELATIVE = VS : : MORPH_MODE_RELATIVE , <nl> + BLEND_SHAPE_MODE_NORMALIZED = VS : : BLEND_SHAPE_MODE_NORMALIZED , <nl> + BLEND_SHAPE_MODE_RELATIVE = VS : : BLEND_SHAPE_MODE_RELATIVE , <nl> } ; <nl> <nl> private : <nl> class Mesh : public Resource { <nl> Vector < Surface > surfaces ; <nl> RID mesh ; <nl> Rect3 aabb ; <nl> - MorphTargetMode morph_target_mode ; <nl> - Vector < StringName > morph_targets ; <nl> + BlendShapeMode blend_shape_mode ; <nl> + Vector < StringName > blend_shapes ; <nl> Rect3 custom_aabb ; <nl> <nl> mutable Ref < TriangleMesh > triangle_mesh ; <nl> class Mesh : public Resource { <nl> void add_surface ( uint32_t p_format , PrimitiveType p_primitive , const PoolVector < uint8_t > & p_array , int p_vertex_count , const PoolVector < uint8_t > & p_index_array , int p_index_count , const Rect3 & p_aabb , const Vector < PoolVector < uint8_t > > & p_blend_shapes = Vector < PoolVector < uint8_t > > ( ) , const Vector < Rect3 > & p_bone_aabbs = Vector < Rect3 > ( ) ) ; <nl> <nl> Array surface_get_arrays ( int p_surface ) const ; <nl> - virtual Array surface_get_morph_arrays ( int p_surface ) const ; <nl> + virtual Array surface_get_blend_shape_arrays ( int p_surface ) const ; <nl> <nl> - void add_morph_target ( const StringName & p_name ) ; <nl> - int get_morph_target_count ( ) const ; <nl> - StringName get_morph_target_name ( int p_index ) const ; <nl> - void clear_morph_targets ( ) ; <nl> + void add_blend_shape ( const StringName & p_name ) ; <nl> + int get_blend_shape_count ( ) const ; <nl> + StringName get_blend_shape_name ( int p_index ) const ; <nl> + void clear_blend_shapes ( ) ; <nl> <nl> - void set_morph_target_mode ( MorphTargetMode p_mode ) ; <nl> - MorphTargetMode get_morph_target_mode ( ) const ; <nl> + void set_blend_shape_mode ( BlendShapeMode p_mode ) ; <nl> + BlendShapeMode get_blend_shape_mode ( ) const ; <nl> <nl> int get_surface_count ( ) const ; <nl> void surface_remove ( int p_idx ) ; <nl> class Mesh : public Resource { <nl> <nl> VARIANT_ENUM_CAST ( Mesh : : ArrayType ) ; <nl> VARIANT_ENUM_CAST ( Mesh : : PrimitiveType ) ; <nl> - VARIANT_ENUM_CAST ( Mesh : : MorphTargetMode ) ; <nl> + VARIANT_ENUM_CAST ( Mesh : : BlendShapeMode ) ; <nl> <nl> # endif <nl> mmm a / servers / visual / rasterizer . h <nl> ppp b / servers / visual / rasterizer . 
h <nl> class RasterizerScene { <nl> Vector < RID > reflection_probe_instances ; <nl> Vector < RID > gi_probe_instances ; <nl> <nl> - Vector < float > morph_values ; <nl> + Vector < float > blend_values ; <nl> <nl> / / BakedLightData * baked_light ; <nl> VS : : ShadowCastingSetting cast_shadows ; <nl> class RasterizerStorage { <nl> <nl> virtual void mesh_add_surface ( RID p_mesh , uint32_t p_format , VS : : PrimitiveType p_primitive , const PoolVector < uint8_t > & p_array , int p_vertex_count , const PoolVector < uint8_t > & p_index_array , int p_index_count , const Rect3 & p_aabb , const Vector < PoolVector < uint8_t > > & p_blend_shapes = Vector < PoolVector < uint8_t > > ( ) , const Vector < Rect3 > & p_bone_aabbs = Vector < Rect3 > ( ) ) = 0 ; <nl> <nl> - virtual void mesh_set_morph_target_count ( RID p_mesh , int p_amount ) = 0 ; <nl> - virtual int mesh_get_morph_target_count ( RID p_mesh ) const = 0 ; <nl> + virtual void mesh_set_blend_shape_count ( RID p_mesh , int p_amount ) = 0 ; <nl> + virtual int mesh_get_blend_shape_count ( RID p_mesh ) const = 0 ; <nl> <nl> <nl> - virtual void mesh_set_morph_target_mode ( RID p_mesh , VS : : MorphTargetMode p_mode ) = 0 ; <nl> - virtual VS : : MorphTargetMode mesh_get_morph_target_mode ( RID p_mesh ) const = 0 ; <nl> + virtual void mesh_set_blend_shape_mode ( RID p_mesh , VS : : BlendShapeMode p_mode ) = 0 ; <nl> + virtual VS : : BlendShapeMode mesh_get_blend_shape_mode ( RID p_mesh ) const = 0 ; <nl> <nl> virtual void mesh_surface_set_material ( RID p_mesh , int p_surface , RID p_material ) = 0 ; <nl> virtual RID mesh_surface_get_material ( RID p_mesh , int p_surface ) const = 0 ; <nl> mmm a / servers / visual / visual_server_raster . h <nl> ppp b / servers / visual / visual_server_raster . h <nl> class VisualServerRaster : public VisualServer { <nl> <nl> BIND10 ( mesh_add_surface , RID , uint32_t , PrimitiveType , const PoolVector < uint8_t > & , int , const PoolVector < uint8_t > & , int , const Rect3 & , const Vector < PoolVector < uint8_t > > & , const Vector < Rect3 > & ) <nl> <nl> - BIND2 ( mesh_set_morph_target_count , RID , int ) <nl> - BIND1RC ( int , mesh_get_morph_target_count , RID ) <nl> + BIND2 ( mesh_set_blend_shape_count , RID , int ) <nl> + BIND1RC ( int , mesh_get_blend_shape_count , RID ) <nl> <nl> <nl> - BIND2 ( mesh_set_morph_target_mode , RID , MorphTargetMode ) <nl> - BIND1RC ( MorphTargetMode , mesh_get_morph_target_mode , RID ) <nl> + BIND2 ( mesh_set_blend_shape_mode , RID , BlendShapeMode ) <nl> + BIND1RC ( BlendShapeMode , mesh_get_blend_shape_mode , RID ) <nl> <nl> BIND3 ( mesh_surface_set_material , RID , int , RID ) <nl> BIND2RC ( RID , mesh_surface_get_material , RID , int ) <nl> class VisualServerRaster : public VisualServer { <nl> BIND2 ( instance_set_layer_mask , RID , uint32_t ) <nl> BIND2 ( instance_set_transform , RID , const Transform & ) <nl> BIND2 ( instance_attach_object_instance_ID , RID , ObjectID ) <nl> - BIND3 ( instance_set_morph_target_weight , RID , int , float ) <nl> + BIND3 ( instance_set_blend_shape_weight , RID , int , float ) <nl> BIND3 ( instance_set_surface_material , RID , int , RID ) <nl> BIND2 ( instance_set_visible , RID , bool ) <nl> <nl> mmm a / servers / visual / visual_server_scene . cpp <nl> ppp b / servers / visual / visual_server_scene . cpp <nl> void VisualServerScene : : instance_set_base ( RID p_instance , RID p_base ) { <nl> instance - > base_data = NULL ; <nl> } <nl> <nl> - instance - > morph_values . clear ( ) ; <nl> + instance - > blend_values . 
clear ( ) ; <nl> <nl> for ( int i = 0 ; i < instance - > materials . size ( ) ; i + + ) { <nl> if ( instance - > materials [ i ] . is_valid ( ) ) { <nl> void VisualServerScene : : instance_attach_object_instance_ID ( RID p_instance , Object <nl> instance - > object_ID = p_ID ; <nl> <nl> } <nl> - void VisualServerScene : : instance_set_morph_target_weight ( RID p_instance , int p_shape , float p_weight ) { <nl> + void VisualServerScene : : instance_set_blend_shape_weight ( RID p_instance , int p_shape , float p_weight ) { <nl> <nl> Instance * instance = instance_owner . get ( p_instance ) ; <nl> ERR_FAIL_COND ( ! instance ) ; <nl> void VisualServerScene : : instance_set_morph_target_weight ( RID p_instance , int p_sh <nl> _update_dirty_instance ( instance ) ; <nl> } <nl> <nl> - ERR_FAIL_INDEX ( p_shape , instance - > morph_values . size ( ) ) ; <nl> - instance - > morph_values [ p_shape ] = p_weight ; <nl> + ERR_FAIL_INDEX ( p_shape , instance - > blend_values . size ( ) ) ; <nl> + instance - > blend_values [ p_shape ] = p_weight ; <nl> } <nl> <nl> void VisualServerScene : : instance_set_surface_material ( RID p_instance , int p_surface , RID p_material ) { <nl> void VisualServerScene : : _update_dirty_instance ( Instance * p_instance ) { <nl> } <nl> p_instance - > materials . resize ( new_mat_count ) ; <nl> <nl> - int new_morph_count = VSG : : storage - > mesh_get_morph_target_count ( p_instance - > base ) ; <nl> - if ( new_morph_count ! = p_instance - > morph_values . size ( ) ) { <nl> - p_instance - > morph_values . resize ( new_morph_count ) ; <nl> - for ( int i = 0 ; i < new_morph_count ; i + + ) { <nl> - p_instance - > morph_values [ i ] = 0 ; <nl> + int new_blend_shape_count = VSG : : storage - > mesh_get_blend_shape_count ( p_instance - > base ) ; <nl> + if ( new_blend_shape_count ! = p_instance - > blend_values . size ( ) ) { <nl> + p_instance - > blend_values . resize ( new_blend_shape_count ) ; <nl> + for ( int i = 0 ; i < new_blend_shape_count ; i + + ) { <nl> + p_instance - > blend_values [ i ] = 0 ; <nl> } <nl> } <nl> } <nl> mmm a / servers / visual / visual_server_scene . h <nl> ppp b / servers / visual / visual_server_scene . h <nl> class VisualServerScene { <nl> virtual void instance_set_layer_mask ( RID p_instance , uint32_t p_mask ) ; <nl> virtual void instance_set_transform ( RID p_instance , const Transform & p_transform ) ; <nl> virtual void instance_attach_object_instance_ID ( RID p_instance , ObjectID p_ID ) ; <nl> - virtual void instance_set_morph_target_weight ( RID p_instance , int p_shape , float p_weight ) ; <nl> + virtual void instance_set_blend_shape_weight ( RID p_instance , int p_shape , float p_weight ) ; <nl> virtual void instance_set_surface_material ( RID p_instance , int p_surface , RID p_material ) ; <nl> virtual void instance_set_visible ( RID p_instance , bool p_visible ) ; <nl> <nl> mmm a / servers / visual_server . h <nl> ppp b / servers / visual_server . 
h <nl> class VisualServer : public Object { <nl> virtual void mesh_add_surface_from_arrays ( RID p_mesh , PrimitiveType p_primitive , const Array & p_arrays , const Array & p_blend_shapes = Array ( ) , uint32_t p_compress_format = ARRAY_COMPRESS_DEFAULT ) ; <nl> virtual void mesh_add_surface ( RID p_mesh , uint32_t p_format , PrimitiveType p_primitive , const PoolVector < uint8_t > & p_array , int p_vertex_count , const PoolVector < uint8_t > & p_index_array , int p_index_count , const Rect3 & p_aabb , const Vector < PoolVector < uint8_t > > & p_blend_shapes = Vector < PoolVector < uint8_t > > ( ) , const Vector < Rect3 > & p_bone_aabbs = Vector < Rect3 > ( ) ) = 0 ; <nl> <nl> - virtual void mesh_set_morph_target_count ( RID p_mesh , int p_amount ) = 0 ; <nl> - virtual int mesh_get_morph_target_count ( RID p_mesh ) const = 0 ; <nl> + virtual void mesh_set_blend_shape_count ( RID p_mesh , int p_amount ) = 0 ; <nl> + virtual int mesh_get_blend_shape_count ( RID p_mesh ) const = 0 ; <nl> <nl> - enum MorphTargetMode { <nl> - MORPH_MODE_NORMALIZED , <nl> - MORPH_MODE_RELATIVE , <nl> + enum BlendShapeMode { <nl> + BLEND_SHAPE_MODE_NORMALIZED , <nl> + BLEND_SHAPE_MODE_RELATIVE , <nl> } ; <nl> <nl> - virtual void mesh_set_morph_target_mode ( RID p_mesh , MorphTargetMode p_mode ) = 0 ; <nl> - virtual MorphTargetMode mesh_get_morph_target_mode ( RID p_mesh ) const = 0 ; <nl> + virtual void mesh_set_blend_shape_mode ( RID p_mesh , BlendShapeMode p_mode ) = 0 ; <nl> + virtual BlendShapeMode mesh_get_blend_shape_mode ( RID p_mesh ) const = 0 ; <nl> <nl> virtual void mesh_surface_set_material ( RID p_mesh , int p_surface , RID p_material ) = 0 ; <nl> virtual RID mesh_surface_get_material ( RID p_mesh , int p_surface ) const = 0 ; <nl> class VisualServer : public Object { <nl> virtual void instance_set_layer_mask ( RID p_instance , uint32_t p_mask ) = 0 ; <nl> virtual void instance_set_transform ( RID p_instance , const Transform & p_transform ) = 0 ; <nl> virtual void instance_attach_object_instance_ID ( RID p_instance , ObjectID p_ID ) = 0 ; <nl> - virtual void instance_set_morph_target_weight ( RID p_instance , int p_shape , float p_weight ) = 0 ; <nl> + virtual void instance_set_blend_shape_weight ( RID p_instance , int p_shape , float p_weight ) = 0 ; <nl> virtual void instance_set_surface_material ( RID p_instance , int p_surface , RID p_material ) = 0 ; <nl> virtual void instance_set_visible ( RID p_instance , bool p_visible ) = 0 ; <nl> <nl> mmm a / tools / editor / io_plugins / editor_import_collada . cpp <nl> ppp b / tools / editor / io_plugins / editor_import_collada . cpp <nl> Error ColladaImport : : _create_mesh_surfaces ( bool p_optimize , Ref < Mesh > & p_mesh , con <nl> ERR_FAIL_COND_V ( ! collada . state . mesh_data_map . has ( target ) , ERR_INVALID_DATA ) ; <nl> String name = collada . state . mesh_data_map [ target ] . name ; <nl> <nl> - p_mesh - > add_morph_target ( name ) ; <nl> + p_mesh - > add_blend_shape ( name ) ; <nl> } <nl> if ( p_morph_data - > mode = = " RELATIVE " ) <nl> - p_mesh - > set_morph_target_mode ( Mesh : : MORPH_MODE_RELATIVE ) ; <nl> + p_mesh - > set_blend_shape_mode ( Mesh : : BLEND_SHAPE_MODE_RELATIVE ) ; <nl> else if ( p_morph_data - > mode = = " NORMALIZED " ) <nl> - p_mesh - > set_morph_target_mode ( Mesh : : MORPH_MODE_NORMALIZED ) ; <nl> + p_mesh - > set_blend_shape_mode ( Mesh : : BLEND_SHAPE_MODE_NORMALIZED ) ; <nl> } <nl> <nl> <nl>
Unify naming of blendshape / morphtarget into just " Blend Shape "
godotengine/godot
35b404ba085819e5cd5f432b7c00ef3167523bb7
2017-01-12T11:34:00Z
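The rename above is mostly mechanical, but the touched code also shows the two modes the renamed enum encodes: in normalized mode the base mesh keeps 1 minus the sum of the shape weights, while in relative mode the shapes contribute additive deltas. The C++ sketch below is one plausible reading of that weighting loop; `Vec3`, `BlendShapeMode`, and `blend_vertex` are illustrative names, not engine APIs.

```cpp
#include <vector>
#include <cassert>

struct Vec3 { float x, y, z; };

Vec3 operator*(float s, const Vec3& v) { return {s * v.x, s * v.y, s * v.z}; }
Vec3 operator+(const Vec3& a, const Vec3& b) { return {a.x + b.x, a.y + b.y, a.z + b.z}; }

enum class BlendShapeMode { Normalized, Relative };

// Blend one vertex from the base mesh and a set of blend shapes.
// Normalized: shapes hold absolute positions and the base contribution
// shrinks as weights grow. Relative: shapes hold deltas, base weight stays 1.
Vec3 blend_vertex(BlendShapeMode mode, const Vec3& base,
                  const std::vector<Vec3>& shapes,
                  const std::vector<float>& weights) {
  assert(shapes.size() == weights.size());
  float base_weight = 1.0f;
  if (mode == BlendShapeMode::Normalized)
    for (float w : weights) base_weight -= w;   // mirrors the engine's loop

  Vec3 out = base_weight * base;
  for (size_t i = 0; i < shapes.size(); ++i) {
    if (weights[i] < 0.001f) continue;          // skip negligible shapes
    out = out + weights[i] * shapes[i];
  }
  return out;
}
```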
mmm a / vendor / cmder_exinit <nl> ppp b / vendor / cmder_exinit <nl> <nl> # Copy this file to your non integrated * nix - like environment , <nl> - # Cygwin / MinGW / MSys2 / Git for Windows SDK , installs ' / etc / profile . d / ' <nl> + # Cygwin / MSys2 / Git for Windows SDK , installs ' / etc / profile . d / ' <nl> # folder to integrate the externally installed Unix like environment <nl> # into Cmder so it has access to settings stored in Cmder / config <nl> # folder when launched . <nl> <nl> # <nl> # bash - Copy to / etc / profile . d / cmder_exinit . sh <nl> # zsh - Copy to / etc / profile . d / cmder_exinit . zsh <nl> - # <nl> - # # Uncomment and edit the CMDER_ROOT line to use Cmder / config even when launched <nl> - # # from outside Cmder . <nl> - # CMDER_ROOT = $ { USERPROFILE } / cmder # This is not required if launched from Cmder . <nl> - <nl> # Add portable user customizations $ { CMDER_ROOT } / config / user - profile . sh or <nl> # add whole config scripts to $ { CMDER_ROOT } / config / profile . d both will be sourced <nl> # from mthis file and be appied to the environment at startup . <nl> <nl> # Add system specific users customizations to $ HOME / . bashrc , these <nl> # customizations will not follow Cmder to another machine . <nl> <nl> + # # Uncomment and edit the CMDER_ROOT line to use Cmder / config even when launched <nl> + # # from outside Cmder . <nl> + # CMDER_ROOT = $ { USERPROFILE } / cmder # This is not required if launched from Cmder . <nl> + <nl> # Check that we haven ' t already been sourced . <nl> [ [ - z $ { CMDER_EXINIT } ] ] & & CMDER_EXINIT = " 1 " | | return <nl> <nl> # We do this for bash as admin sessions since $ CMDER_ROOT is not being set <nl> if [ " $ CMDER_ROOT " = " " - a " $ ConEmuDir " ! = " " ] ; then <nl> - case " $ ConEmuDir " in * \ \ * ) CMDER_ROOT = $ ( cd " $ ( cygpath - u " $ ConEmuDir " ) / . . / . . " ; pwd ) ; ; esac <nl> - if [ ! - d $ CMDER_ROOT / vendor ] ; then <nl> + if [ - d " $ { ConEmuDir } . . / . . / vendor " ] ; then <nl> + case " $ ConEmuDir " in * \ \ * ) CMDER_ROOT = $ ( cd " $ ( cygpath - u " $ ConEmuDir " ) / . . / . . " ; pwd ) ; ; esac <nl> + else <nl> echo " Running in ConEmu without Cmder , skipping Cmder integration . " <nl> - return <nl> fi <nl> elif [ " $ CMDER_ROOT " ! = " " ] ; then <nl> case " $ CMDER_ROOT " in * \ \ * ) CMDER_ROOT = " $ ( cygpath - u " $ CMDER_ROOT " ) " ; ; esac <nl> - else <nl> - return <nl> fi <nl> <nl> - echo " Using \ " CMDER_ROOT \ " at \ " $ { CMDER_ROOT } \ " . . . \ n " <nl> - <nl> - # Remove any trailing ' / ' <nl> - CMDER_ROOT = $ ( echo $ CMDER_ROOT | sed ' s : / * $ : : ' ) <nl> - <nl> - export CMDER_ROOT <nl> - <nl> - PATH = $ { CMDER_ROOT } / bin : $ PATH : $ { CMDER_ROOT } <nl> - <nl> - export PATH <nl> - <nl> - if [ ! - d " $ { CMDER_ROOT } / config / profile . d " ] ; then <nl> - mkdir - p $ { CMDER_ROOT } / config / profile . d <nl> - fi <nl> - <nl> - # Drop * . sh or * . zsh files into " $ { CMDER_ROOT } \ config \ profile . d " <nl> - # to source them at startup . <nl> - if [ - d " $ { CMDER_ROOT } / config / profile . d " ] ; then <nl> - unset profile_d_scripts <nl> - pushd $ { CMDER_ROOT } / config / profile . d > / dev / null <nl> - if [ ! " x $ { ZSH_VERSION } " = " x " ] ; then <nl> - profile_d_scripts = $ ( ls $ { CMDER_ROOT } / config / profile . d / * . zsh ) 2 > / dev / null <nl> - elif [ ! " x $ { BASH_VERSION } " = " x " ] ; then <nl> - profile_d_scripts = $ ( ls $ { CMDER_ROOT } / config / profile . d / * . sh ) 2 > / dev / null <nl> + if [ ! 
" $ CMDER_ROOT " = " " ] ; then <nl> + # Remove any trailing ' / ' <nl> + CMDER_ROOT = $ ( echo $ CMDER_ROOT | sed ' s : / * $ : : ' ) <nl> + <nl> + echo " Using \ " CMDER_ROOT \ " at \ " $ { CMDER_ROOT } \ " . . . \ n " <nl> + <nl> + export CMDER_ROOT <nl> + <nl> + PATH = $ { CMDER_ROOT } / bin : $ PATH : $ { CMDER_ROOT } <nl> + <nl> + export PATH <nl> + <nl> + # Drop * . sh or * . zsh files into " $ { CMDER_ROOT } \ config \ profile . d " <nl> + # to source them at startup . <nl> + if [ ! - d " $ { CMDER_ROOT } / config / profile . d " ] ; then <nl> + mkdir - p $ { CMDER_ROOT } / config / profile . d <nl> fi <nl> - <nl> - if [ ! " x $ { profile_d_scripts } " = " x " ] ; then <nl> - for x in $ { profile_d_scripts } ; do <nl> - # echo Sourcing " $ { x } " . . . <nl> - . $ x <nl> - done <nl> + <nl> + if [ - d " $ { CMDER_ROOT } / config / profile . d " ] ; then <nl> + unset profile_d_scripts <nl> + pushd $ { CMDER_ROOT } / config / profile . d > / dev / null <nl> + if [ ! " x $ { ZSH_VERSION } " = " x " ] ; then <nl> + profile_d_scripts = $ ( ls $ { CMDER_ROOT } / config / profile . d / * . zsh ) 2 > / dev / null <nl> + elif [ ! " x $ { BASH_VERSION } " = " x " ] ; then <nl> + profile_d_scripts = $ ( ls $ { CMDER_ROOT } / config / profile . d / * . sh ) 2 > / dev / null <nl> + fi <nl> + <nl> + if [ ! " x $ { profile_d_scripts } " = " x " ] ; then <nl> + for x in $ { profile_d_scripts } ; do <nl> + # echo Sourcing " $ { x } " . . . <nl> + . $ x <nl> + done <nl> + fi <nl> + popd > / dev / null <nl> fi <nl> - popd > / dev / null <nl> - fi <nl> - <nl> - if [ - f $ { CMDER_ROOT } / config / user - profile . sh ] ; then <nl> - . $ { CMDER_ROOT } / config / user - profile . sh <nl> - else <nl> - echo Creating user startup file : " $ { CMDER_ROOT } / config / user - profile . sh " <nl> - cat < < - eof > " $ { CMDER_ROOT } / config / user - profile . sh " <nl> + <nl> + if [ - f $ { CMDER_ROOT } / config / user - profile . sh ] ; then <nl> + . $ { CMDER_ROOT } / config / user - profile . sh <nl> + else <nl> + echo Creating user startup file : " $ { CMDER_ROOT } / config / user - profile . sh " <nl> + cat < < - eof > " $ { CMDER_ROOT } / config / user - profile . sh " <nl> # use this file to run your own startup commands for msys2 bash ' <nl> <nl> # To add a new vendor to the path , do something like : <nl> # export PATH = \ $ { CMDER_ROOT } / vendor / whatever : \ $ { PATH } <nl> eof <nl> + fi <nl> fi <nl>
- - amend
cmderdev/cmder
f7a18d4b6e13b9307a912f53d60e0aac00acec4f
2016-02-27T18:00:40Z
mmm a / include / LightGBM / utils / common . h <nl> ppp b / include / LightGBM / utils / common . h <nl> inline static const char * Atoi ( const char * p , int * out ) { <nl> inline static const char * Atof ( const char * p , double * out ) { <nl> int frac ; <nl> double sign , value , scale ; <nl> - <nl> + * out = 0 ; <nl> / / Skip leading white space , if any . <nl> while ( * p = = ' ' ) { <nl> + + p ; <nl>
default to zero in Atof .
microsoft/LightGBM
2d0e8fc9013c6f7e9550ae50b40d3267adba1c50
2016-10-20T06:55:12Z
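The one-line fix above initializes the output before any parsing happens, so a string with no leading number still leaves the destination in a defined state instead of holding stale garbage. A tiny self-contained C++ sketch of the same pattern (a simplified integer parser, not LightGBM's actual Atof):

```cpp
#include <cctype>
#include <cassert>

// Parse a non-negative integer prefix; always define *out, even when the
// input starts with no digits, so the caller never reads an uninitialized
// or stale value.
const char* parse_uint_prefix(const char* p, int* out) {
  *out = 0;  // the fix: default to zero before touching the input
  while (*p == ' ') ++p;  // skip leading white space, as the original does
  while (std::isdigit(static_cast<unsigned char>(*p))) {
    *out = *out * 10 + (*p - '0');
    ++p;
  }
  return p;  // points just past the parsed prefix
}

int main() {
  int v = 42;
  parse_uint_prefix("abc", &v);   // no digits: v becomes 0, not stale 42
  assert(v == 0);
  parse_uint_prefix("  137x", &v);
  assert(v == 137);
}
```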
mmm a / src / caffe / layers / neuron_layer . cpp <nl> ppp b / src / caffe / layers / neuron_layer . cpp <nl> void NeuronLayer < Dtype > : : SetUp ( const vector < Blob < Dtype > * > & bottom , <nl> vector < Blob < Dtype > * > * top ) { <nl> CHECK_EQ ( bottom . size ( ) , 1 ) < < " Neuron Layer takes a single blob as input . " ; <nl> CHECK_EQ ( top - > size ( ) , 1 ) < < " Neuron Layer takes a single blob as output . " ; <nl> - ( * top ) [ 0 ] - > Reshape ( bottom [ 0 ] - > num ( ) , bottom [ 0 ] - > channels ( ) , <nl> - bottom [ 0 ] - > height ( ) , bottom [ 0 ] - > width ( ) ) ; <nl> + / / NeuronLayer allows in - place computations . If the computation is not <nl> + / / in - place , we will need to initialize the top blob . <nl> + if ( ( * top ) [ 0 ] ! = bottom [ 0 ] ) { <nl> + ( * top ) [ 0 ] - > Reshape ( bottom [ 0 ] - > num ( ) , bottom [ 0 ] - > channels ( ) , <nl> + bottom [ 0 ] - > height ( ) , bottom [ 0 ] - > width ( ) ) ; <nl> + } <nl> } ; <nl> <nl> INSTANTIATE_CLASS ( NeuronLayer ) ; <nl> mmm a / src / caffe / net . cpp <nl> ppp b / src / caffe / net . cpp <nl> Net < Dtype > : : Net ( const NetParameter & param , <nl> } <nl> for ( int j = 0 ; j < layer_connection . top_size ( ) ; + + j ) { <nl> const string & blob_name = layer_connection . top ( j ) ; <nl> - if ( blob_name_to_idx . find ( blob_name ) ! = blob_name_to_idx . end ( ) ) { <nl> + / / Check if we are doing in - place computation <nl> + if ( layer_connection . bottom_size ( ) > j & & <nl> + blob_name = = layer_connection . bottom ( j ) ) { <nl> + / / In - place computation <nl> + LOG ( INFO ) < < layer_param . name ( ) < < " - > " < < blob_name < < " ( in - place ) " ; <nl> + available_blobs . insert ( blob_name ) ; <nl> + top_vecs_ [ i ] . push_back ( <nl> + blobs_ [ blob_name_to_idx [ blob_name ] ] . get ( ) ) ; <nl> + top_id_vecs_ [ i ] . push_back ( blob_name_to_idx [ blob_name ] ) ; <nl> + } else if ( blob_name_to_idx . find ( blob_name ) ! = blob_name_to_idx . end ( ) ) { <nl> + / / If we are not doing in - place computation but has duplicated blobs , <nl> + / / raise an error . <nl> LOG ( FATAL ) < < " Duplicate blobs produced by multiple sources . " ; <nl> + } else { <nl> + / / Normal output . <nl> + LOG ( INFO ) < < layer_param . name ( ) < < " - > " < < blob_name ; <nl> + shared_ptr < Blob < Dtype > > blob_pointer ( new Blob < Dtype > ( ) ) ; <nl> + blobs_ . push_back ( blob_pointer ) ; <nl> + blob_names_ . push_back ( blob_name ) ; <nl> + blob_name_to_idx [ blob_name ] = blob_names_ . size ( ) - 1 ; <nl> + available_blobs . insert ( blob_name ) ; <nl> + top_vecs_ [ i ] . push_back ( blobs_ [ blob_names_ . size ( ) - 1 ] . get ( ) ) ; <nl> + top_id_vecs_ [ i ] . push_back ( blob_names_ . size ( ) - 1 ) ; <nl> } <nl> - LOG ( INFO ) < < layer_param . name ( ) < < " - > " < < blob_name ; <nl> - shared_ptr < Blob < Dtype > > blob_pointer ( new Blob < Dtype > ( ) ) ; <nl> - blobs_ . push_back ( blob_pointer ) ; <nl> - blob_names_ . push_back ( blob_name ) ; <nl> - blob_name_to_idx [ blob_name ] = blob_names_ . size ( ) - 1 ; <nl> - available_blobs . insert ( blob_name ) ; <nl> - top_vecs_ [ i ] . push_back ( blobs_ [ blob_names_ . size ( ) - 1 ] . get ( ) ) ; <nl> - top_id_vecs_ [ i ] . push_back ( blob_names_ . size ( ) - 1 ) ; <nl> } <nl> } <nl> / / In the end , all remaining blobs are considered output blobs . <nl>
allow in - place neuron layers
BVLC/caffe
2d975527a4c4eb3afd067bf9673415f25d15793b
2013-10-14T22:29:30Z
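Both hunks above implement one idea: a neuron layer may reuse its input blob as its output, and the net builder detects that case by comparing top and bottom blob names, while layer setup skips reshaping when the pointers alias. A compact C++ sketch of the top == bottom check, with made-up `Blob` and ReLU helpers standing in for Caffe's types:

```cpp
#include <vector>
#include <iostream>

struct Blob {
  std::vector<float> data;
  void reshape(size_t n) { data.assign(n, 0.0f); }
};

// Element-wise layer setup (e.g. ReLU) that supports in-place computation.
void setup_neuron_layer(const Blob* bottom, Blob* top) {
  // Only allocate the output when it is a distinct blob; if top aliases
  // bottom we must not reshape, or we would wipe the input data.
  if (top != bottom) top->reshape(bottom->data.size());
}

void relu_forward(const Blob* bottom, Blob* top) {
  for (size_t i = 0; i < bottom->data.size(); ++i)
    top->data[i] = bottom->data[i] > 0 ? bottom->data[i] : 0.0f;
}

int main() {
  Blob b;
  b.data = {-1.0f, 2.0f, -3.0f, 4.0f};
  setup_neuron_layer(&b, &b);  // in-place: top and bottom are the same blob
  relu_forward(&b, &b);
  for (float v : b.data) std::cout << v << ' ';  // prints: 0 2 0 4
}
```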
mmm a / jstests / aggregation / testshard1 . js <nl> ppp b / jstests / aggregation / testshard1 . js <nl> testSkipLimit ( [ { $ limit : 10 } , { $ skip : 5 } , { $ skip : 3 } ] , 10 - 3 - 5 ) ; <nl> <nl> / / test sort + limit ( using random to pull from both shards ) <nl> function testSortLimit ( limit , direction ) { <nl> + shardedAggTest . stopBalancer ( ) ; / / TODO : remove after fixing SERVER - 9622 <nl> var from_cursor = db . ts1 . find ( { } , { random : 1 , _id : 0 } ) <nl> . sort ( { random : direction } ) <nl> . limit ( limit ) <nl> . toArray ( ) ; <nl> + shardedAggTest . startBalancer ( ) ; / / TODO : remove after fixing SERVER - 9622 <nl> var from_agg = aggregateOrdered ( db . ts1 , [ { $ project : { random : 1 , _id : 0 } } <nl> , { $ sort : { random : direction } } <nl> , { $ limit : limit } <nl> testSortLimit ( 100 , - 1 ) ; <nl> / / test $ out by copying source collection verbatim to output <nl> var outCollection = db . ts1_out ; <nl> var res = aggregateOrdered ( db . ts1 , [ { $ out : outCollection . getName ( ) } ] ) ; <nl> + shardedAggTest . stopBalancer ( ) ; / / TODO : remove after fixing SERVER - 9622 <nl> assert . eq ( db . ts1 . find ( ) . itcount ( ) , outCollection . find ( ) . itcount ( ) ) ; <nl> assert . eq ( db . ts1 . find ( ) . sort ( { _id : 1 } ) . toArray ( ) , <nl> outCollection . find ( ) . sort ( { _id : 1 } ) . toArray ( ) ) ; <nl> + shardedAggTest . startBalancer ( ) ; / / TODO : remove after fixing SERVER - 9622 <nl> <nl> / / Make sure we error out if $ out collection is sharded <nl> assertErrorCode ( outCollection , [ { $ out : db . ts1 . getName ( ) } ] , 17017 ) ; <nl>
SERVER - 13263 Temporarily disable balancer on testshard1 . js when not issuing the aggregate command
mongodb/mongo
8d170f91b67d6e31677668c8c1e9406de8608fb4
2014-03-18T20:51:18Z
mmm a / lib / SILAnalysis / GlobalARCPairingAnalysis . cpp <nl> ppp b / lib / SILAnalysis / GlobalARCPairingAnalysis . cpp <nl> bool ARCMatchingSetBuilder : : matchUpIncDecSetsForPtr ( ) { <nl> <nl> } <nl> <nl> + bool HaveIncInsertPts = ! MatchSet . IncrementInsertPts . empty ( ) ; <nl> + bool HaveDecInsertPts = ! MatchSet . DecrementInsertPts . empty ( ) ; <nl> + <nl> / / If we have insertion points and partial merges , return false to avoid <nl> / / control dependency issues . <nl> - if ( ( ! MatchSet . IncrementInsertPts . empty ( ) | | <nl> - ! MatchSet . DecrementInsertPts . empty ( ) ) & & <nl> - Partial ) <nl> + if ( ( HaveIncInsertPts | | HaveDecInsertPts ) & & Partial ) <nl> + return false ; <nl> + <nl> + / / If we have insertion points for increments , but not for decrements ( or <nl> + / / vis - a - versa ) , return false . This prevents us from inserting retains and <nl> + / / removing releases or vis - a - versa . <nl> + if ( HaveIncInsertPts ! = HaveDecInsertPts ) <nl> return false ; <nl> <nl> - if ( MatchSet . IncrementInsertPts . empty ( ) & & ! MatchSet . Increments . empty ( ) ) <nl> + / / If we do not have any insertion points but we do have increments , we must <nl> + / / be eliminating pairs . <nl> + if ( ! HaveIncInsertPts & & ! MatchSet . Increments . empty ( ) ) <nl> MatchedPair = true ; <nl> <nl> / / Success ! <nl> mmm a / test / SILPasses / globalarcopts . sil <nl> ppp b / test / SILPasses / globalarcopts . sil <nl> import Builtin <nl> <nl> sil @ user : $ @ thin ( Builtin . NativeObject ) - > ( ) <nl> <nl> + struct S { <nl> + var x : Builtin . NativeObject <nl> + } <nl> + sil @ S_user : $ @ thin ( S ) - > ( ) <nl> + <nl> + class Cls { <nl> + var random : Builtin . Int32 <nl> + <nl> + init ( ) <nl> + } <nl> + <nl> + class C { <nl> + var w : Optional < Builtin . NativeObject > <nl> + } <nl> + <nl> + class RetainUser { } <nl> + <nl> + sil @ rawpointer_use : $ @ thin ( Builtin . RawPointer ) - > Bool <nl> + <nl> / / / / / / / / / / / / / / / / / <nl> / / Basic Tests / / <nl> / / / / / / / / / / / / / / / / / <nl> bb0 ( % 0 : $ Builtin . NativeObject ) : <nl> return % 1 : $ ( ) <nl> } <nl> <nl> - struct S { <nl> - var x : Builtin . NativeObject <nl> - } <nl> - <nl> / / CHECK - LABEL : sil @ simple_copyvalue_destroyvalue_pair : $ @ thin ( S ) - > S <nl> / / CHECK : bb0 ( { { % [ 0 - 9 ] + } } : $ S ) <nl> / / CHECK - NEXT : return <nl> bb0 ( % 0 : $ Builtin . NativeObject ) : <nl> return % 2 : $ ( ) <nl> } <nl> <nl> - sil @ S_user : $ @ thin ( S ) - > ( ) <nl> - <nl> / / CHECK - LABEL : sil @ dont_delete_copyvalue_over_decrement_use : $ @ thin ( S ) - > ( ) <nl> / / CHECK : bb0 <nl> / / CHECK : retain_value <nl> bb0 ( % 0 : $ Builtin . NativeObject ) : <nl> return % 2 : $ ( ) <nl> } <nl> <nl> - class Cls { <nl> - var random : Builtin . Int32 <nl> - <nl> - init ( ) <nl> - } <nl> - <nl> / / CHECK - LABEL : sil @ value_that_does_not_alias_pointer_args_cannot_be_decremented : $ @ thin ( Cls ) - > ( ) <nl> / / CHECK - NOT : strong_retain <nl> / / CHECK - NOT : strong_release <nl> bb0 : <nl> return % 11 : $ ( ) <nl> } <nl> <nl> - class RetainUser { } <nl> - <nl> / / CHECK - LABEL : sil @ retain_can_be_used_by_other_pointer : $ @ thin ( RetainUser , Builtin . NativeObject ) - > Builtin . 
NativeObject { <nl> / / CHECK : strong_retain <nl> / / CHECK : strong_retain <nl> bb1 : <nl> bb2 : <nl> % 1 = tuple ( ) <nl> return % 1 : $ ( ) <nl> - } <nl> \ No newline at end of file <nl> + } <nl> + <nl> + / / Make sure that if top down we do not complete the sequence , but bottom up we <nl> + / / do not move anything . This prevents us from just moving retains and <nl> + / / eliminating releases ( or vis - a - versa ) . <nl> + / / CHECK - LABEL : sil @ test_complete_incomplete_sequence_mismatch : $ @ cc ( method ) @ thin ( @ owned C ) - > ( ) { <nl> + / / CHECK : strong_retain <nl> + / / CHECK : strong_release <nl> + / / CHECK : strong_release <nl> + sil @ test_complete_incomplete_sequence_mismatch : $ @ cc ( method ) @ thin ( @ owned C ) - > ( ) { <nl> + bb0 ( % 0 : $ C ) : <nl> + br bb1 ( undef : $ Builtin . Word ) / / id : % 1 <nl> + <nl> + bb1 ( % 2 : $ Builtin . Word ) : / / Preds : bb0 bb5 <nl> + cond_br undef , bb3 , bb2 / / id : % 3 <nl> + <nl> + bb2 : / / Preds : bb1 <nl> + strong_retain % 0 : $ C / / id : % 4 <nl> + % 5 = ref_element_addr % 0 : $ C , # C . w / / user : % 6 <nl> + % 8 = function_ref @ rawpointer_use : $ @ thin ( Builtin . RawPointer ) - > Bool / / user : % 15 <nl> + % 9 = load % 5 : $ * Optional < Builtin . NativeObject > / / user : % 11 <nl> + % 10 = unchecked_trivial_bit_cast % 9 : $ Optional < Builtin . NativeObject > to $ Builtin . RawPointer <nl> + % 15 = apply % 8 ( % 10 ) : $ @ thin ( Builtin . RawPointer ) - > Bool <nl> + br bb5 / / id : % 16 <nl> + <nl> + bb3 : / / Preds : bb1 <nl> + strong_release % 0 : $ C / / id : % 17 <nl> + % 18 = tuple ( ) / / user : % 19 <nl> + return % 18 : $ ( ) / / id : % 19 <nl> + <nl> + bb4 : <nl> + br bb5 / / id : % 20 <nl> + <nl> + bb5 : / / Preds : bb2 bb4 <nl> + strong_release % 0 : $ C / / id : % 21 <nl> + br bb1 ( undef : $ Builtin . Word ) / / id : % 22 <nl> + } <nl>
[ g - arc - opts ] Ensure that if top down we are moving and bottom up we are eliminating ( or vis - a - versa ) , we do nothing .
apple/swift
7e37d3fcec7745859deec10a6ed310fbf9b11f9c
2014-06-27T22:27:38Z
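The guard added above stops the optimizer from inserting retains while deleting releases (or the reverse) by requiring the increment and decrement insertion-point sets to be both empty or both non-empty. A small C++ sketch of that symmetry check on a simplified match-set structure; the names are illustrative and are not SIL or Swift compiler APIs.

```cpp
#include <vector>

struct MatchSet {
  std::vector<int> increments, decrements;          // matched retain/release ops
  std::vector<int> inc_insert_pts, dec_insert_pts;  // where new ones would go
  bool partial = false;                             // partially merged region
};

// Returns true when it is safe to rewrite the retain/release pairs.
bool can_rewrite(const MatchSet& m, bool* matched_pair) {
  const bool have_inc_pts = !m.inc_insert_pts.empty();
  const bool have_dec_pts = !m.dec_insert_pts.empty();

  // Insertion points combined with partial merges would break control
  // dependencies, so bail out entirely.
  if ((have_inc_pts || have_dec_pts) && m.partial) return false;

  // Asymmetric insertion points would mean moving one side of the pair while
  // eliminating the other, changing the reference-count balance.
  if (have_inc_pts != have_dec_pts) return false;

  // No insertion points but existing increments: we are eliminating pairs.
  *matched_pair = !have_inc_pts && !m.increments.empty();
  return true;
}
```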
mmm a / buildscripts / resmokeconfig / suites / replica_sets_auth_5 . yml <nl> ppp b / buildscripts / resmokeconfig / suites / replica_sets_auth_5 . yml <nl> selector : <nl> - jstests / replsets / apply_batches_totalMillis . js <nl> - jstests / replsets / id_index_replication . js <nl> - jstests / replsets / server_status_metrics . js <nl> - - jstests / replsets / initial_sync_visibility . js <nl> + - jstests / replsets / initial_sync_oplog_hole . js <nl> - jstests / replsets / plan_cache_slaveok . js <nl> - jstests / replsets / dbhash_system_collections . js <nl> - jstests / replsets / capped_insert_order . js <nl> mmm a / buildscripts / resmokeconfig / suites / replica_sets_auth_misc . yml <nl> ppp b / buildscripts / resmokeconfig / suites / replica_sets_auth_misc . yml <nl> selector : <nl> - jstests / replsets / apply_batches_totalMillis . js <nl> - jstests / replsets / id_index_replication . js <nl> - jstests / replsets / server_status_metrics . js <nl> - - jstests / replsets / initial_sync_visibility . js <nl> + - jstests / replsets / initial_sync_oplog_hole . js <nl> - jstests / replsets / plan_cache_slaveok . js <nl> - jstests / replsets / dbhash_system_collections . js <nl> - jstests / replsets / capped_insert_order . js <nl> mmm a / buildscripts / resmokeconfig / suites / replica_sets_ese_6 . yml <nl> ppp b / buildscripts / resmokeconfig / suites / replica_sets_ese_6 . yml <nl> selector : <nl> - jstests / replsets / id_index_replication . js <nl> - jstests / replsets / server_status_metrics . js <nl> - jstests / replsets / shutdown . js <nl> - - jstests / replsets / initial_sync_visibility . js <nl> - jstests / replsets / initial_sync_invalid_views . js <nl> + - jstests / replsets / initial_sync_oplog_hole . js <nl> - jstests / replsets / initial_sync_views . js <nl> - jstests / replsets / reconfig_tags . js <nl> - jstests / replsets / oplog_replay_on_startup_with_bad_op . js <nl> mmm a / buildscripts / resmokeconfig / suites / replica_sets_ese_misc . yml <nl> ppp b / buildscripts / resmokeconfig / suites / replica_sets_ese_misc . yml <nl> selector : <nl> - jstests / replsets / id_index_replication . js <nl> - jstests / replsets / server_status_metrics . js <nl> - jstests / replsets / shutdown . js <nl> - - jstests / replsets / initial_sync_visibility . js <nl> - jstests / replsets / initial_sync_invalid_views . js <nl> + - jstests / replsets / initial_sync_oplog_hole . js <nl> - jstests / replsets / initial_sync_views . js <nl> - jstests / replsets / reconfig_tags . js <nl> - jstests / replsets / oplog_replay_on_startup_with_bad_op . js <nl> new file mode 100644 <nl> index 000000000000 . . bad2612351ee <nl> mmm / dev / null <nl> ppp b / jstests / replsets / initial_sync_oplog_hole . js <nl> <nl> + / * * <nl> + * Test that initial sync works without error when the sync source has an oplog hole . <nl> + * / <nl> + ( function ( ) { <nl> + " use strict " ; <nl> + <nl> + load ( " jstests / libs / check_log . js " ) ; <nl> + <nl> + / / Set up replica set . Disallow chaining so nodes always sync from primary . <nl> + const testName = " initial_sync_oplog_hole " ; <nl> + const dbName = testName ; <nl> + / / Set up a three - node replset . The first node is primary throughout the test , the second node <nl> + / / is used as the initial sync node , and the third node is to ensure we maintain a majority ( and <nl> + / / thus no election ) while restarting the second . 
<nl> + const replTest = new ReplSetTest ( { <nl> + name : testName , <nl> + nodes : [ { } , { rsConfig : { priority : 0 } } , { rsConfig : { priority : 0 } } ] , <nl> + settings : { chainingAllowed : false } <nl> + } ) ; <nl> + replTest . startSet ( ) ; <nl> + replTest . initiate ( ) ; <nl> + <nl> + const primary = replTest . getPrimary ( ) ; <nl> + const primaryDB = primary . getDB ( dbName ) ; <nl> + const secondary = replTest . getSecondary ( ) ; <nl> + const secondaryDB = secondary . getDB ( dbName ) ; <nl> + const collName = " testcoll " ; <nl> + const primaryColl = primaryDB [ collName ] ; <nl> + const secondaryColl = secondaryDB [ collName ] ; <nl> + const nss = primaryColl . getFullName ( ) ; <nl> + TestData . testName = testName ; <nl> + TestData . collectionName = collName ; <nl> + <nl> + jsTestLog ( " Writing data before oplog hole to collection . " ) ; <nl> + assert . writeOK ( primaryColl . insert ( { _id : " a " } ) ) ; <nl> + / / Make sure it gets written out . <nl> + assert . eq ( primaryColl . find ( { _id : " a " } ) . itcount ( ) , 1 ) ; <nl> + <nl> + jsTest . log ( " Create the uncommitted write . " ) ; <nl> + assert . commandWorked ( primaryDB . adminCommand ( { <nl> + configureFailPoint : " hangAfterCollectionInserts " , <nl> + mode : " alwaysOn " , <nl> + data : { collectionNS : primaryColl . getFullName ( ) , first_id : " b " } <nl> + } ) ) ; <nl> + <nl> + const db = primaryDB ; <nl> + const joinHungWrite = startParallelShell ( ( ) = > { <nl> + assert . commandWorked ( <nl> + db . getSiblingDB ( TestData . testName ) [ TestData . collectionName ] . insert ( { _id : " b " } ) ) ; <nl> + } , primary . port ) ; <nl> + checkLog . contains ( <nl> + primaryDB . getMongo ( ) , <nl> + " hangAfterCollectionInserts fail point enabled for " + primaryColl . getFullName ( ) ) ; <nl> + <nl> + jsTest . log ( " Create a write following the uncommitted write . " ) ; <nl> + assert . writeOK ( primaryColl . insert ( { _id : " c " } ) ) ; <nl> + / / Make sure it gets written out . <nl> + assert . eq ( primaryColl . find ( { _id : " c " } ) . itcount ( ) , 1 ) ; <nl> + <nl> + jsTestLog ( " Restarting initial sync node . " ) ; <nl> + replTest . restart ( secondary , { startClean : true } ) ; <nl> + jsTestLog ( " Waiting for initial sync to start . " ) ; <nl> + checkLog . contains ( secondaryDB . getMongo ( ) , " Starting initial sync " ) ; <nl> + <nl> + / / The 5 seconds is because in the non - buggy case , we ' ll be hung waiting for the optime to <nl> + / / advance . However , if we allow the write to finish immediately , we are likely to miss the <nl> + / / race if it happens . By allowing 5 seconds , we ' ll never fail when we should succeed , and <nl> + / / we ' ll nearly always fail when we should fail . <nl> + / / <nl> + / / Once the hangAfterCollectionInserts failpoint is turned off , the write of { _id : " b " } will <nl> + / / complete and both the data and the oplog entry for the write will be written out . The oplog <nl> + / / visibility thread will then close the oplog hole . <nl> + jsTestLog ( " Allow the uncommitted write to finish in 5 seconds . " ) ; <nl> + const joinDisableFailPoint = startParallelShell ( ( ) = > { <nl> + sleep ( 5000 ) ; <nl> + assert . commandWorked ( <nl> + db . adminCommand ( { configureFailPoint : " hangAfterCollectionInserts " , mode : " off " } ) ) ; <nl> + } , primary . port ) ; <nl> + <nl> + jsTestLog ( " Waiting for initial sync to complete . " ) ; <nl> + replTest . waitForState ( secondary , ReplSetTest . State . 
SECONDARY ) ; <nl> + <nl> + jsTestLog ( " Joining hung write " ) ; <nl> + joinDisableFailPoint ( ) ; <nl> + joinHungWrite ( ) ; <nl> + <nl> + jsTestLog ( " Checking that primary has all data items . " ) ; <nl> + / / Make sure the primary collection has all three data items . <nl> + assert . docEq ( primaryColl . find ( ) . toArray ( ) , [ { " _id " : " a " } , { " _id " : " b " } , { " _id " : " c " } ] ) ; <nl> + <nl> + jsTestLog ( " Checking that secondary has all data items . " ) ; <nl> + replTest . awaitReplication ( ) ; <nl> + assert . docEq ( secondaryColl . find ( ) . toArray ( ) , [ { " _id " : " a " } , { " _id " : " b " } , { " _id " : " c " } ] ) ; <nl> + <nl> + replTest . stopSet ( ) ; <nl> + } ) ( ) ; <nl> deleted file mode 100644 <nl> index fff5a13bbc90 . . 000000000000 <nl> mmm a / jstests / replsets / initial_sync_visibility . js <nl> ppp / dev / null <nl> <nl> - / / SERVER - 30927 Initial sync from a source with an invisible oplog entry <nl> - ( function ( ) { <nl> - ' use strict ' ; <nl> - <nl> - load ( ' jstests / replsets / rslib . js ' ) ; <nl> - const basename = ' initial_sync_visibility ' ; <nl> - <nl> - jsTestLog ( ' Bring up set ' ) ; <nl> - const rst = new ReplSetTest ( { name : basename , nodes : 1 } ) ; <nl> - rst . startSet ( ) ; <nl> - rst . initiate ( ) ; <nl> - <nl> - const primary = rst . getPrimary ( ) ; <nl> - const primaryDB = primary . getDB ( basename ) ; <nl> - <nl> - jsTestLog ( ' Create a collection ' ) ; <nl> - assert . writeOK ( primaryDB [ ' coll ' ] . save ( { _id : " visible " } ) ) ; <nl> - jsTestLog ( ' Make sure synced ' ) ; <nl> - rst . awaitReplication ( ) ; <nl> - <nl> - jsTestLog ( ' Activate WT visibility failpoint and write an invisible document ' ) ; <nl> - assert . commandWorked ( primaryDB . adminCommand ( <nl> - { configureFailPoint : ' WTPausePrimaryOplogDurabilityLoop ' , mode : ' alwaysOn ' } ) ) ; <nl> - assert . writeOK ( primaryDB [ ' coll ' ] . save ( { _id : " invisible " } ) ) ; <nl> - <nl> - jsTestLog ( ' Bring up a new node ' ) ; <nl> - const secondary = rst . add ( { setParameter : ' numInitialSyncAttempts = 3 ' } ) ; <nl> - rst . reInitiate ( ) ; <nl> - assert . eq ( primary , rst . getPrimary ( ) , ' Primary changed after reconfig ' ) ; <nl> - <nl> - jsTestLog ( ' Wait for new node to start cloning ' ) ; <nl> - secondary . setSlaveOk ( ) ; <nl> - const secondaryDB = secondary . getDB ( basename ) ; <nl> - wait ( function ( ) { <nl> - return secondaryDB . stats ( ) . collections > = 1 ; <nl> - } , ' never saw new node starting to clone , was waiting for collections in : ' + basename ) ; <nl> - <nl> - jsTestLog ( ' Disable WT visibility failpoint on primary making all visible . ' ) ; <nl> - assert . commandWorked ( primaryDB . adminCommand ( <nl> - { configureFailPoint : ' WTPausePrimaryOplogDurabilityLoop ' , mode : ' off ' } ) ) ; <nl> - <nl> - jsTestLog ( ' Wait for both nodes to be up - to - date ' ) ; <nl> - rst . awaitSecondaryNodes ( ) ; <nl> - rst . awaitReplication ( ) ; <nl> - <nl> - jsTestLog ( ' Check all OK ' ) ; <nl> - rst . checkReplicatedDataHashes ( ) ; <nl> - rst . stopSet ( 15 ) ; <nl> - } ) ( ) ; <nl> mmm a / src / mongo / db / catalog / collection_impl . cpp <nl> ppp b / src / mongo / db / catalog / collection_impl . cpp <nl> MONGO_FAIL_POINT_DEFINE ( failCollectionInserts ) ; <nl> <nl> / / Used to pause after inserting collection data and calling the opObservers . 
Inserts to <nl> / / replicated collections that are not part of a multi - statement transaction will have generated <nl> - / / their OpTime and oplog entry . <nl> + / / their OpTime and oplog entry . Supports parameters to limit pause by namespace and by _id <nl> + / / of first data item in an insert ( must be of type string ) : <nl> + / / data : { <nl> + / / collectionNS : < fully - qualified collection namespace > , <nl> + / / first_id : < string > <nl> + / / } <nl> MONGO_FAIL_POINT_DEFINE ( hangAfterCollectionInserts ) ; <nl> <nl> / * * <nl> Status CollectionImpl : : insertDocuments ( OperationContext * opCtx , <nl> MONGO_FAIL_POINT_BLOCK ( hangAfterCollectionInserts , extraData ) { <nl> const BSONObj & data = extraData . getData ( ) ; <nl> const auto collElem = data [ " collectionNS " ] ; <nl> + const auto firstIdElem = data [ " first_id " ] ; <nl> / / If the failpoint specifies no collection or matches the existing one , hang . <nl> - if ( ! collElem | | _ns . ns ( ) = = collElem . str ( ) ) { <nl> + if ( ( ! collElem | | _ns . ns ( ) = = collElem . str ( ) ) & & <nl> + ( ! firstIdElem | | ( begin ! = end & & firstIdElem . type ( ) = = mongo : : String & & <nl> + begin - > doc [ " _id " ] . str ( ) = = firstIdElem . str ( ) ) ) ) { <nl> + string whenFirst = <nl> + firstIdElem ? ( string ( " when first _id is " ) + firstIdElem . str ( ) ) : " " ; <nl> while ( MONGO_FAIL_POINT ( hangAfterCollectionInserts ) ) { <nl> log ( ) < < " hangAfterCollectionInserts fail point enabled for " < < _ns . toString ( ) <nl> - < < " . Blocking until fail point is disabled . " ; <nl> + < < whenFirst < < " . Blocking until fail point is disabled . " ; <nl> mongo : : sleepsecs ( 1 ) ; <nl> opCtx - > checkForInterrupt ( ) ; <nl> } <nl> mmm a / src / mongo / db / repl / initial_syncer . cpp <nl> ppp b / src / mongo / db / repl / initial_syncer . cpp <nl> void InitialSyncer : : _lastOplogEntryFetcherCallbackForBeginTimestamp ( <nl> auto filterBob = BSONObjBuilder ( queryBob . subobjStart ( " filter " ) ) ; <nl> filterBob . append ( " _id " , FeatureCompatibilityVersionParser : : kParameterName ) ; <nl> filterBob . done ( ) ; <nl> + / / As part of reading the FCV , we ensure the source node " all committed " timestamp has advanced <nl> + / / to at least the timestamp of the last optime that we found in the lastOplogEntryFetcher . <nl> + / / When document locking is used , there could be oplog " holes " which would result in <nl> + / / inconsistent initial sync data if we didn ' t do this . <nl> + auto readConcernBob = BSONObjBuilder ( queryBob . subobjStart ( " readConcern " ) ) ; <nl> + readConcernBob . append ( " afterClusterTime " , lastOpTimeWithHash . opTime . getTimestamp ( ) ) ; <nl> + readConcernBob . done ( ) ; <nl> <nl> _fCVFetcher = stdx : : make_unique < Fetcher > ( <nl> _exec , <nl>
SERVER-37408 Add afterClusterTime to initial sync collection scans
mongodb/mongo
cbd0a1a3df662c54da23d5def4ccc10dd1c1f88e
2018-11-06T20:24:52Z
mmm a / dbms / src / Processors / Executors / PipelineExecutor . cpp <nl> ppp b / dbms / src / Processors / Executors / PipelineExecutor . cpp <nl> void PipelineExecutor : : processFinishedExecutionQueue ( ) <nl> { <nl> while ( ! finished_execution_queue . empty ( ) ) <nl> { <nl> - UInt64 proc = finished_execution_queue . front ( ) ; <nl> + auto finished_job = finished_execution_queue . front ( ) ; <nl> finished_execution_queue . pop ( ) ; <nl> <nl> - graph [ proc ] . status = ExecStatus : : Preparing ; <nl> - prepare_queue . push ( proc ) ; <nl> + if ( finished_job . exception ) <nl> + std : : rethrow_exception ( finished_job . exception ) ; <nl> + <nl> + graph [ finished_job . node ] . status = ExecStatus : : Preparing ; <nl> + prepare_queue . push ( finished_job . node ) ; <nl> } <nl> } <nl> <nl> void PipelineExecutor : : processFinishedExecutionQueueSafe ( ThreadPool * pool ) <nl> { <nl> if ( pool ) <nl> { <nl> - exception_handler . throwIfException ( ) ; <nl> std : : lock_guard lock ( finished_execution_mutex ) ; <nl> processFinishedExecutionQueue ( ) ; <nl> } <nl> void PipelineExecutor : : addJob ( UInt64 pid , ThreadPool * pool ) <nl> { <nl> auto job = [ this , pid , processor = graph [ pid ] . processor ] ( ) <nl> { <nl> + FinishedJob finished_job = <nl> + { <nl> + . node = pid , <nl> + . exception = nullptr <nl> + } ; <nl> + <nl> SCOPE_EXIT ( <nl> - { <nl> - std : : lock_guard lock ( finished_execution_mutex ) ; <nl> - finished_execution_queue . push ( pid ) ; <nl> - } <nl> - event_counter . notify ( ) <nl> + { <nl> + std : : lock_guard lock ( finished_execution_mutex ) ; <nl> + finished_execution_queue . push ( finished_job ) ; <nl> + } <nl> + event_counter . notify ( ) <nl> ) ; <nl> <nl> - executeJob ( processor ) ; <nl> + try <nl> + { <nl> + executeJob ( processor ) ; <nl> + } <nl> + catch ( . . . ) <nl> + { <nl> + finished_job . exception = std : : current_exception ( ) ; <nl> + } <nl> } ; <nl> <nl> - pool - > schedule ( createExceptionHandledJob ( std : : move ( job ) , exception_handler ) ) ; <nl> + pool - > schedule ( std : : move ( job ) ) ; <nl> + + num_tasks_to_wait ; <nl> } <nl> else <nl> { <nl> / / / Execute task in main thread . <nl> + <nl> + FinishedJob finished_job = <nl> + { <nl> + . node = pid , <nl> + . exception = nullptr <nl> + } ; <nl> + <nl> executeJob ( graph [ pid ] . processor ) ; <nl> - finished_execution_queue . push ( pid ) ; <nl> + finished_execution_queue . push ( finished_job ) ; <nl> } <nl> } <nl> <nl> mmm a / dbms / src / Processors / Executors / PipelineExecutor . h <nl> ppp b / dbms / src / Processors / Executors / PipelineExecutor . h <nl> class PipelineExecutor <nl> <nl> Nodes graph ; <nl> <nl> + struct FinishedJob <nl> + { <nl> + UInt64 node ; <nl> + std : : exception_ptr exception ; <nl> + } ; <nl> + <nl> using Queue = std : : queue < UInt64 > ; <nl> + using FinishedJobsQueue = std : : queue < FinishedJob > ; <nl> <nl> / / / Queue of processes which we want to call prepare . Is used only in main thread . <nl> Queue prepare_queue ; <nl> / / / Queue of processes which have finished execution . Must me used with mutex if executing with pool . <nl> - Queue finished_execution_queue ; <nl> + FinishedJobsQueue finished_execution_queue ; <nl> std : : mutex finished_execution_mutex ; <nl> - ExceptionHandler exception_handler ; <nl> + <nl> EventCounter event_counter ; <nl> <nl> UInt64 num_waited_tasks = 0 ; <nl>
Remove ExceptionHandler from PipelineExecutor.
ClickHouse/ClickHouse
fc5f28faeb3c33659a65d36bdac19b8b6dbd201f
2019-06-25T16:41:06Z
mmm a / unittest / Makefile . am <nl> ppp b / unittest / Makefile . am <nl> applybox_test_LDADD = $ ( GTEST_LIBS ) $ ( TESS_LIBS ) $ ( LEPTONICA_LIBS ) <nl> baseapi_test_SOURCES = baseapi_test . cc <nl> baseapi_test_LDADD = $ ( ABSEIL_LIBS ) $ ( GTEST_LIBS ) $ ( TESS_LIBS ) <nl> <nl> - baseapi_thread_test_SOURCES = baseapi_thread_test . cc <nl> - baseapi_thread_test_LDADD = $ ( ABSEIL_LIBS ) $ ( GTEST_LIBS ) $ ( TESS_LIBS ) <nl> + # baseapi_thread_test_SOURCES = baseapi_thread_test . cc <nl> + # baseapi_thread_test_LDADD = $ ( ABSEIL_LIBS ) $ ( GTEST_LIBS ) $ ( TESS_LIBS ) <nl> <nl> bitvector_test_SOURCES = bitvector_test . cc <nl> bitvector_test_LDADD = $ ( GTEST_LIBS ) $ ( TESS_LIBS ) <nl> matrix_test_LDADD = $ ( GTEST_LIBS ) $ ( TESS_LIBS ) <nl> nthitem_test_SOURCES = nthitem_test . cc <nl> nthitem_test_LDADD = $ ( GTEST_LIBS ) $ ( TESS_LIBS ) <nl> <nl> - pango_font_info_test_SOURCES = pango_font_info_test . cc <nl> - pango_font_info_test_LDADD = $ ( GTEST_LIBS ) $ ( TESS_LIBS ) <nl> + # pango_font_info_test_SOURCES = pango_font_info_test . cc <nl> + # pango_font_info_test_LDADD = $ ( GTEST_LIBS ) $ ( TESS_LIBS ) <nl> <nl> paragraphs_test_SOURCES = paragraphs_test . cc <nl> paragraphs_test_LDADD = $ ( ABSEIL_LIBS ) $ ( GTEST_LIBS ) $ ( TESS_LIBS ) <nl>
Merge pull request from stweil/unittest
tesseract-ocr/tesseract
460c4a6950256521529554d67c453c7cd38d8e7d
2018-10-12T16:04:11Z
mmm a / src / Storages / MergeTree / MergeTreeDataSelectExecutor . cpp <nl> ppp b / src / Storages / MergeTree / MergeTreeDataSelectExecutor . cpp <nl> Pipe MergeTreeDataSelectExecutor : : spreadMarkRangesAmongStreamsFinal ( <nl> continue ; <nl> } <nl> <nl> - pipe . addSimpleTransform ( [ & metadata_snapshot ] ( const Block & header ) { <nl> + pipe . addSimpleTransform ( [ & metadata_snapshot ] ( const Block & header ) <nl> + { <nl> return std : : make_shared < ExpressionTransform > ( header , metadata_snapshot - > getSortingKey ( ) . expression ) ; <nl> } ) ; <nl> <nl> Pipe MergeTreeDataSelectExecutor : : spreadMarkRangesAmongStreamsFinal ( <nl> for ( size_t i = 0 ; i < sort_columns_size ; + + i ) <nl> sort_description . emplace_back ( header . getPositionByName ( sort_columns [ i ] ) , 1 , 1 ) ; <nl> <nl> - auto get_merging_processor = [ & ] ( ) - > MergingTransformPtr { <nl> + auto get_merging_processor = [ & ] ( ) - > MergingTransformPtr <nl> + { <nl> switch ( data . merging_params . mode ) <nl> { <nl> case MergeTreeData : : MergingParams : : Ordinary : { <nl> Pipe MergeTreeDataSelectExecutor : : spreadMarkRangesAmongStreamsFinal ( <nl> key_columns . emplace_back ( desc . column_number ) ; <nl> } <nl> <nl> - pipe . addSimpleTransform ( [ & ] ( const Block & stream_header ) { <nl> + pipe . addSimpleTransform ( [ & ] ( const Block & stream_header ) <nl> + { <nl> return std : : make_shared < AddingSelectorTransform > ( stream_header , num_streams , key_columns ) ; <nl> } ) ; <nl> <nl> - pipe . transform ( [ & ] ( OutputPortRawPtrs ports ) { <nl> + pipe . transform ( [ & ] ( OutputPortRawPtrs ports ) <nl> + { <nl> Processors processors ; <nl> std : : vector < OutputPorts : : iterator > output_ports ; <nl> processors . reserve ( ports . size ( ) + num_streams ) ; <nl>
Fix style
ClickHouse/ClickHouse
44c2b138f32a732f60730bcaebf19c60dbe9e01f
2020-10-13T19:53:36Z
mmm a / Marlin / src / pins / pinsDebug . h <nl> ppp b / Marlin / src / pins / pinsDebug . h <nl> <nl> <nl> # define _ADD_PIN_2 ( PIN_NAME , ENTRY_NAME ) static const char ENTRY_NAME [ ] PROGMEM = { PIN_NAME } ; <nl> # define _ADD_PIN ( PIN_NAME , COUNTER ) _ADD_PIN_2 ( PIN_NAME , entry_NAME_ # # COUNTER ) <nl> - # define REPORT_NAME_DIGITAL ( NAME , COUNTER ) _ADD_PIN ( # NAME , COUNTER ) <nl> - # define REPORT_NAME_ANALOG ( NAME , COUNTER ) _ADD_PIN ( # NAME , COUNTER ) <nl> + # define REPORT_NAME_DIGITAL ( COUNTER , NAME ) _ADD_PIN ( # NAME , COUNTER ) <nl> + # define REPORT_NAME_ANALOG ( COUNTER , NAME ) _ADD_PIN ( # NAME , COUNTER ) <nl> <nl> # include " pinsDebug_list . h " <nl> # line 47 <nl> <nl> <nl> # define _ADD_PIN_2 ( ENTRY_NAME , NAME , IS_DIGITAL ) { ENTRY_NAME , NAME , IS_DIGITAL } , <nl> # define _ADD_PIN ( NAME , COUNTER , IS_DIGITAL ) _ADD_PIN_2 ( entry_NAME_ # # COUNTER , NAME , IS_DIGITAL ) <nl> - # define REPORT_NAME_DIGITAL ( NAME , COUNTER ) _ADD_PIN ( NAME , COUNTER , true ) <nl> - # define REPORT_NAME_ANALOG ( NAME , COUNTER ) _ADD_PIN ( analogInputToDigitalPin ( NAME ) , COUNTER , false ) <nl> + # define REPORT_NAME_DIGITAL ( COUNTER , NAME ) _ADD_PIN ( NAME , COUNTER , true ) <nl> + # define REPORT_NAME_ANALOG ( COUNTER , NAME ) _ADD_PIN ( analogInputToDigitalPin ( NAME ) , COUNTER , false ) <nl> <nl> <nl> typedef struct { <nl> mmm a / Marlin / src / pins / pinsDebug_list . h <nl> ppp b / Marlin / src / pins / pinsDebug_list . h <nl> <nl> * ( at your option ) any later version . <nl> * <nl> * This program is distributed in the hope that it will be useful , <nl> - * but WITHOUT ANY WARRANTY without even the implied warranty of <nl> + * but WITHOUT ANY WARRANTY ; without even the implied warranty of <nl> * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the <nl> * GNU General Public License for more details . <nl> * <nl> <nl> / / Following this pattern is a must . <nl> / / If the new pin name is over 28 characters long then pinsDebug . h will need to be modified . <nl> <nl> - / / Pin list updated from 25 JUL 2017 Re - ARM branch - max length of pin name is 24 <nl> + / / Pin lists 1 . 1 . x and 2 . 0 . 
x synchronized 2018 - 02 - 17 <nl> <nl> - # line 31 / / set __LINE__ to a known value for both passes <nl> + # define PIN_EXISTS_ANALOG ( PN ) ( PIN_EXISTS ( PN ) & & PN # # _PIN < NUM_ANALOG_INPUTS ) <nl> + # define _EXISTS ( PN ) ( defined ( PN ) & & PN > = 0 ) <nl> + # define _EXISTS_ANALOG ( PN ) ( _EXISTS ( PN ) & & PN < NUM_ANALOG_INPUTS ) <nl> <nl> - # if defined ( EXT_AUX_A0 ) & & EXT_AUX_A0 > = 0 & & EXT_AUX_A0 < NUM_ANALOG_INPUTS <nl> - REPORT_NAME_ANALOG ( EXT_AUX_A0 , __LINE__ ) <nl> + # line 0 / / set __LINE__ to a known value for both passes <nl> + <nl> + # if _EXISTS_ANALOG ( EXT_AUX_A0 ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , EXT_AUX_A0 ) <nl> # endif <nl> - # if defined ( EXT_AUX_A1 ) & & EXT_AUX_A1 > = 0 & & EXT_AUX_A1 < NUM_ANALOG_INPUTS <nl> - REPORT_NAME_ANALOG ( EXT_AUX_A1 , __LINE__ ) <nl> + # if _EXISTS_ANALOG ( EXT_AUX_A1 ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , EXT_AUX_A1 ) <nl> # endif <nl> - # if defined ( EXT_AUX_A2 ) & & EXT_AUX_A2 > = 0 & & EXT_AUX_A2 < NUM_ANALOG_INPUTS <nl> - REPORT_NAME_ANALOG ( EXT_AUX_A2 , __LINE__ ) <nl> + # if _EXISTS_ANALOG ( EXT_AUX_A2 ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , EXT_AUX_A2 ) <nl> # endif <nl> - # if defined ( EXT_AUX_A3 ) & & EXT_AUX_A3 > = 0 & & EXT_AUX_A3 < NUM_ANALOG_INPUTS <nl> - REPORT_NAME_ANALOG ( EXT_AUX_A3 , __LINE__ ) <nl> + # if _EXISTS_ANALOG ( EXT_AUX_A3 ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , EXT_AUX_A3 ) <nl> # endif <nl> - # if defined ( EXT_AUX_A4 ) & & EXT_AUX_A4 > = 0 & & EXT_AUX_A4 < NUM_ANALOG_INPUTS <nl> - REPORT_NAME_ANALOG ( EXT_AUX_A4 , __LINE__ ) <nl> + # if _EXISTS_ANALOG ( EXT_AUX_A4 ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , EXT_AUX_A4 ) <nl> # endif <nl> - # if PIN_EXISTS ( FILWIDTH ) & & FILWIDTH_PIN < NUM_ANALOG_INPUTS <nl> - REPORT_NAME_ANALOG ( FILWIDTH_PIN , __LINE__ ) <nl> + # if PIN_EXISTS_ANALOG ( FILWIDTH ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , FILWIDTH_PIN ) <nl> # endif <nl> - # if PIN_EXISTS ( MAIN_VOLTAGE_MEASURE ) & & MAIN_VOLTAGE_MEASURE_PIN < NUM_ANALOG_INPUTS <nl> - REPORT_NAME_ANALOG ( MAIN_VOLTAGE_MEASURE_PIN , __LINE__ ) <nl> + # if PIN_EXISTS_ANALOG ( MAIN_VOLTAGE_MEASURE ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , MAIN_VOLTAGE_MEASURE_PIN ) <nl> # endif <nl> # if ! 
defined ( ARDUINO_ARCH_SAM ) / / TC1 & TC2 are macros in the SAM tool chain <nl> - # if defined ( TC1 ) & & TC1 > = 0 & & TC1 < NUM_ANALOG_INPUTS <nl> - REPORT_NAME_ANALOG ( TC1 , __LINE__ ) <nl> + # if _EXISTS_ANALOG ( TC1 ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , TC1 ) <nl> # endif <nl> - # if defined ( TC2 ) & & TC2 > = 0 & & TC2 < NUM_ANALOG_INPUTS <nl> - REPORT_NAME_ANALOG ( TC2 , __LINE__ ) <nl> + # if _EXISTS_ANALOG ( TC2 ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , TC2 ) <nl> # endif <nl> # endif <nl> - # if PIN_EXISTS ( TEMP_0 ) & & TEMP_0_PIN < NUM_ANALOG_INPUTS <nl> - REPORT_NAME_ANALOG ( TEMP_0_PIN , __LINE__ ) <nl> + # if PIN_EXISTS_ANALOG ( TEMP_0 ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , TEMP_0_PIN ) <nl> + # endif <nl> + # if PIN_EXISTS_ANALOG ( TEMP_1 ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , TEMP_1_PIN ) <nl> + # endif <nl> + # if PIN_EXISTS_ANALOG ( TEMP_2 ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , TEMP_2_PIN ) <nl> + # endif <nl> + # if PIN_EXISTS_ANALOG ( TEMP_3 ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , TEMP_3_PIN ) <nl> + # endif <nl> + # if PIN_EXISTS_ANALOG ( TEMP_4 ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , TEMP_4_PIN ) <nl> # endif <nl> - # if PIN_EXISTS ( TEMP_1 ) & & TEMP_1_PIN < NUM_ANALOG_INPUTS <nl> - REPORT_NAME_ANALOG ( TEMP_1_PIN , __LINE__ ) <nl> + # if _EXISTS ( THERMO_CS2 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , THERMO_CS2 ) <nl> # endif <nl> - # if PIN_EXISTS ( TEMP_2 ) & & TEMP_2_PIN < NUM_ANALOG_INPUTS <nl> - REPORT_NAME_ANALOG ( TEMP_2_PIN , __LINE__ ) <nl> + # if PIN_EXISTS_ANALOG ( TEMP_BED ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , TEMP_BED_PIN ) <nl> # endif <nl> - # if PIN_EXISTS ( TEMP_3 ) & & TEMP_3_PIN < NUM_ANALOG_INPUTS <nl> - REPORT_NAME_ANALOG ( TEMP_3_PIN , __LINE__ ) <nl> + # if PIN_EXISTS_ANALOG ( TEMP_CHAMBER ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , TEMP_CHAMBER_PIN ) <nl> # endif <nl> - # if PIN_EXISTS ( TEMP_4 ) & & TEMP_4_PIN < NUM_ANALOG_INPUTS <nl> - REPORT_NAME_ANALOG ( TEMP_4_PIN , __LINE__ ) <nl> + # if PIN_EXISTS_ANALOG ( TEMP_X ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , TEMP_X_PIN ) <nl> # endif <nl> - # if PIN_EXISTS ( TEMP_BED ) & & TEMP_BED_PIN < NUM_ANALOG_INPUTS <nl> - REPORT_NAME_ANALOG ( TEMP_BED_PIN , __LINE__ ) <nl> + # if PIN_EXISTS_ANALOG ( ADC_KEYPAD ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , ADC_KEYPAD_PIN ) <nl> # endif <nl> - # if PIN_EXISTS ( TEMP_CHAMBER ) & & TEMP_CHAMBER_PIN < NUM_ANALOG_INPUTS <nl> - REPORT_NAME_ANALOG ( TEMP_CHAMBER_PIN , __LINE__ ) <nl> + # if _EXISTS ( __FD ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , __FD ) <nl> # endif <nl> - # if PIN_EXISTS ( TEMP_X ) & & TEMP_X_PIN < NUM_ANALOG_INPUTS <nl> - REPORT_NAME_ANALOG ( TEMP_X_PIN , __LINE__ ) <nl> + # if _EXISTS ( __FS ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , __FS ) <nl> # endif <nl> - # if defined ( __FD ) & & __FD > = 0 <nl> - REPORT_NAME_DIGITAL ( __FD , __LINE__ ) <nl> + # if _EXISTS ( __GD ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , __GD ) <nl> # endif <nl> - # if defined ( __FS ) & & __FS > = 0 <nl> - REPORT_NAME_DIGITAL ( __FS , __LINE__ ) <nl> + # if _EXISTS ( __GS ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , __GS ) <nl> # endif <nl> - # if defined ( __GD ) & & __GD > = 0 <nl> - REPORT_NAME_DIGITAL ( __GD , __LINE__ ) <nl> + # if PIN_EXISTS ( AVR_MISO ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , AVR_MISO_PIN ) <nl> # endif <nl> - # if defined ( __GS ) & & __GS > = 0 <nl> - REPORT_NAME_DIGITAL ( __GS , __LINE__ ) <nl> + # if PIN_EXISTS ( AVR_MOSI ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , AVR_MOSI_PIN ) <nl> # endif <nl> - # if PIN_EXISTS ( ADC_KEYPAD ) <nl> 
- REPORT_NAME_DIGITAL ( ADC_KEYPAD_PIN , __LINE__ ) <nl> + # if PIN_EXISTS ( AVR_SCK ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , AVR_SCK_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( ALARM ) <nl> - REPORT_NAME_DIGITAL ( ALARM_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , ALARM_PIN ) <nl> + # endif <nl> + # if PIN_EXISTS ( AVR_SS ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , AVR_SS_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( BEEPER ) <nl> - REPORT_NAME_DIGITAL ( BEEPER_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , BEEPER_PIN ) <nl> # endif <nl> - # if defined ( BTN_BACK ) & & BTN_BACK > = 0 <nl> - REPORT_NAME_DIGITAL ( BTN_BACK , __LINE__ ) <nl> + # if _EXISTS ( BTN_BACK ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , BTN_BACK ) <nl> # endif <nl> - # if defined ( BTN_CENTER ) & & BTN_CENTER > = 0 <nl> - REPORT_NAME_DIGITAL ( BTN_CENTER , __LINE__ ) <nl> + # if _EXISTS ( BTN_CENTER ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , BTN_CENTER ) <nl> # endif <nl> - # if defined ( BTN_DOWN ) & & BTN_DOWN > = 0 <nl> - REPORT_NAME_DIGITAL ( BTN_DOWN , __LINE__ ) <nl> + # if _EXISTS ( BTN_DOWN ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , BTN_DOWN ) <nl> # endif <nl> - # if defined ( BTN_DWN ) & & BTN_DWN > = 0 <nl> - REPORT_NAME_DIGITAL ( BTN_DWN , __LINE__ ) <nl> + # if _EXISTS ( BTN_DWN ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , BTN_DWN ) <nl> # endif <nl> - # if defined ( BTN_EN1 ) & & BTN_EN1 > = 0 <nl> - REPORT_NAME_DIGITAL ( BTN_EN1 , __LINE__ ) <nl> + # if _EXISTS ( BTN_EN1 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , BTN_EN1 ) <nl> # endif <nl> - # if defined ( BTN_EN2 ) & & BTN_EN2 > = 0 <nl> - REPORT_NAME_DIGITAL ( BTN_EN2 , __LINE__ ) <nl> + # if _EXISTS ( BTN_EN2 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , BTN_EN2 ) <nl> # endif <nl> - # if defined ( BTN_ENC ) & & BTN_ENC > = 0 <nl> - REPORT_NAME_DIGITAL ( BTN_ENC , __LINE__ ) <nl> + # if _EXISTS ( BTN_ENC ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , BTN_ENC ) <nl> # endif <nl> - # if defined ( BTN_HOME ) & & BTN_HOME > = 0 <nl> - REPORT_NAME_DIGITAL ( BTN_HOME , __LINE__ ) <nl> + # if _EXISTS ( BTN_HOME ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , BTN_HOME ) <nl> # endif <nl> - # if defined ( BTN_LEFT ) & & BTN_LEFT > = 0 <nl> - REPORT_NAME_DIGITAL ( BTN_LEFT , __LINE__ ) <nl> + # if _EXISTS ( BTN_LEFT ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , BTN_LEFT ) <nl> # endif <nl> - # if defined ( BTN_LFT ) & & BTN_LFT > = 0 <nl> - REPORT_NAME_DIGITAL ( BTN_LFT , __LINE__ ) <nl> + # if _EXISTS ( BTN_LFT ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , BTN_LFT ) <nl> # endif <nl> - # if defined ( BTN_RIGHT ) & & BTN_RIGHT > = 0 <nl> - REPORT_NAME_DIGITAL ( BTN_RIGHT , __LINE__ ) <nl> + # if _EXISTS ( BTN_RIGHT ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , BTN_RIGHT ) <nl> # endif <nl> - # if defined ( BTN_RT ) & & BTN_RT > = 0 <nl> - REPORT_NAME_DIGITAL ( BTN_RT , __LINE__ ) <nl> + # if _EXISTS ( BTN_RT ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , BTN_RT ) <nl> # endif <nl> - # if defined ( BTN_UP ) & & BTN_UP > = 0 <nl> - REPORT_NAME_DIGITAL ( BTN_UP , __LINE__ ) <nl> + # if _EXISTS ( BTN_UP ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , BTN_UP ) <nl> # endif <nl> # if PIN_EXISTS ( CASE_LIGHT ) <nl> - REPORT_NAME_DIGITAL ( CASE_LIGHT_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , CASE_LIGHT_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( CONTROLLER_FAN ) <nl> - REPORT_NAME_DIGITAL ( CONTROLLER_FAN_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , CONTROLLER_FAN_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( CUTOFF_RESET ) <nl> - REPORT_NAME_DIGITAL ( CUTOFF_RESET_PIN , 
__LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , CUTOFF_RESET_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( CUTOFF_TEST ) <nl> - REPORT_NAME_DIGITAL ( CUTOFF_TEST_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , CUTOFF_TEST_PIN ) <nl> # endif <nl> - # if defined ( D57 ) & & D57 > = 0 <nl> - REPORT_NAME_DIGITAL ( D57 , __LINE__ ) <nl> + # if _EXISTS ( D57 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , D57 ) <nl> # endif <nl> - # if defined ( D58 ) & & D58 > = 0 <nl> - REPORT_NAME_DIGITAL ( D58 , __LINE__ ) <nl> + # if _EXISTS ( D58 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , D58 ) <nl> # endif <nl> # if PIN_EXISTS ( DAC_DISABLE ) <nl> - REPORT_NAME_DIGITAL ( DAC_DISABLE_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , DAC_DISABLE_PIN ) <nl> # endif <nl> - # if defined ( DAC0_SYNC ) & & DAC0_SYNC > = 0 <nl> - REPORT_NAME_DIGITAL ( DAC0_SYNC , __LINE__ ) <nl> + # if _EXISTS ( DAC0_SYNC ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , DAC0_SYNC ) <nl> # endif <nl> - # if defined ( DAC1_SYNC ) & & DAC1_SYNC > = 0 <nl> - REPORT_NAME_DIGITAL ( DAC1_SYNC , __LINE__ ) <nl> + # if _EXISTS ( DAC1_SYNC ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , DAC1_SYNC ) <nl> # endif <nl> # if PIN_EXISTS ( DEBUG ) <nl> - REPORT_NAME_DIGITAL ( DEBUG_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , DEBUG_PIN ) <nl> # endif <nl> - # if defined ( DIGIPOTS_I2C_SCL ) & & DIGIPOTS_I2C_SCL > = 0 <nl> - REPORT_NAME_DIGITAL ( DIGIPOTS_I2C_SCL , __LINE__ ) <nl> + # if _EXISTS ( DIGIPOTS_I2C_SCL ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , DIGIPOTS_I2C_SCL ) <nl> # endif <nl> - # if defined ( DIGIPOTS_I2C_SDA_E0 ) & & DIGIPOTS_I2C_SDA_E0 > = 0 <nl> - REPORT_NAME_DIGITAL ( DIGIPOTS_I2C_SDA_E0 , __LINE__ ) <nl> + # if _EXISTS ( DIGIPOTS_I2C_SDA_E0 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , DIGIPOTS_I2C_SDA_E0 ) <nl> # endif <nl> - # if defined ( DIGIPOTS_I2C_SDA_E1 ) & & DIGIPOTS_I2C_SDA_E1 > = 0 <nl> - REPORT_NAME_DIGITAL ( DIGIPOTS_I2C_SDA_E1 , __LINE__ ) <nl> + # if _EXISTS ( DIGIPOTS_I2C_SDA_E1 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , DIGIPOTS_I2C_SDA_E1 ) <nl> # endif <nl> - # if defined ( DIGIPOTS_I2C_SDA_X ) & & DIGIPOTS_I2C_SDA_X > = 0 <nl> - REPORT_NAME_DIGITAL ( DIGIPOTS_I2C_SDA_X , __LINE__ ) <nl> + # if _EXISTS ( DIGIPOTS_I2C_SDA_X ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , DIGIPOTS_I2C_SDA_X ) <nl> # endif <nl> - # if defined ( DIGIPOTS_I2C_SDA_Y ) & & DIGIPOTS_I2C_SDA_Y > = 0 <nl> - REPORT_NAME_DIGITAL ( DIGIPOTS_I2C_SDA_Y , __LINE__ ) <nl> + # if _EXISTS ( DIGIPOTS_I2C_SDA_Y ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , DIGIPOTS_I2C_SDA_Y ) <nl> # endif <nl> - # if defined ( DIGIPOTS_I2C_SDA_Z ) & & DIGIPOTS_I2C_SDA_Z > = 0 <nl> - REPORT_NAME_DIGITAL ( DIGIPOTS_I2C_SDA_Z , __LINE__ ) <nl> + # if _EXISTS ( DIGIPOTS_I2C_SDA_Z ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , DIGIPOTS_I2C_SDA_Z ) <nl> # endif <nl> # if PIN_EXISTS ( DIGIPOTSS ) <nl> - REPORT_NAME_DIGITAL ( DIGIPOTSS_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , DIGIPOTSS_PIN ) <nl> # endif <nl> - # if defined ( DOGLCD_A0 ) & & DOGLCD_A0 > = 0 <nl> - REPORT_NAME_DIGITAL ( DOGLCD_A0 , __LINE__ ) <nl> + # if _EXISTS ( DOGLCD_A0 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , DOGLCD_A0 ) <nl> # endif <nl> - # if defined ( DOGLCD_CS ) & & DOGLCD_CS > = 0 <nl> - REPORT_NAME_DIGITAL ( DOGLCD_CS , __LINE__ ) <nl> + # if _EXISTS ( DOGLCD_CS ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , DOGLCD_CS ) <nl> # endif <nl> - # if defined ( DOGLCD_MOSI ) & & DOGLCD_MOSI > = 0 <nl> - REPORT_NAME_DIGITAL ( DOGLCD_MOSI , __LINE__ ) <nl> + # if _EXISTS ( DOGLCD_MOSI ) <nl> + 
REPORT_NAME_DIGITAL ( __LINE__ , DOGLCD_MOSI ) <nl> # endif <nl> - # if defined ( DOGLCD_SCK ) & & DOGLCD_SCK > = 0 <nl> - REPORT_NAME_DIGITAL ( DOGLCD_SCK , __LINE__ ) <nl> + # if _EXISTS ( DOGLCD_SCK ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , DOGLCD_SCK ) <nl> # endif <nl> # if PIN_EXISTS ( E_MUX0 ) <nl> - REPORT_NAME_DIGITAL ( E_MUX0_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E_MUX0_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E_MUX1 ) <nl> - REPORT_NAME_DIGITAL ( E_MUX1_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E_MUX1_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E_MUX2 ) <nl> - REPORT_NAME_DIGITAL ( E_MUX2_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E_MUX2_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E_STOP ) <nl> - REPORT_NAME_DIGITAL ( E_STOP_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E_STOP_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E0_ATT ) <nl> - REPORT_NAME_DIGITAL ( E0_ATT_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E0_ATT_PIN ) <nl> + # endif <nl> + # if PIN_EXISTS ( E0_AUTO_FAN ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E0_AUTO_FAN_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E0_CS ) <nl> - REPORT_NAME_DIGITAL ( E0_CS_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E0_CS_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E0_DIR ) <nl> - REPORT_NAME_DIGITAL ( E0_DIR_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E0_DIR_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E0_ENABLE ) <nl> - REPORT_NAME_DIGITAL ( E0_ENABLE_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E0_ENABLE_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E0_MS1 ) <nl> - REPORT_NAME_DIGITAL ( E0_MS1_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E0_MS1_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E0_MS2 ) <nl> - REPORT_NAME_DIGITAL ( E0_MS2_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E0_MS2_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E0_STEP ) <nl> - REPORT_NAME_DIGITAL ( E0_STEP_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E0_STEP_PIN ) <nl> + # endif <nl> + # if PIN_EXISTS ( E1_AUTO_FAN ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E1_AUTO_FAN_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E1_CS ) <nl> - REPORT_NAME_DIGITAL ( E1_CS_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E1_CS_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E1_DIR ) <nl> - REPORT_NAME_DIGITAL ( E1_DIR_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E1_DIR_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E1_ENABLE ) <nl> - REPORT_NAME_DIGITAL ( E1_ENABLE_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E1_ENABLE_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E1_MS1 ) <nl> - REPORT_NAME_DIGITAL ( E1_MS1_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E1_MS1_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E1_MS2 ) <nl> - REPORT_NAME_DIGITAL ( E1_MS2_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E1_MS2_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E1_STEP ) <nl> - REPORT_NAME_DIGITAL ( E1_STEP_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E1_STEP_PIN ) <nl> + # endif <nl> + # if PIN_EXISTS ( E2_AUTO_FAN ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E2_AUTO_FAN_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E2_CS ) <nl> - REPORT_NAME_DIGITAL ( E2_CS_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E2_CS_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E2_DIR ) <nl> - REPORT_NAME_DIGITAL ( E2_DIR_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E2_DIR_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E2_ENABLE ) <nl> - REPORT_NAME_DIGITAL ( 
E2_ENABLE_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E2_ENABLE_PIN ) <nl> + # endif <nl> + # if PIN_EXISTS ( E2_MS1 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E2_MS1_PIN ) <nl> + # endif <nl> + # if PIN_EXISTS ( E2_MS2 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E2_MS2_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E2_STEP ) <nl> - REPORT_NAME_DIGITAL ( E2_STEP_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E2_STEP_PIN ) <nl> + # endif <nl> + # if PIN_EXISTS ( E3_AUTO_FAN ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E3_AUTO_FAN_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E3_CS ) <nl> - REPORT_NAME_DIGITAL ( E3_CS_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E3_CS_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E3_DIR ) <nl> - REPORT_NAME_DIGITAL ( E3_DIR_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E3_DIR_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E3_ENABLE ) <nl> - REPORT_NAME_DIGITAL ( E3_ENABLE_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E3_ENABLE_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E3_MS1 ) <nl> - REPORT_NAME_DIGITAL ( E3_MS1_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E3_MS1_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E3_MS2 ) <nl> - REPORT_NAME_DIGITAL ( E3_MS2_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E3_MS2_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E3_MS3 ) <nl> - REPORT_NAME_DIGITAL ( E3_MS3_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E3_MS3_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E3_STEP ) <nl> - REPORT_NAME_DIGITAL ( E3_STEP_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E3_STEP_PIN ) <nl> + # endif <nl> + # if PIN_EXISTS ( E4_AUTO_FAN ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E4_AUTO_FAN_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E4_CS ) <nl> - REPORT_NAME_DIGITAL ( E4_CS_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E4_CS_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E4_DIR ) <nl> - REPORT_NAME_DIGITAL ( E4_DIR_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E4_DIR_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E4_ENABLE ) <nl> - REPORT_NAME_DIGITAL ( E4_ENABLE_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E4_ENABLE_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E4_MS1 ) <nl> - REPORT_NAME_DIGITAL ( E4_MS1_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E4_MS1_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E4_MS2 ) <nl> - REPORT_NAME_DIGITAL ( E4_MS2_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E4_MS2_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E4_MS3 ) <nl> - REPORT_NAME_DIGITAL ( E4_MS3_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E4_MS3_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E4_STEP ) <nl> - REPORT_NAME_DIGITAL ( E4_STEP_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E4_STEP_PIN ) <nl> # endif <nl> - # if defined ( ENET_CRS ) & & ENET_CRS > = 0 <nl> - REPORT_NAME_DIGITAL ( ENET_CRS , __LINE__ ) <nl> + # if _EXISTS ( ENET_CRS ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , ENET_CRS ) <nl> # endif <nl> - # if defined ( ENET_MDIO ) & & ENET_MDIO > = 0 <nl> - REPORT_NAME_DIGITAL ( ENET_MDIO , __LINE__ ) <nl> + # if _EXISTS ( ENET_MDIO ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , ENET_MDIO ) <nl> # endif <nl> - # if defined ( ENET_MOC ) & & ENET_MOC > = 0 <nl> - REPORT_NAME_DIGITAL ( ENET_MOC , __LINE__ ) <nl> + # if _EXISTS ( ENET_MOC ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , ENET_MOC ) <nl> # endif <nl> - # if defined ( ENET_RX_ER ) & & ENET_RX_ER > = 0 <nl> - REPORT_NAME_DIGITAL ( ENET_RX_ER , __LINE__ ) <nl> + # if _EXISTS ( 
ENET_RX_ER ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , ENET_RX_ER ) <nl> # endif <nl> - # if defined ( ENET_RXD0 ) & & ENET_RXD0 > = 0 <nl> - REPORT_NAME_DIGITAL ( ENET_RXD0 , __LINE__ ) <nl> + # if _EXISTS ( ENET_RXD0 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , ENET_RXD0 ) <nl> # endif <nl> - # if defined ( ENET_RXD1 ) & & ENET_RXD1 > = 0 <nl> - REPORT_NAME_DIGITAL ( ENET_RXD1 , __LINE__ ) <nl> + # if _EXISTS ( ENET_RXD1 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , ENET_RXD1 ) <nl> # endif <nl> - # if defined ( ENET_TX_EN ) & & ENET_TX_EN > = 0 <nl> - REPORT_NAME_DIGITAL ( ENET_TX_EN , __LINE__ ) <nl> + # if _EXISTS ( ENET_TX_EN ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , ENET_TX_EN ) <nl> # endif <nl> - # if defined ( ENET_TXD0 ) & & ENET_TXD0 > = 0 <nl> - REPORT_NAME_DIGITAL ( ENET_TXD0 , __LINE__ ) <nl> + # if _EXISTS ( ENET_TXD0 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , ENET_TXD0 ) <nl> # endif <nl> - # if defined ( ENET_TXD1 ) & & ENET_TXD1 > = 0 <nl> - REPORT_NAME_DIGITAL ( ENET_TXD1 , __LINE__ ) <nl> + # if _EXISTS ( ENET_TXD1 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , ENET_TXD1 ) <nl> # endif <nl> # if PIN_EXISTS ( EXP_VOLTAGE_LEVEL ) <nl> - REPORT_NAME_DIGITAL ( EXP_VOLTAGE_LEVEL_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , EXP_VOLTAGE_LEVEL_PIN ) <nl> + # endif <nl> + # if _EXISTS_ANALOG ( EXT_AUX_A0 ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , EXT_AUX_A0 ) <nl> + # endif <nl> + # if _EXISTS ( EXT_AUX_A0 ) & & EXT_AUX_A0 > = NUM_ANALOG_INPUTS <nl> + REPORT_NAME_DIGITAL ( __LINE__ , EXT_AUX_A0 ) <nl> + # endif <nl> + # if _EXISTS ( EXT_AUX_A0_IO ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , EXT_AUX_A0_IO ) <nl> + # endif <nl> + # if _EXISTS_ANALOG ( EXT_AUX_A1 ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , EXT_AUX_A1 ) <nl> # endif <nl> - # if defined ( EXT_AUX_A0_IO ) & & EXT_AUX_A0_IO > = 0 <nl> - REPORT_NAME_DIGITAL ( EXT_AUX_A0_IO , __LINE__ ) <nl> + # if _EXISTS ( EXT_AUX_A1 ) & & EXT_AUX_A1 > = NUM_ANALOG_INPUTS <nl> + REPORT_NAME_DIGITAL ( __LINE__ , EXT_AUX_A1 ) <nl> # endif <nl> - # if defined ( EXT_AUX_A1_IO ) & & EXT_AUX_A1_IO > = 0 <nl> - REPORT_NAME_DIGITAL ( EXT_AUX_A1_IO , __LINE__ ) <nl> + # if _EXISTS ( EXT_AUX_A1_IO ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , EXT_AUX_A1_IO ) <nl> # endif <nl> - # if defined ( EXT_AUX_A2_IO ) & & EXT_AUX_A2_IO > = 0 <nl> - REPORT_NAME_DIGITAL ( EXT_AUX_A2_IO , __LINE__ ) <nl> + # if _EXISTS_ANALOG ( EXT_AUX_A2 ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , EXT_AUX_A2 ) <nl> # endif <nl> - # if defined ( EXT_AUX_A3_IO ) & & EXT_AUX_A3_IO > = 0 <nl> - REPORT_NAME_DIGITAL ( EXT_AUX_A3_IO , __LINE__ ) <nl> + # if _EXISTS ( EXT_AUX_A2 ) & & EXT_AUX_A2 > = NUM_ANALOG_INPUTS <nl> + REPORT_NAME_DIGITAL ( __LINE__ , EXT_AUX_A2 ) <nl> # endif <nl> - # if defined ( EXT_AUX_A4_IO ) & & EXT_AUX_A4_IO > = 0 <nl> - REPORT_NAME_DIGITAL ( EXT_AUX_A4_IO , __LINE__ ) <nl> + # if _EXISTS ( EXT_AUX_A2_IO ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , EXT_AUX_A2_IO ) <nl> # endif <nl> - # if defined ( EXT_AUX_PWM_D24 ) & & EXT_AUX_PWM_D24 > = 0 <nl> - REPORT_NAME_DIGITAL ( EXT_AUX_PWM_D24 , __LINE__ ) <nl> + # if _EXISTS_ANALOG ( EXT_AUX_A3 ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , EXT_AUX_A3 ) <nl> # endif <nl> - # if defined ( EXT_AUX_RX1_D2 ) & & EXT_AUX_RX1_D2 > = 0 <nl> - REPORT_NAME_DIGITAL ( EXT_AUX_RX1_D2 , __LINE__ ) <nl> + # if _EXISTS ( EXT_AUX_A3 ) & & EXT_AUX_A3 > = NUM_ANALOG_INPUTS <nl> + REPORT_NAME_DIGITAL ( __LINE__ , EXT_AUX_A3 ) <nl> # endif <nl> - # if defined ( EXT_AUX_SCL_D0 ) & & EXT_AUX_SCL_D0 > = 0 <nl> - REPORT_NAME_DIGITAL ( EXT_AUX_SCL_D0 , 
__LINE__ ) <nl> + # if _EXISTS ( EXT_AUX_A3_IO ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , EXT_AUX_A3_IO ) <nl> # endif <nl> - # if defined ( EXT_AUX_SDA_D1 ) & & EXT_AUX_SDA_D1 > = 0 <nl> - REPORT_NAME_DIGITAL ( EXT_AUX_SDA_D1 , __LINE__ ) <nl> + # if _EXISTS_ANALOG ( EXT_AUX_A4 ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , EXT_AUX_A4 ) <nl> # endif <nl> - # if defined ( EXT_AUX_TX1_D3 ) & & EXT_AUX_TX1_D3 > = 0 <nl> - REPORT_NAME_DIGITAL ( EXT_AUX_TX1_D3 , __LINE__ ) <nl> + # if _EXISTS ( EXT_AUX_A4 ) & & EXT_AUX_A4 > = NUM_ANALOG_INPUTS <nl> + REPORT_NAME_DIGITAL ( __LINE__ , EXT_AUX_A4 ) <nl> + # endif <nl> + # if _EXISTS ( EXT_AUX_A4_IO ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , EXT_AUX_A4_IO ) <nl> + # endif <nl> + # if _EXISTS ( EXT_AUX_PWM_D24 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , EXT_AUX_PWM_D24 ) <nl> + # endif <nl> + # if _EXISTS ( EXT_AUX_RX1_D2 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , EXT_AUX_RX1_D2 ) <nl> + # endif <nl> + # if _EXISTS ( EXT_AUX_SCL_D0 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , EXT_AUX_SCL_D0 ) <nl> + # endif <nl> + # if _EXISTS ( EXT_AUX_SDA_D1 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , EXT_AUX_SDA_D1 ) <nl> + # endif <nl> + # if _EXISTS ( EXT_AUX_TX1_D3 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , EXT_AUX_TX1_D3 ) <nl> + # endif <nl> + # if _EXISTS ( EXTRUDER_0_AUTO_FAN ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , EXTRUDER_0_AUTO_FAN ) <nl> + # endif <nl> + # if _EXISTS ( EXTRUDER_1_AUTO_FAN ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , EXTRUDER_1_AUTO_FAN ) <nl> # endif <nl> # if PIN_EXISTS ( FAN ) <nl> - REPORT_NAME_DIGITAL ( FAN_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , FAN_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( FAN1 ) <nl> - REPORT_NAME_DIGITAL ( FAN1_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , FAN1_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( FAN2 ) <nl> - REPORT_NAME_DIGITAL ( FAN2_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , FAN2_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( FAN3 ) <nl> - REPORT_NAME_DIGITAL ( FAN3_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , FAN3_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( FIL_RUNOUT ) <nl> - REPORT_NAME_DIGITAL ( FIL_RUNOUT_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , FIL_RUNOUT_PIN ) <nl> # endif <nl> - # if defined ( GEN7_VERSION ) & & GEN7_VERSION > = 0 <nl> - REPORT_NAME_DIGITAL ( GEN7_VERSION , __LINE__ ) <nl> + # if PIN_EXISTS_ANALOG ( FILWIDTH ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , FILWIDTH_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( HEATER_0 ) <nl> - REPORT_NAME_DIGITAL ( HEATER_0_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , HEATER_0_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( HEATER_1 ) <nl> - REPORT_NAME_DIGITAL ( HEATER_1_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , HEATER_1_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( HEATER_2 ) <nl> - REPORT_NAME_DIGITAL ( HEATER_2_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , HEATER_2_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( HEATER_3 ) <nl> - REPORT_NAME_DIGITAL ( HEATER_3_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , HEATER_3_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( HEATER_4 ) <nl> - REPORT_NAME_DIGITAL ( HEATER_4_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , HEATER_4_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( HEATER_5 ) <nl> - REPORT_NAME_DIGITAL ( HEATER_5_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , HEATER_5_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( HEATER_6 ) <nl> - REPORT_NAME_DIGITAL ( HEATER_6_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , 
HEATER_6_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( HEATER_7 ) <nl> - REPORT_NAME_DIGITAL ( HEATER_7_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , HEATER_7_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( HEATER_BED ) <nl> - REPORT_NAME_DIGITAL ( HEATER_BED_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , HEATER_BED_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( HOME ) <nl> - REPORT_NAME_DIGITAL ( HOME_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , HOME_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( I2C_SCL ) <nl> - REPORT_NAME_DIGITAL ( I2C_SCL_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , I2C_SCL_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( I2C_SDA ) <nl> - REPORT_NAME_DIGITAL ( I2C_SDA_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , I2C_SDA_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( KILL ) <nl> - REPORT_NAME_DIGITAL ( KILL_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , KILL_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( LCD_BACKLIGHT ) <nl> - REPORT_NAME_DIGITAL ( LCD_BACKLIGHT_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , LCD_BACKLIGHT_PIN ) <nl> # endif <nl> - # if defined ( LCD_PINS_D4 ) & & LCD_PINS_D4 > = 0 <nl> - REPORT_NAME_DIGITAL ( LCD_PINS_D4 , __LINE__ ) <nl> + # if _EXISTS ( LCD_PINS_D4 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , LCD_PINS_D4 ) <nl> # endif <nl> - # if defined ( LCD_PINS_D5 ) & & LCD_PINS_D5 > = 0 <nl> - REPORT_NAME_DIGITAL ( LCD_PINS_D5 , __LINE__ ) <nl> + # if _EXISTS ( LCD_PINS_D5 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , LCD_PINS_D5 ) <nl> # endif <nl> - # if defined ( LCD_PINS_D6 ) & & LCD_PINS_D6 > = 0 <nl> - REPORT_NAME_DIGITAL ( LCD_PINS_D6 , __LINE__ ) <nl> + # if _EXISTS ( LCD_PINS_D6 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , LCD_PINS_D6 ) <nl> # endif <nl> - # if defined ( LCD_PINS_D7 ) & & LCD_PINS_D7 > = 0 <nl> - REPORT_NAME_DIGITAL ( LCD_PINS_D7 , __LINE__ ) <nl> + # if _EXISTS ( LCD_PINS_D7 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , LCD_PINS_D7 ) <nl> # endif <nl> - # if defined ( LCD_PINS_ENABLE ) & & LCD_PINS_ENABLE > = 0 <nl> - REPORT_NAME_DIGITAL ( LCD_PINS_ENABLE , __LINE__ ) <nl> + # if _EXISTS ( LCD_PINS_ENABLE ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , LCD_PINS_ENABLE ) <nl> # endif <nl> - # if defined ( LCD_PINS_RS ) & & LCD_PINS_RS > = 0 <nl> - REPORT_NAME_DIGITAL ( LCD_PINS_RS , __LINE__ ) <nl> + # if _EXISTS ( LCD_PINS_RS ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , LCD_PINS_RS ) <nl> # endif <nl> - # if defined ( LCD_SDSS ) & & LCD_SDSS > = 0 <nl> - REPORT_NAME_DIGITAL ( LCD_SDSS , __LINE__ ) <nl> + # if _EXISTS ( LCD_SDSS ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , LCD_SDSS ) <nl> # endif <nl> # if PIN_EXISTS ( LED_GREEN ) <nl> - REPORT_NAME_DIGITAL ( LED_GREEN_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , LED_GREEN_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( LED ) <nl> - REPORT_NAME_DIGITAL ( LED_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , LED_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( LED_RED ) <nl> - REPORT_NAME_DIGITAL ( LED_RED_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , LED_RED_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( MAX ) <nl> - REPORT_NAME_DIGITAL ( MAX_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , MAX_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( MAX6675_DO ) <nl> - REPORT_NAME_DIGITAL ( MAX6675_DO_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , MAX6675_DO_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( MAX6675_SCK ) <nl> - REPORT_NAME_DIGITAL ( MAX6675_SCK_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , MAX6675_SCK_PIN ) <nl> # endif 
<nl> - # if defined ( MAX6675_SS ) & & MAX6675_SS > = 0 <nl> - REPORT_NAME_DIGITAL ( MAX6675_SS , __LINE__ ) <nl> + # if PIN_EXISTS_ANALOG ( MAIN_VOLTAGE_MEASURE ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , MAIN_VOLTAGE_MEASURE_PIN ) <nl> # endif <nl> - / / # if defined ( MISO ) & & MISO > = 0 <nl> - / / REPORT_NAME_DIGITAL ( MISO , __LINE__ ) <nl> + # if _EXISTS ( MAX6675_SS ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , MAX6675_SS ) <nl> + # endif <nl> + / / # if _EXISTS ( MISO ) <nl> + / / REPORT_NAME_DIGITAL ( __LINE__ , MISO ) <nl> / / # endif <nl> # if PIN_EXISTS ( MISO ) <nl> - REPORT_NAME_DIGITAL ( MISO_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , MISO_PIN ) <nl> + # endif <nl> + # if PIN_EXISTS ( MOSFET_A ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , MOSFET_A_PIN ) <nl> + # endif <nl> + # if PIN_EXISTS ( MOSFET_B ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , MOSFET_B_PIN ) <nl> + # endif <nl> + # if PIN_EXISTS ( MOSFET_C ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , MOSFET_C_PIN ) <nl> # endif <nl> - / / # if defined ( MOSI ) & & MOSI > = 0 <nl> - / / REPORT_NAME_DIGITAL ( MOSI , __LINE__ ) <nl> + # if PIN_EXISTS ( MOSFET_D ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , MOSFET_D_PIN ) <nl> + # endif <nl> + / / # if _EXISTS ( MOSI ) <nl> + / / REPORT_NAME_DIGITAL ( __LINE__ , MOSI ) <nl> / / # endif <nl> # if PIN_EXISTS ( MOSI ) <nl> - REPORT_NAME_DIGITAL ( MOSI_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , MOSI_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( MOTOR_CURRENT_PWM_E ) <nl> - REPORT_NAME_DIGITAL ( MOTOR_CURRENT_PWM_E_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , MOTOR_CURRENT_PWM_E_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( MOTOR_CURRENT_PWM_XY ) <nl> - REPORT_NAME_DIGITAL ( MOTOR_CURRENT_PWM_XY_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , MOTOR_CURRENT_PWM_XY_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( MOTOR_CURRENT_PWM_Z ) <nl> - REPORT_NAME_DIGITAL ( MOTOR_CURRENT_PWM_Z_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , MOTOR_CURRENT_PWM_Z_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( MOTOR_FAULT ) <nl> - REPORT_NAME_DIGITAL ( MOTOR_FAULT_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , MOTOR_FAULT_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( NUM_DIGITAL ) <nl> - REPORT_NAME_DIGITAL ( NUM_DIGITAL_PINS , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , NUM_DIGITAL_PINS ) <nl> # endif <nl> # if PIN_EXISTS ( ORIG_E0_AUTO_FAN ) <nl> - REPORT_NAME_DIGITAL ( ORIG_E0_AUTO_FAN_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , ORIG_E0_AUTO_FAN_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( ORIG_E1_AUTO_FAN ) <nl> - REPORT_NAME_DIGITAL ( ORIG_E1_AUTO_FAN_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , ORIG_E1_AUTO_FAN_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( ORIG_E2_AUTO_FAN ) <nl> - REPORT_NAME_DIGITAL ( ORIG_E2_AUTO_FAN_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , ORIG_E2_AUTO_FAN_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( ORIG_E3_AUTO_FAN ) <nl> - REPORT_NAME_DIGITAL ( ORIG_E3_AUTO_FAN_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , ORIG_E3_AUTO_FAN_PIN ) <nl> + # endif <nl> + # if PIN_EXISTS ( ORIG_E4_AUTO_FAN ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , ORIG_E4_AUTO_FAN_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( PHOTOGRAPH ) <nl> - REPORT_NAME_DIGITAL ( PHOTOGRAPH_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , PHOTOGRAPH_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( PS_ON ) <nl> - REPORT_NAME_DIGITAL ( PS_ON_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , PS_ON_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( PWM_1 ) 
<nl> - REPORT_NAME_DIGITAL ( PWM_1_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , PWM_1_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( PWM_2 ) <nl> - REPORT_NAME_DIGITAL ( PWM_2_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , PWM_2_PIN ) <nl> + # endif <nl> + # if _EXISTS ( REF_CLK ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , REF_CLK ) <nl> + # endif <nl> + # if PIN_EXISTS ( RAMPS_D10 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , RAMPS_D10_PIN ) <nl> # endif <nl> - # if defined ( REF_CLK ) & & REF_CLK > = 0 <nl> - REPORT_NAME_DIGITAL ( REF_CLK , __LINE__ ) <nl> + # if PIN_EXISTS ( RAMPS_D8 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , RAMPS_D8_PIN ) <nl> + # endif <nl> + # if PIN_EXISTS ( RAMPS_D9 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , RAMPS_D9_PIN ) <nl> + # endif <nl> + # if PIN_EXISTS ( RGB_LED_R ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , RGB_LED_R_PIN ) <nl> + # endif <nl> + # if PIN_EXISTS ( RGB_LED_G ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , RGB_LED_G_PIN ) <nl> + # endif <nl> + # if PIN_EXISTS ( RGB_LED_B ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , RGB_LED_B_PIN ) <nl> + # endif <nl> + # if PIN_EXISTS ( RGB_LED_W ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , RGB_LED_W_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( RX_ENABLE ) <nl> - REPORT_NAME_DIGITAL ( RX_ENABLE_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , RX_ENABLE_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( SAFETY_TRIGGERED ) <nl> - REPORT_NAME_DIGITAL ( SAFETY_TRIGGERED_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SAFETY_TRIGGERED_PIN ) <nl> # endif <nl> - / / # if defined ( SCK ) & & SCK > = 0 <nl> - / / REPORT_NAME_DIGITAL ( SCK , __LINE__ ) <nl> + / / # if _EXISTS ( SCK ) <nl> + / / REPORT_NAME_DIGITAL ( __LINE__ , SCK ) <nl> / / # endif <nl> # if PIN_EXISTS ( SCK ) <nl> - REPORT_NAME_DIGITAL ( SCK_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SCK_PIN ) <nl> + # endif <nl> + # if _EXISTS ( SCL ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SCL ) <nl> # endif <nl> # if PIN_EXISTS ( SD_DETECT ) <nl> - REPORT_NAME_DIGITAL ( SD_DETECT_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SD_DETECT_PIN ) <nl> # endif <nl> - # if defined ( SDPOWER ) & & SDPOWER > = 0 <nl> - REPORT_NAME_DIGITAL ( SDPOWER , __LINE__ ) <nl> + # if _EXISTS ( SDA ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SDA ) <nl> # endif <nl> - # if defined ( SDSS ) & & SDSS > = 0 <nl> - REPORT_NAME_DIGITAL ( SDSS , __LINE__ ) <nl> + # if _EXISTS ( SDPOWER ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SDPOWER ) <nl> # endif <nl> - # if defined ( SERVO0 ) & & SERVO0 > = 0 <nl> - REPORT_NAME_DIGITAL ( SERVO0 , __LINE__ ) <nl> + # if _EXISTS ( SDSS ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SDSS ) <nl> + # endif <nl> + # if _EXISTS ( SERVO0 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SERVO0 ) <nl> # endif <nl> # if PIN_EXISTS ( SERVO0 ) <nl> - REPORT_NAME_DIGITAL ( SERVO0_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SERVO0_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( SERVO1 ) <nl> - REPORT_NAME_DIGITAL ( SERVO1_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SERVO1_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( SERVO2 ) <nl> - REPORT_NAME_DIGITAL ( SERVO2_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SERVO2_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( SERVO3 ) <nl> - REPORT_NAME_DIGITAL ( SERVO3_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SERVO3_PIN ) <nl> + # endif <nl> + # if _EXISTS ( SHIFT_CLK ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SHIFT_CLK ) <nl> # endif <nl> - # if defined ( SHIFT_CLK ) & & SHIFT_CLK > = 0 
<nl> - REPORT_NAME_DIGITAL ( SHIFT_CLK , __LINE__ ) <nl> + # if _EXISTS ( SHIFT_EN ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SHIFT_EN ) <nl> # endif <nl> - # if defined ( SHIFT_EN ) & & SHIFT_EN > = 0 <nl> - REPORT_NAME_DIGITAL ( SHIFT_EN , __LINE__ ) <nl> + # if _EXISTS ( SHIFT_LD ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SHIFT_LD ) <nl> # endif <nl> - # if defined ( SHIFT_LD ) & & SHIFT_LD > = 0 <nl> - REPORT_NAME_DIGITAL ( SHIFT_LD , __LINE__ ) <nl> + # if _EXISTS ( SHIFT_OUT ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SHIFT_OUT ) <nl> # endif <nl> - # if defined ( SHIFT_OUT ) & & SHIFT_OUT > = 0 <nl> - REPORT_NAME_DIGITAL ( SHIFT_OUT , __LINE__ ) <nl> + # if PIN_EXISTS ( SLED ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SLED_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( SLEEP_WAKE ) <nl> - REPORT_NAME_DIGITAL ( SLEEP_WAKE_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SLEEP_WAKE_PIN ) <nl> + # endif <nl> + # if PIN_EXISTS ( SOL0 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SOL0_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( SOL1 ) <nl> - REPORT_NAME_DIGITAL ( SOL1_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SOL1_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( SOL2 ) <nl> - REPORT_NAME_DIGITAL ( SOL2_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SOL2_PIN ) <nl> + # endif <nl> + # if PIN_EXISTS ( SOL3 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SOL3_PIN ) <nl> # endif <nl> - # if defined ( SPARE_IO ) & & SPARE_IO > = 0 <nl> - REPORT_NAME_DIGITAL ( SPARE_IO , __LINE__ ) <nl> + # if PIN_EXISTS ( SOL4 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SOL4_PIN ) <nl> # endif <nl> - # if defined ( SPI_CHAN_DAC ) & & SPI_CHAN_DAC > = 0 <nl> - REPORT_NAME_DIGITAL ( SPI_CHAN_DAC , __LINE__ ) <nl> + # if _EXISTS ( SPARE_IO ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SPARE_IO ) <nl> # endif <nl> - # if defined ( SPI_CHAN_EEPROM1 ) & & SPI_CHAN_EEPROM1 > = 0 <nl> - REPORT_NAME_DIGITAL ( SPI_CHAN_EEPROM1 , __LINE__ ) <nl> + # if _EXISTS ( SPI_CHAN_DAC ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SPI_CHAN_DAC ) <nl> # endif <nl> - # if defined ( SPI_EEPROM ) & & SPI_EEPROM > = 0 <nl> - REPORT_NAME_DIGITAL ( SPI_EEPROM , __LINE__ ) <nl> + # if _EXISTS ( SPI_CHAN_EEPROM1 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SPI_CHAN_EEPROM1 ) <nl> # endif <nl> - # if defined ( SPI_EEPROM1_CS ) & & SPI_EEPROM1_CS > = 0 <nl> - REPORT_NAME_DIGITAL ( SPI_EEPROM1_CS , __LINE__ ) <nl> + # if _EXISTS ( SPI_EEPROM ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SPI_EEPROM ) <nl> # endif <nl> - # if defined ( SPI_EEPROM2_CS ) & & SPI_EEPROM2_CS > = 0 <nl> - REPORT_NAME_DIGITAL ( SPI_EEPROM2_CS , __LINE__ ) <nl> + # if _EXISTS ( SPI_EEPROM1_CS ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SPI_EEPROM1_CS ) <nl> # endif <nl> - # if defined ( SPI_FLASH_CS ) & & SPI_FLASH_CS > = 0 <nl> - REPORT_NAME_DIGITAL ( SPI_FLASH_CS , __LINE__ ) <nl> + # if _EXISTS ( SPI_EEPROM2_CS ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SPI_EEPROM2_CS ) <nl> + # endif <nl> + # if _EXISTS ( SPI_FLASH_CS ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SPI_FLASH_CS ) <nl> # endif <nl> # if PIN_EXISTS ( SPINDLE_DIR ) <nl> - REPORT_NAME_DIGITAL ( SPINDLE_DIR_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SPINDLE_DIR_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( SPINDLE_ENABLE ) <nl> - REPORT_NAME_DIGITAL ( SPINDLE_ENABLE_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SPINDLE_ENABLE_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( SPINDLE_LASER_ENABLE ) <nl> - REPORT_NAME_DIGITAL ( SPINDLE_LASER_ENABLE_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , 
SPINDLE_LASER_ENABLE_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( SPINDLE_LASER_PWM ) <nl> - REPORT_NAME_DIGITAL ( SPINDLE_LASER_PWM_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SPINDLE_LASER_PWM_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( SR_CLK ) <nl> - REPORT_NAME_DIGITAL ( SR_CLK_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SR_CLK_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( SR_DATA ) <nl> - REPORT_NAME_DIGITAL ( SR_DATA_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SR_DATA_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( SR_STROBE ) <nl> - REPORT_NAME_DIGITAL ( SR_STROBE_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SR_STROBE_PIN ) <nl> + # endif <nl> + # if PIN_EXISTS ( SS ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SS_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( STAT_LED_BLUE ) <nl> - REPORT_NAME_DIGITAL ( STAT_LED_BLUE_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , STAT_LED_BLUE_PIN ) <nl> # endif <nl> - # if defined ( STAT_LED_RED_LED ) & & STAT_LED_RED_LED > = 0 <nl> - REPORT_NAME_DIGITAL ( STAT_LED_RED_LED , __LINE__ ) <nl> + # if _EXISTS ( STAT_LED_RED_LED ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , STAT_LED_RED_LED ) <nl> # endif <nl> # if PIN_EXISTS ( STAT_LED_RED ) <nl> - REPORT_NAME_DIGITAL ( STAT_LED_RED_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , STAT_LED_RED_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( STEPPER_RESET ) <nl> - REPORT_NAME_DIGITAL ( STEPPER_RESET_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , STEPPER_RESET_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( SUICIDE ) <nl> - REPORT_NAME_DIGITAL ( SUICIDE_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , SUICIDE_PIN ) <nl> # endif <nl> - # if defined ( THERMO_CS1 ) & & THERMO_CS1 > = 0 <nl> - REPORT_NAME_DIGITAL ( THERMO_CS1 , __LINE__ ) <nl> + # if _EXISTS ( THERMO_CS1 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , THERMO_CS1 ) <nl> # endif <nl> - # if defined ( THERMO_CS2 ) & & THERMO_CS2 > = 0 <nl> - REPORT_NAME_DIGITAL ( THERMO_CS2 , __LINE__ ) <nl> + # if _EXISTS ( THERMO_CS2 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , THERMO_CS2 ) <nl> # endif <nl> - # if PIN_EXISTS ( THERMO_DO ) <nl> - REPORT_NAME_DIGITAL ( THERMO_DO_PIN , __LINE__ ) <nl> + # if _EXISTS_ANALOG ( TC1 ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , TC1 ) <nl> # endif <nl> - # if PIN_EXISTS ( THERMO_SCK ) <nl> - REPORT_NAME_DIGITAL ( THERMO_SCK_PIN , __LINE__ ) <nl> + # if _EXISTS_ANALOG ( TC2 ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , TC2 ) <nl> # endif <nl> - # if defined ( TLC_BLANK_BIT ) & & TLC_BLANK_BIT > = 0 <nl> - REPORT_NAME_DIGITAL ( TLC_BLANK_BIT , __LINE__ ) <nl> + # if PIN_EXISTS_ANALOG ( TEMP_0 ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , TEMP_0_PIN ) <nl> # endif <nl> - # if PIN_EXISTS ( TLC_BLANK ) <nl> - REPORT_NAME_DIGITAL ( TLC_BLANK_PIN , __LINE__ ) <nl> + # if PIN_EXISTS_ANALOG ( TEMP_1 ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , TEMP_1_PIN ) <nl> # endif <nl> - # if defined ( TLC_BLANK_PORT ) & & TLC_BLANK_PORT > = 0 <nl> - REPORT_NAME_DIGITAL ( TLC_BLANK_PORT , __LINE__ ) <nl> + # if PIN_EXISTS_ANALOG ( TEMP_2 ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , TEMP_2_PIN ) <nl> # endif <nl> - # if defined ( TLC_CLOCK_BIT ) & & TLC_CLOCK_BIT > = 0 <nl> - REPORT_NAME_DIGITAL ( TLC_CLOCK_BIT , __LINE__ ) <nl> + # if PIN_EXISTS_ANALOG ( TEMP_3 ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , TEMP_3_PIN ) <nl> # endif <nl> - # if PIN_EXISTS ( TLC_CLOCK ) <nl> - REPORT_NAME_DIGITAL ( TLC_CLOCK_PIN , __LINE__ ) <nl> + # if PIN_EXISTS_ANALOG ( TEMP_4 ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , TEMP_4_PIN ) <nl> # 
endif <nl> - # if defined ( TLC_CLOCK_PORT ) & & TLC_CLOCK_PORT > = 0 <nl> - REPORT_NAME_DIGITAL ( TLC_CLOCK_PORT , __LINE__ ) <nl> + # if PIN_EXISTS_ANALOG ( TEMP_BED ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , TEMP_BED_PIN ) <nl> # endif <nl> - # if defined ( TLC_DATA_BIT ) & & TLC_DATA_BIT > = 0 <nl> - REPORT_NAME_DIGITAL ( TLC_DATA_BIT , __LINE__ ) <nl> + # if PIN_EXISTS_ANALOG ( TEMP_CHAMBER ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , TEMP_CHAMBER_PIN ) <nl> # endif <nl> - # if PIN_EXISTS ( TLC_DATA ) <nl> - REPORT_NAME_DIGITAL ( TLC_DATA_PIN , __LINE__ ) <nl> + # if PIN_EXISTS_ANALOG ( TEMP_X ) <nl> + REPORT_NAME_ANALOG ( __LINE__ , TEMP_X_PIN ) <nl> # endif <nl> - # if defined ( TLC_DATA_PORT ) & & TLC_DATA_PORT > = 0 <nl> - REPORT_NAME_DIGITAL ( TLC_DATA_PORT , __LINE__ ) <nl> + # if PIN_EXISTS ( THERMO_DO ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , THERMO_DO_PIN ) <nl> # endif <nl> - # if defined ( TLC_XLAT_BIT ) & & TLC_XLAT_BIT > = 0 <nl> - REPORT_NAME_DIGITAL ( TLC_XLAT_BIT , __LINE__ ) <nl> + # if PIN_EXISTS ( THERMO_SCK ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , THERMO_SCK_PIN ) <nl> # endif <nl> - # if PIN_EXISTS ( TLC_XLAT ) <nl> - REPORT_NAME_DIGITAL ( TLC_XLAT_PIN , __LINE__ ) <nl> + # if PIN_EXISTS ( TLC_BLANK ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , TLC_BLANK_PIN ) <nl> + # endif <nl> + # if PIN_EXISTS ( TLC_CLOCK ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , TLC_CLOCK_PIN ) <nl> + # endif <nl> + # if PIN_EXISTS ( TLC_DATA ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , TLC_DATA_PIN ) <nl> # endif <nl> - # if defined ( TLC_XLAT_PORT ) & & TLC_XLAT_PORT > = 0 <nl> - REPORT_NAME_DIGITAL ( TLC_XLAT_PORT , __LINE__ ) <nl> + # if PIN_EXISTS ( TLC_XLAT ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , TLC_XLAT_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( TOOL_0 ) <nl> - REPORT_NAME_DIGITAL ( TOOL_0_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , TOOL_0_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( TOOL_0_PWM ) <nl> - REPORT_NAME_DIGITAL ( TOOL_0_PWM_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , TOOL_0_PWM_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( TOOL_1 ) <nl> - REPORT_NAME_DIGITAL ( TOOL_1_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , TOOL_1_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( TOOL_1_PWM ) <nl> - REPORT_NAME_DIGITAL ( TOOL_1_PWM_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , TOOL_1_PWM_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( TOOL_2 ) <nl> - REPORT_NAME_DIGITAL ( TOOL_2_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , TOOL_2_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( TOOL_2_PWM ) <nl> - REPORT_NAME_DIGITAL ( TOOL_2_PWM_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , TOOL_2_PWM_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( TOOL_3 ) <nl> - REPORT_NAME_DIGITAL ( TOOL_3_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , TOOL_3_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( TOOL_3_PWM ) <nl> - REPORT_NAME_DIGITAL ( TOOL_3_PWM_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , TOOL_3_PWM_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( TOOL_PWM ) <nl> - REPORT_NAME_DIGITAL ( TOOL_PWM_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , TOOL_PWM_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( TX_ENABLE ) <nl> - REPORT_NAME_DIGITAL ( TX_ENABLE_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , TX_ENABLE_PIN ) <nl> # endif <nl> - # if defined ( UI1 ) & & UI1 > = 0 <nl> - REPORT_NAME_DIGITAL ( UI1 , __LINE__ ) <nl> + # if _EXISTS ( UI1 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , UI1 ) <nl> # endif <nl> - # if defined ( UI2 ) & & UI2 > = 0 <nl> - 
REPORT_NAME_DIGITAL ( UI2 , __LINE__ ) <nl> + # if _EXISTS ( UI2 ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , UI2 ) <nl> # endif <nl> - # if defined ( UNUSED_PWM ) & & UNUSED_PWM > = 0 <nl> - REPORT_NAME_DIGITAL ( UNUSED_PWM , __LINE__ ) <nl> + # if _EXISTS ( UNUSED_PWM ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , UNUSED_PWM ) <nl> # endif <nl> # if PIN_EXISTS ( X_ATT ) <nl> - REPORT_NAME_DIGITAL ( X_ATT_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , X_ATT_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( X_CS ) <nl> - REPORT_NAME_DIGITAL ( X_CS_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , X_CS_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( X_DIR ) <nl> - REPORT_NAME_DIGITAL ( X_DIR_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , X_DIR_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( X_ENABLE ) <nl> - REPORT_NAME_DIGITAL ( X_ENABLE_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , X_ENABLE_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( X_MAX ) <nl> - REPORT_NAME_DIGITAL ( X_MAX_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , X_MAX_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( X_MIN ) <nl> - REPORT_NAME_DIGITAL ( X_MIN_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , X_MIN_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( X_MS1 ) <nl> - REPORT_NAME_DIGITAL ( X_MS1_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , X_MS1_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( X_MS2 ) <nl> - REPORT_NAME_DIGITAL ( X_MS2_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , X_MS2_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( X_STEP ) <nl> - REPORT_NAME_DIGITAL ( X_STEP_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , X_STEP_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( X_STOP ) <nl> - REPORT_NAME_DIGITAL ( X_STOP_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , X_STOP_PIN ) <nl> + # endif <nl> + # if PIN_EXISTS ( X2_DIR ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , X2_DIR_PIN ) <nl> + # endif <nl> + # if PIN_EXISTS ( X2_ENABLE ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , X2_ENABLE_PIN ) <nl> + # endif <nl> + # if PIN_EXISTS ( X2_STEP ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , X2_STEP_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Y_ATT ) <nl> - REPORT_NAME_DIGITAL ( Y_ATT_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Y_ATT_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Y_CS ) <nl> - REPORT_NAME_DIGITAL ( Y_CS_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Y_CS_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Y_DIR ) <nl> - REPORT_NAME_DIGITAL ( Y_DIR_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Y_DIR_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Y_ENABLE ) <nl> - REPORT_NAME_DIGITAL ( Y_ENABLE_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Y_ENABLE_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Y_MAX ) <nl> - REPORT_NAME_DIGITAL ( Y_MAX_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Y_MAX_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Y_MIN ) <nl> - REPORT_NAME_DIGITAL ( Y_MIN_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Y_MIN_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Y_MS1 ) <nl> - REPORT_NAME_DIGITAL ( Y_MS1_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Y_MS1_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Y_MS2 ) <nl> - REPORT_NAME_DIGITAL ( Y_MS2_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Y_MS2_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Y_STEP ) <nl> - REPORT_NAME_DIGITAL ( Y_STEP_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Y_STEP_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Y_STOP ) <nl> - REPORT_NAME_DIGITAL ( Y_STOP_PIN 
, __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Y_STOP_PIN ) <nl> + # endif <nl> + # if PIN_EXISTS ( Y2_DIR ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Y2_DIR_PIN ) <nl> + # endif <nl> + # if PIN_EXISTS ( Y2_ENABLE ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Y2_ENABLE_PIN ) <nl> + # endif <nl> + # if PIN_EXISTS ( Y2_STEP ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Y2_STEP_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Z_ATT ) <nl> - REPORT_NAME_DIGITAL ( Z_ATT_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Z_ATT_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Z_CS ) <nl> - REPORT_NAME_DIGITAL ( Z_CS_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Z_CS_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Z_DIR ) <nl> - REPORT_NAME_DIGITAL ( Z_DIR_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Z_DIR_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Z_ENABLE ) <nl> - REPORT_NAME_DIGITAL ( Z_ENABLE_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Z_ENABLE_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Z_MAX ) <nl> - REPORT_NAME_DIGITAL ( Z_MAX_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Z_MAX_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Z_MIN ) <nl> - REPORT_NAME_DIGITAL ( Z_MIN_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Z_MIN_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Z_MIN_PROBE ) <nl> - REPORT_NAME_DIGITAL ( Z_MIN_PROBE_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Z_MIN_PROBE_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Z_MS1 ) <nl> - REPORT_NAME_DIGITAL ( Z_MS1_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Z_MS1_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Z_MS2 ) <nl> - REPORT_NAME_DIGITAL ( Z_MS2_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Z_MS2_PIN ) <nl> # endif <nl> - # if defined ( Z_probe_pin ) & & Z_probe_pin > = 0 <nl> - REPORT_NAME_DIGITAL ( Z_probe_pin , __LINE__ ) <nl> + # if PIN_EXISTS ( Z_PROBE ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Z_PROBE_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Z_STEP ) <nl> - REPORT_NAME_DIGITAL ( Z_STEP_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Z_STEP_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Z_STOP ) <nl> - REPORT_NAME_DIGITAL ( Z_STOP_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Z_STOP_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Z2_CS ) <nl> - REPORT_NAME_DIGITAL ( Z2_CS_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Z2_CS_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Z2_DIR ) <nl> - REPORT_NAME_DIGITAL ( Z2_DIR_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Z2_DIR_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Z2_ENABLE ) <nl> - REPORT_NAME_DIGITAL ( Z2_ENABLE_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Z2_ENABLE_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Z2_MS1 ) <nl> - REPORT_NAME_DIGITAL ( Z2_MS1_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Z2_MS1_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Z2_MS2 ) <nl> - REPORT_NAME_DIGITAL ( Z2_MS2_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Z2_MS2_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Z2_MS3 ) <nl> - REPORT_NAME_DIGITAL ( Z2_MS3_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Z2_MS3_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Z2_STEP ) <nl> - REPORT_NAME_DIGITAL ( Z2_STEP_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Z2_STEP_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( ZRIB_V20_D6 ) <nl> - REPORT_NAME_DIGITAL ( ZRIB_V20_D6_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , ZRIB_V20_D6_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( ZRIB_V20_D9 ) <nl> - 
REPORT_NAME_DIGITAL ( ZRIB_V20_D9_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , ZRIB_V20_D9_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( X_SERIAL_TX ) <nl> - REPORT_NAME_DIGITAL ( X_SERIAL_TX_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , X_SERIAL_TX_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( X_SERIAL_RX ) <nl> - REPORT_NAME_DIGITAL ( X_SERIAL_RX_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , X_SERIAL_RX_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( X2_SERIAL_TX ) <nl> - REPORT_NAME_DIGITAL ( X2_SERIAL_TX_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , X2_SERIAL_TX_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( X2_SERIAL_RX ) <nl> - REPORT_NAME_DIGITAL ( X2_SERIAL_RX_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , X2_SERIAL_RX_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Y_SERIAL_TX ) <nl> - REPORT_NAME_DIGITAL ( Y_SERIAL_TX_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Y_SERIAL_TX_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Y_SERIAL_RX ) <nl> - REPORT_NAME_DIGITAL ( Y_SERIAL_RX_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Y_SERIAL_RX_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Y2_SERIAL_TX ) <nl> - REPORT_NAME_DIGITAL ( Y2_SERIAL_TX_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Y2_SERIAL_TX_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Y2_SERIAL_RX ) <nl> - REPORT_NAME_DIGITAL ( Y2_SERIAL_RX_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Y2_SERIAL_RX_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Z_SERIAL_TX ) <nl> - REPORT_NAME_DIGITAL ( Z_SERIAL_TX_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Z_SERIAL_TX_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Z_SERIAL_RX ) <nl> - REPORT_NAME_DIGITAL ( Z_SERIAL_RX_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Z_SERIAL_RX_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Z2_SERIAL_TX ) <nl> - REPORT_NAME_DIGITAL ( Z2_SERIAL_TX_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Z2_SERIAL_TX_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( Z2_SERIAL_RX ) <nl> - REPORT_NAME_DIGITAL ( Z2_SERIAL_RX_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , Z2_SERIAL_RX_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E0_SERIAL_TX ) <nl> - REPORT_NAME_DIGITAL ( E0_SERIAL_TX_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E0_SERIAL_TX_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E0_SERIAL_RX ) <nl> - REPORT_NAME_DIGITAL ( E0_SERIAL_RX_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E0_SERIAL_RX_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E1_SERIAL_TX ) <nl> - REPORT_NAME_DIGITAL ( E1_SERIAL_TX_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E1_SERIAL_TX_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E1_SERIAL_RX ) <nl> - REPORT_NAME_DIGITAL ( E1_SERIAL_RX_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E1_SERIAL_RX_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E2_SERIAL_TX ) <nl> - REPORT_NAME_DIGITAL ( E2_SERIAL_TX_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E2_SERIAL_TX_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E2_SERIAL_RX ) <nl> - REPORT_NAME_DIGITAL ( E2_SERIAL_RX_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E2_SERIAL_RX_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E3_SERIAL_TX ) <nl> - REPORT_NAME_DIGITAL ( E3_SERIAL_TX_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E3_SERIAL_TX_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E3_SERIAL_RX ) <nl> - REPORT_NAME_DIGITAL ( E3_SERIAL_RX_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E3_SERIAL_RX_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E4_SERIAL_TX ) <nl> - REPORT_NAME_DIGITAL ( 
E4_SERIAL_TX_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E4_SERIAL_TX_PIN ) <nl> # endif <nl> # if PIN_EXISTS ( E4_SERIAL_RX ) <nl> - REPORT_NAME_DIGITAL ( E4_SERIAL_RX_PIN , __LINE__ ) <nl> + REPORT_NAME_DIGITAL ( __LINE__ , E4_SERIAL_RX_PIN ) <nl> # endif <nl>
Sync pinsDebug_list . h between 1 . 1 . x and 2 . 0 . x ( )
MarlinFirmware/Marlin
a6563df705ea69c3da68d907b62ca4fbb3e0045f
2018-02-18T08:25:32Z
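The Marlin change above swaps every long-hand "defined(X) && X >= 0" guard for the shorter _EXISTS() / PIN_EXISTS() helpers and moves __LINE__ to the first argument of the REPORT_NAME_* macros. The diff itself does not show those helpers, so the definitions below are only a plausible sketch of their shape, not the actual Marlin source; report_pin_name is an invented placeholder for whatever routine prints each entry.

    // Assumed shapes of the helper macros relied on by the pin list above.
    #define _EXISTS(PN)    (defined(PN) && PN >= 0)             // bare symbol, e.g. SHIFT_EN
    #define PIN_EXISTS(PN) (defined(PN##_PIN) && PN##_PIN >= 0) // NAME expands to NAME_PIN
    // The report macros take the source line first, then the pin symbol, so the
    // report can print the pin's name (via stringification), its number, and the
    // line of this list that declared it.
    #define REPORT_NAME_DIGITAL(LINE, NAME) report_pin_name(#NAME, NAME, LINE, false);
    #define REPORT_NAME_ANALOG(LINE, NAME)  report_pin_name(#NAME, NAME, LINE, true);
    // Caveat: a defined() produced by macro expansion is formally undefined per the
    // C standard; it is accepted by the GCC toolchains this firmware targets.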
mmm a / tensorflow / cc / gradients / nn_grad . cc <nl> ppp b / tensorflow / cc / gradients / nn_grad . cc <nl> Status LRNGradHelper ( const Scope & scope , const Operation & op , <nl> } <nl> REGISTER_GRADIENT_OP ( " LRN " , LRNGradHelper ) ; <nl> <nl> + Status SoftplusGradHelper ( const Scope & scope , const Operation & op , <nl> + const std : : vector < Output > & grad_inputs , <nl> + std : : vector < Output > * grad_outputs ) { <nl> + auto dx = internal : : SoftplusGrad ( scope , grad_inputs [ 0 ] , op . input ( 0 ) ) ; <nl> + grad_outputs - > push_back ( dx ) ; <nl> + return scope . status ( ) ; <nl> + } <nl> + REGISTER_GRADIENT_OP ( " Softplus " , SoftplusGradHelper ) ; <nl> + <nl> + Status SoftsignGradHelper ( const Scope & scope , const Operation & op , <nl> + const std : : vector < Output > & grad_inputs , <nl> + std : : vector < Output > * grad_outputs ) { <nl> + auto dx = internal : : SoftsignGrad ( scope , grad_inputs [ 0 ] , op . input ( 0 ) ) ; <nl> + grad_outputs - > push_back ( dx ) ; <nl> + return scope . status ( ) ; <nl> + } <nl> + REGISTER_GRADIENT_OP ( " Softsign " , SoftsignGradHelper ) ; <nl> + <nl> + Status FractionalAvgPoolGradHelper ( const Scope & scope , const Operation & op , <nl> + const std : : vector < Output > & grad_inputs , <nl> + std : : vector < Output > * grad_outputs ) { <nl> + bool overlapping ; <nl> + TF_RETURN_IF_ERROR ( <nl> + GetNodeAttr ( op . output ( 0 ) . node ( ) - > attrs ( ) , " overlapping " , & overlapping ) ) ; <nl> + auto dx = internal : : FractionalAvgPoolGrad ( <nl> + scope , Shape ( scope , op . input ( 0 ) , Shape : : OutType ( DT_INT64 ) ) , <nl> + grad_inputs [ 0 ] , op . output ( 1 ) , op . output ( 2 ) , <nl> + internal : : FractionalAvgPoolGrad : : Overlapping ( overlapping ) ) ; <nl> + grad_outputs - > push_back ( dx ) ; <nl> + return scope . status ( ) ; <nl> + } <nl> + REGISTER_GRADIENT_OP ( " FractionalAvgPool " , FractionalAvgPoolGradHelper ) ; <nl> + <nl> + Status FractionalMaxPoolGradHelper ( const Scope & scope , const Operation & op , <nl> + const std : : vector < Output > & grad_inputs , <nl> + std : : vector < Output > * grad_outputs ) { <nl> + bool overlapping ; <nl> + TF_RETURN_IF_ERROR ( <nl> + GetNodeAttr ( op . output ( 0 ) . node ( ) - > attrs ( ) , " overlapping " , & overlapping ) ) ; <nl> + auto dx = internal : : FractionalMaxPoolGrad ( <nl> + scope , op . input ( 0 ) , op . output ( 0 ) , grad_inputs [ 0 ] , op . output ( 1 ) , <nl> + op . output ( 2 ) , internal : : FractionalMaxPoolGrad : : Overlapping ( overlapping ) ) ; <nl> + grad_outputs - > push_back ( dx ) ; <nl> + return scope . status ( ) ; <nl> + } <nl> + REGISTER_GRADIENT_OP ( " FractionalMaxPool " , FractionalMaxPoolGradHelper ) ; <nl> + <nl> } / / anonymous namespace <nl> } / / namespace ops <nl> } / / namespace tensorflow <nl> mmm a / tensorflow / cc / gradients / nn_grad_test . cc <nl> ppp b / tensorflow / cc / gradients / nn_grad_test . 
cc <nl> namespace { <nl> using ops : : BiasAdd ; <nl> using ops : : Conv2D ; <nl> using ops : : Elu ; <nl> + using ops : : FractionalAvgPool ; <nl> + using ops : : FractionalMaxPool ; <nl> using ops : : L2Loss ; <nl> using ops : : LogSoftmax ; <nl> using ops : : LRN ; <nl> using ops : : Relu ; <nl> using ops : : Relu6 ; <nl> using ops : : Selu ; <nl> using ops : : Softmax ; <nl> + using ops : : Softplus ; <nl> + using ops : : Softsign ; <nl> <nl> class NNGradTest : public : : testing : : Test { <nl> protected : <nl> class NNGradTest : public : : testing : : Test { <nl> EXPECT_LT ( max_error , 1e - 3 ) ; <nl> } <nl> <nl> - / / Sets tensor with random values , ensuring that the max value is largest by <nl> - / / a reasonable amount . <nl> - / / This is an issue for MaxPool , MaxPoolV2 and MaxPool3D , in which <nl> - / / perturbations by the numeric gradient computation in the gradient checker <nl> - / / can change the max value if values are too close together . <nl> + / / Sets tensor with random values , ensuring that every pair of elements are at <nl> + / / least a reasonable amount apart . <nl> + / / This is an issue for max pooling operations , in which perturbations by the <nl> + / / numeric gradient computation in the gradient checker can change the max <nl> + / / value if a pool has values that are too close together . <nl> template < typename T > <nl> - void SetRandomValuesWithBumpedMax ( Tensor * tensor ) { <nl> + void SetRandomValuesForMaxPooling ( Tensor * tensor ) { <nl> auto tensor_flat = tensor - > flat < T > ( ) ; <nl> - tensor_flat . setRandom ( ) ; <nl> - int32 max_index = 0 ; <nl> - for ( size_t i = 1 ; i < tensor - > NumElements ( ) ; i + + ) { <nl> - if ( tensor_flat ( i ) > tensor_flat ( max_index ) ) { <nl> - max_index = i ; <nl> - } <nl> + / / First set the array to an increasing sequence of values spaced <nl> + / / a reasonable amount apart <nl> + T cur = 0 ; <nl> + for ( size_t i = 0 ; i < tensor - > NumElements ( ) ; i + + ) { <nl> + tensor_flat ( i ) = cur ; <nl> + cur + = 5e - 2 ; <nl> + } <nl> + / / Fischer - Yates shuffle the array <nl> + for ( size_t i = tensor - > NumElements ( ) - 1 ; i > = 1 ; i - - ) { <nl> + / / j < - random integer 0 < = j < = i <nl> + size_t j = random : : New64 ( ) % ( i + 1 ) ; <nl> + / / swap values at i , j <nl> + T tmp = tensor_flat ( i ) ; <nl> + tensor_flat ( i ) = tensor_flat ( j ) ; <nl> + tensor_flat ( j ) = tmp ; <nl> } <nl> - tensor_flat ( max_index ) + = 1e - 2 ; <nl> } <nl> <nl> Scope scope_ ; <nl> TEST_F ( NNGradTest , MaxPoolGradHelper ) { <nl> const std : : vector < int > strides { 1 , 2 , 2 , 1 } ; <nl> auto y = MaxPool ( scope_ , x , ksize , strides , " VALID " ) ; <nl> Tensor x_init_value = Tensor ( DT_FLOAT , x_shape ) ; <nl> - SetRandomValuesWithBumpedMax < float > ( & x_init_value ) ; <nl> + SetRandomValuesForMaxPooling < float > ( & x_init_value ) ; <nl> RunTest ( x , x_init_value , y , y_shape ) ; <nl> } <nl> <nl> TEST_F ( NNGradTest , MaxPoolGradV2Helper ) { <nl> Tensor strides = test : : AsTensor < int > ( { 1 , 2 , 2 , 1 } , { 4 } ) ; <nl> auto y = MaxPoolV2 ( scope_ , x , ksize , strides , " VALID " ) ; <nl> Tensor x_init_value = Tensor ( DT_FLOAT , x_shape ) ; <nl> - SetRandomValuesWithBumpedMax < float > ( & x_init_value ) ; <nl> + SetRandomValuesForMaxPooling < float > ( & x_init_value ) ; <nl> RunTest ( x , x_init_value , y , y_shape ) ; <nl> } <nl> <nl> TEST_F ( NNGradTest , MaxPool3DGradHelper ) { <nl> const std : : vector < int > strides { 1 , 3 , 3 , 3 , 1 } ; <nl> auto y = MaxPool3D ( scope_ , x , 
ksize , strides , " VALID " ) ; <nl> Tensor x_init_value = Tensor ( DT_FLOAT , x_shape ) ; <nl> - SetRandomValuesWithBumpedMax < float > ( & x_init_value ) ; <nl> + SetRandomValuesForMaxPooling < float > ( & x_init_value ) ; <nl> RunTest ( x , x_init_value , y , y_shape ) ; <nl> } <nl> <nl> TEST_F ( NNGradTest , LRN ) { <nl> RunTest ( x , x_shape , y , x_shape ) ; <nl> } <nl> <nl> + TEST_F ( NNGradTest , SoftplusGrad ) { <nl> + TensorShape shape ( { 3 , 7 } ) ; <nl> + auto x = Placeholder ( scope_ , DT_FLOAT , Placeholder : : Shape ( shape ) ) ; <nl> + auto y = Softplus ( scope_ , x ) ; <nl> + RunTest ( x , shape , y , shape ) ; <nl> + } <nl> + <nl> + TEST_F ( NNGradTest , SoftsignGrad ) { <nl> + TensorShape shape ( { 3 , 7 } ) ; <nl> + auto x = Placeholder ( scope_ , DT_FLOAT , Placeholder : : Shape ( shape ) ) ; <nl> + auto y = Softsign ( scope_ , x ) ; <nl> + RunTest ( x , shape , y , shape ) ; <nl> + } <nl> + <nl> + TEST_F ( NNGradTest , FractionalAvgPoolGradHelper ) { <nl> + TensorShape x_shape ( { 1 , 3 , 7 , 1 } ) ; <nl> + auto x = Placeholder ( scope_ , DT_FLOAT , Placeholder : : Shape ( x_shape ) ) ; <nl> + / / Force consistent pooling regions for unit testing . <nl> + auto y = FractionalAvgPool ( <nl> + scope_ , x , { 1 , 1 . 2 , 1 . 9 , 1 } , <nl> + FractionalAvgPool : : Deterministic ( true ) . Overlapping ( true ) . Seed ( 1 ) . Seed2 ( <nl> + 2 ) ) ; <nl> + TensorShape y_shape ( { 1 , 2 , 3 , 1 } ) ; <nl> + RunTest ( x , x_shape , y . output , y_shape ) ; <nl> + } <nl> + <nl> + TEST_F ( NNGradTest , FractionalMaxPoolGradHelper ) { <nl> + TensorShape x_shape ( { 1 , 3 , 7 , 1 } ) ; <nl> + auto x = Placeholder ( scope_ , DT_FLOAT , Placeholder : : Shape ( x_shape ) ) ; <nl> + / / Force consistent pooling regions for unit testing . <nl> + auto y = FractionalMaxPool ( <nl> + scope_ , x , { 1 , 1 . 2 , 1 . 9 , 1 } , <nl> + FractionalMaxPool : : Deterministic ( true ) . Overlapping ( true ) . Seed ( 1 ) . Seed2 ( <nl> + 2 ) ) ; <nl> + Tensor x_init_value = Tensor ( DT_FLOAT , x_shape ) ; <nl> + SetRandomValuesForMaxPooling < float > ( & x_init_value ) ; <nl> + TensorShape y_shape ( { 1 , 2 , 3 , 1 } ) ; <nl> + RunTest ( x , x_init_value , y . output , y_shape ) ; <nl> + } <nl> + <nl> } / / namespace <nl> } / / namespace tensorflow <nl>
Merge pull request from kbsriram / easy - nn - grads
tensorflow/tensorflow
7d5f8f40deb228473951f30ba3b3ff864da5c444
2018-05-25T17:38:23Z
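The test helper above replaces bumping a single maximum with an evenly spaced, shuffled fill, so every pair of inputs to a max-style pool differs by a fixed margin and the gradient checker's perturbations cannot flip the arg-max. Below is a self-contained sketch of the same spacing-plus-shuffle idea; the function name, step size and fixed seed are illustrative, whereas the TensorFlow helper fills a Tensor in place and draws its indices from random::New64().

    #include <cstddef>
    #include <random>
    #include <utility>
    #include <vector>

    // Fill with an increasing sequence spaced `step` apart, then Fisher-Yates
    // shuffle: all values remain pairwise separated by at least `step`.
    std::vector<float> WellSeparatedRandomValues(std::size_t n, float step = 5e-2f) {
      std::vector<float> v(n);
      for (std::size_t i = 0; i < n; ++i) v[i] = step * static_cast<float>(i);
      if (n < 2) return v;
      std::mt19937_64 rng(42);  // any seed works; separation, not randomness, is the point
      for (std::size_t i = n - 1; i >= 1; --i) {
        std::uniform_int_distribution<std::size_t> pick(0, i);
        std::swap(v[i], v[pick(rng)]);
      }
      return v;
    }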
mmm a / modules / planning / tasks / optimizers / open_space_trajectory_generation / open_space_trajectory_provider . cc <nl> ppp b / modules / planning / tasks / optimizers / open_space_trajectory_generation / open_space_trajectory_provider . cc <nl> Status OpenSpaceTrajectoryProvider : : Process ( ) { <nl> auto stitching_trajectory = TrajectoryStitcher : : ComputeStitchingTrajectory ( <nl> vehicle_state , start_timestamp , planning_cycle_time , <nl> & last_frame_complete_trajectory , & replan_reason ) ; <nl> - <nl> / / Get open_space_info from current frame <nl> const auto & open_space_info = frame_ - > open_space_info ( ) ; <nl> <nl> Status OpenSpaceTrajectoryProvider : : Process ( ) { <nl> thread_data_ . obstacles_vertices_vec = <nl> open_space_info . obstacles_vertices_vec ( ) ; <nl> thread_data_ . XYbounds = open_space_info . ROI_xy_boundary ( ) ; <nl> + data_ready_ . store ( true ) ; <nl> } <nl> <nl> / / Check vehicle state <nl> Status OpenSpaceTrajectoryProvider : : Process ( ) { <nl> frame_ - > mutable_open_space_info ( ) - > sync_debug_instance ( ) ; <nl> } <nl> trajectory_updated_ . store ( false ) ; <nl> + data_ready_ . store ( false ) ; <nl> return Status : : OK ( ) ; <nl> } <nl> <nl> Status OpenSpaceTrajectoryProvider : : Process ( ) { <nl> <nl> void OpenSpaceTrajectoryProvider : : GenerateTrajectoryThread ( ) { <nl> while ( ! is_stop_ ) { <nl> - OpenSpaceTrajectoryThreadData thread_data ; <nl> - { <nl> - std : : lock_guard < std : : mutex > lock ( open_space_mutex_ ) ; <nl> - thread_data = thread_data_ ; <nl> - } <nl> - if ( ! trajectory_updated_ ) { <nl> + if ( ! trajectory_updated_ & & data_ready_ ) { <nl> + OpenSpaceTrajectoryThreadData thread_data ; <nl> + { <nl> + std : : lock_guard < std : : mutex > lock ( open_space_mutex_ ) ; <nl> + thread_data = thread_data_ ; <nl> + } <nl> Status status = open_space_trajectory_optimizer_ - > Plan ( <nl> thread_data . stitching_trajectory , thread_data . end_pose , <nl> thread_data . XYbounds , thread_data . rotate_angle , <nl> thread_data . translate_origin , thread_data . obstacles_edges_num , <nl> thread_data . obstacles_A , thread_data . obstacles_b , <nl> - thread_data_ . obstacles_vertices_vec ) ; <nl> + thread_data . obstacles_vertices_vec ) ; <nl> if ( status = = Status : : OK ( ) ) { <nl> trajectory_updated_ . store ( true ) ; <nl> } else { <nl> mmm a / modules / planning / tasks / optimizers / open_space_trajectory_generation / open_space_trajectory_provider . h <nl> ppp b / modules / planning / tasks / optimizers / open_space_trajectory_generation / open_space_trajectory_provider . h <nl> class OpenSpaceTrajectoryProvider : public TrajectoryOptimizer { <nl> std : : future < void > task_future_ ; <nl> std : : atomic < bool > is_stop_ { false } ; <nl> std : : atomic < bool > trajectory_updated_ { false } ; <nl> + std : : atomic < bool > data_ready_ { false } ; <nl> std : : atomic < bool > trajectory_error_ { false } ; <nl> std : : atomic < bool > trajectory_skipped_ { false } ; <nl> std : : mutex open_space_mutex_ ; <nl>
Planning : fix bugs caused by thread data not being updated
ApolloAuto/apollo
479507ebfd81f954c76b84fec0fded216c033a68
2019-03-27T21:35:54Z
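The fix above adds a data_ready_ flag so the background trajectory thread only snapshots thread_data_ after Process() has populated it, and then plans on its local copy rather than the shared member. Below is a minimal sketch of that flag-guarded hand-off between a planning cycle and a worker thread; every name in it is illustrative rather than Apollo's.

    #include <atomic>
    #include <mutex>
    #include <thread>

    struct SharedData { int payload = 0; };

    std::mutex mu;
    SharedData shared;
    std::atomic<bool> data_ready{false};
    std::atomic<bool> result_ready{false};
    std::atomic<bool> stop{false};

    // Producer: fill the shared snapshot under the lock, then publish the flag.
    void ProduceCycle(int value) {
      {
        std::lock_guard<std::mutex> lock(mu);
        shared.payload = value;
      }
      data_ready.store(true);  // raised only once the snapshot is complete
    }

    // Consumer: copy the snapshot only when it is marked ready, then do the
    // long-running work on the local copy without holding the lock.
    void WorkerLoop() {
      while (!stop) {
        if (!result_ready && data_ready) {
          SharedData local;
          {
            std::lock_guard<std::mutex> lock(mu);
            local = shared;
          }
          // ... expensive planning on `local` ...
          result_ready.store(true);
        }
        std::this_thread::yield();
      }
    }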
mmm a / hphp / runtime / base / tv - comparisons . cpp <nl> ppp b / hphp / runtime / base / tv - comparisons . cpp <nl> namespace HPHP { <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> - extern bool collectionEquals ( ObjectData * , ObjectData * ) ; <nl> + extern bool collectionEquals ( const ObjectData * , const ObjectData * ) ; <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> struct Eq { <nl> if ( od1 = = od2 ) return true ; <nl> if ( od1 - > getVMClass ( ) ! = od2 - > getVMClass ( ) ) return false ; <nl> if ( od1 - > isCollection ( ) ) { <nl> - / / TODO constness <nl> - return collectionEquals ( const_cast < ObjectData * > ( od1 ) , <nl> - const_cast < ObjectData * > ( od2 ) ) ; <nl> + return collectionEquals ( od1 , od2 ) ; <nl> } <nl> Array ar1 ( od1 - > o_toArray ( ) ) ; <nl> Array ar2 ( od2 - > o_toArray ( ) ) ; <nl> mmm a / hphp / runtime / ext / ext_collections . cpp <nl> ppp b / hphp / runtime / ext / ext_collections . cpp <nl> void c_Vector : : OffsetUnset ( ObjectData * obj , TypedValue * key ) { <nl> throw e ; <nl> } <nl> <nl> - bool c_Vector : : Equals ( ObjectData * obj1 , ObjectData * obj2 ) { <nl> - auto vec1 = static_cast < c_Vector * > ( obj1 ) ; <nl> - auto vec2 = static_cast < c_Vector * > ( obj2 ) ; <nl> + bool c_Vector : : Equals ( const ObjectData * obj1 , const ObjectData * obj2 ) { <nl> + auto vec1 = static_cast < const c_Vector * > ( obj1 ) ; <nl> + auto vec2 = static_cast < const c_Vector * > ( obj2 ) ; <nl> uint sz = vec1 - > m_size ; <nl> if ( sz ! = vec2 - > m_size ) { <nl> return false ; <nl> void c_Map : : OffsetUnset ( ObjectData * obj , TypedValue * key ) { <nl> throwBadKeyType ( ) ; <nl> } <nl> <nl> - bool c_Map : : Equals ( ObjectData * obj1 , ObjectData * obj2 ) { <nl> - auto mp1 = static_cast < c_Map * > ( obj1 ) ; <nl> - auto mp2 = static_cast < c_Map * > ( obj2 ) ; <nl> + bool c_Map : : Equals ( const ObjectData * obj1 , const ObjectData * obj2 ) { <nl> + auto mp1 = static_cast < const c_Map * > ( obj1 ) ; <nl> + auto mp2 = static_cast < const c_Map * > ( obj2 ) ; <nl> if ( mp1 - > m_size ! = mp2 - > m_size ) return false ; <nl> for ( uint i = 0 ; i < = mp1 - > m_nLastSlot ; + + i ) { <nl> c_Map : : Bucket & p = mp1 - > m_data [ i ] ; <nl> void c_StableMap : : OffsetUnset ( ObjectData * obj , TypedValue * key ) { <nl> throwBadKeyType ( ) ; <nl> } <nl> <nl> - bool c_StableMap : : Equals ( ObjectData * obj1 , ObjectData * obj2 ) { <nl> - auto smp1 = static_cast < c_StableMap * > ( obj1 ) ; <nl> - auto smp2 = static_cast < c_StableMap * > ( obj2 ) ; <nl> + bool c_StableMap : : Equals ( const ObjectData * obj1 , const ObjectData * obj2 ) { <nl> + auto smp1 = static_cast < const c_StableMap * > ( obj1 ) ; <nl> + auto smp2 = static_cast < const c_StableMap * > ( obj2 ) ; <nl> if ( smp1 - > m_size ! 
= smp2 - > m_size ) return false ; <nl> auto p1 = smp1 - > m_pListHead ; <nl> auto p2 = smp2 - > m_pListHead ; <nl> void c_Set : : OffsetUnset ( ObjectData * obj , TypedValue * key ) { <nl> c_Set : : throwNoIndexAccess ( ) ; <nl> } <nl> <nl> - bool c_Set : : Equals ( ObjectData * obj1 , ObjectData * obj2 ) { <nl> - auto st1 = static_cast < c_Set * > ( obj1 ) ; <nl> - auto st2 = static_cast < c_Set * > ( obj2 ) ; <nl> + bool c_Set : : Equals ( const ObjectData * obj1 , const ObjectData * obj2 ) { <nl> + auto st1 = static_cast < const c_Set * > ( obj1 ) ; <nl> + auto st2 = static_cast < const c_Set * > ( obj2 ) ; <nl> if ( st1 - > m_size ! = st2 - > m_size ) return false ; <nl> for ( uint i = 0 ; i < = st1 - > m_nLastSlot ; + + i ) { <nl> c_Set : : Bucket & p = st1 - > m_data [ i ] ; <nl> void c_Pair : : OffsetUnset ( ObjectData * obj , TypedValue * key ) { <nl> throw e ; <nl> } <nl> <nl> - bool c_Pair : : Equals ( ObjectData * obj1 , ObjectData * obj2 ) { <nl> - auto pair1 = static_cast < c_Pair * > ( obj1 ) ; <nl> - auto pair2 = static_cast < c_Pair * > ( obj2 ) ; <nl> + bool c_Pair : : Equals ( const ObjectData * obj1 , const ObjectData * obj2 ) { <nl> + auto pair1 = static_cast < const c_Pair * > ( obj1 ) ; <nl> + auto pair2 = static_cast < const c_Pair * > ( obj2 ) ; <nl> return equal ( tvAsCVarRef ( & pair1 - > elm0 ) , tvAsCVarRef ( & pair2 - > elm0 ) ) & & <nl> equal ( tvAsCVarRef ( & pair1 - > elm1 ) , tvAsCVarRef ( & pair2 - > elm1 ) ) ; <nl> } <nl> void collectionUnserialize ( ObjectData * obj , VariableUnserializer * uns , <nl> } <nl> } <nl> <nl> - bool collectionEquals ( ObjectData * obj1 , ObjectData * obj2 ) { <nl> + bool collectionEquals ( const ObjectData * obj1 , const ObjectData * obj2 ) { <nl> int ct = obj1 - > getCollectionType ( ) ; <nl> assert ( ct = = obj2 - > getCollectionType ( ) ) ; <nl> switch ( ct ) { <nl> mmm a / hphp / runtime / ext / ext_collections . h <nl> ppp b / hphp / runtime / ext / ext_collections . h <nl> class c_Vector : public ExtObjectDataFlags < ObjectData : : VectorAttrInit | <nl> static bool OffsetContains ( ObjectData * obj , TypedValue * key ) ; <nl> static void OffsetUnset ( ObjectData * obj , TypedValue * key ) ; <nl> static void OffsetAppend ( ObjectData * obj , TypedValue * val ) ; <nl> - static bool Equals ( ObjectData * obj1 , ObjectData * obj2 ) ; <nl> + static bool Equals ( const ObjectData * obj1 , const ObjectData * obj2 ) ; <nl> static void Unserialize ( ObjectData * obj , VariableUnserializer * uns , <nl> int64_t sz , char type ) ; <nl> <nl> class c_Map : public ExtObjectDataFlags < ObjectData : : MapAttrInit | <nl> static void throwOOB ( int64_t key ) ATTRIBUTE_COLD ATTRIBUTE_NORETURN ; <nl> static void throwOOB ( StringData * key ) ATTRIBUTE_COLD ATTRIBUTE_NORETURN ; <nl> <nl> - TypedValue * at ( int64_t key ) { <nl> + TypedValue * at ( int64_t key ) const { <nl> Bucket * p = find ( key ) ; <nl> if ( LIKELY ( p ! = NULL ) ) return & p - > data ; <nl> throwOOB ( key ) ; <nl> return NULL ; <nl> } <nl> - TypedValue * get ( int64_t key ) { <nl> + TypedValue * get ( int64_t key ) const { <nl> Bucket * p = find ( key ) ; <nl> if ( p ) return & p - > data ; <nl> return NULL ; <nl> } <nl> - TypedValue * at ( StringData * key ) { <nl> + TypedValue * at ( StringData * key ) const { <nl> Bucket * p = find ( key - > data ( ) , key - > size ( ) , key - > hash ( ) ) ; <nl> if ( LIKELY ( p ! 
= NULL ) ) return & p - > data ; <nl> throwOOB ( key ) ; <nl> return NULL ; <nl> } <nl> - TypedValue * get ( StringData * key ) { <nl> + TypedValue * get ( StringData * key ) const { <nl> Bucket * p = find ( key - > data ( ) , key - > size ( ) , key - > hash ( ) ) ; <nl> if ( p ) return & p - > data ; <nl> return NULL ; <nl> class c_Map : public ExtObjectDataFlags < ObjectData : : MapAttrInit | <nl> + + m_version ; <nl> erase ( find ( key - > data ( ) , key - > size ( ) , key - > hash ( ) ) ) ; <nl> } <nl> - bool contains ( int64_t key ) { <nl> + bool contains ( int64_t key ) const { <nl> return find ( key ) ; <nl> } <nl> - bool contains ( StringData * key ) { <nl> + bool contains ( StringData * key ) const { <nl> return find ( key - > data ( ) , key - > size ( ) , key - > hash ( ) ) ; <nl> } <nl> void reserve ( int64_t sz ) { <nl> class c_Map : public ExtObjectDataFlags < ObjectData : : MapAttrInit | <nl> static bool OffsetContains ( ObjectData * obj , TypedValue * key ) ; <nl> static void OffsetUnset ( ObjectData * obj , TypedValue * key ) ; <nl> static void OffsetAppend ( ObjectData * obj , TypedValue * val ) ; <nl> - static bool Equals ( ObjectData * obj1 , ObjectData * obj2 ) ; <nl> + static bool Equals ( const ObjectData * obj1 , const ObjectData * obj2 ) ; <nl> static void Unserialize ( ObjectData * obj , VariableUnserializer * uns , <nl> int64_t sz , char type ) ; <nl> <nl> class c_StableMap : public ExtObjectDataFlags < ObjectData : : StableMapAttrInit | <nl> static bool OffsetContains ( ObjectData * obj , TypedValue * key ) ; <nl> static void OffsetUnset ( ObjectData * obj , TypedValue * key ) ; <nl> static void OffsetAppend ( ObjectData * obj , TypedValue * val ) ; <nl> - static bool Equals ( ObjectData * obj1 , ObjectData * obj2 ) ; <nl> + static bool Equals ( const ObjectData * obj1 , const ObjectData * obj2 ) ; <nl> static void Unserialize ( ObjectData * obj , VariableUnserializer * uns , <nl> int64_t sz , char type ) ; <nl> <nl> class c_Set : public ExtObjectDataFlags < ObjectData : : SetAttrInit | <nl> static bool OffsetContains ( ObjectData * obj , TypedValue * key ) ; <nl> static void OffsetUnset ( ObjectData * obj , TypedValue * key ) ; <nl> static void OffsetAppend ( ObjectData * obj , TypedValue * val ) ; <nl> - static bool Equals ( ObjectData * obj1 , ObjectData * obj2 ) ; <nl> + static bool Equals ( const ObjectData * obj1 , const ObjectData * obj2 ) ; <nl> static void Unserialize ( ObjectData * obj , VariableUnserializer * uns , <nl> int64_t sz , char type ) ; <nl> <nl> class c_Pair : public ExtObjectDataFlags < ObjectData : : PairAttrInit | <nl> static bool OffsetContains ( ObjectData * obj , TypedValue * key ) ; <nl> static void OffsetUnset ( ObjectData * obj , TypedValue * key ) ; <nl> static void OffsetAppend ( ObjectData * obj , TypedValue * val ) ; <nl> - static bool Equals ( ObjectData * obj1 , ObjectData * obj2 ) ; <nl> + static bool Equals ( const ObjectData * obj1 , const ObjectData * obj2 ) ; <nl> static void Unserialize ( ObjectData * obj , VariableUnserializer * uns , <nl> int64_t sz , char type ) ; <nl> int64_t size ( ) const { <nl> int64_t collectionSize ( ObjectData * obj ) ; <nl> void collectionReserve ( ObjectData * obj , int64_t sz ) ; <nl> void collectionUnserialize ( ObjectData * obj , VariableUnserializer * uns , <nl> int64_t sz , char type ) ; <nl> - bool collectionEquals ( ObjectData * obj1 , ObjectData * obj2 ) ; <nl> + bool collectionEquals ( const ObjectData * obj1 , const ObjectData * obj2 ) ; <nl> void collectionDeepCopyTV ( TypedValue * 
tv ) ; <nl> ArrayData * collectionDeepCopyArray ( ArrayData * arr ) ; <nl> ObjectData * collectionDeepCopyVector ( c_Vector * vec ) ; <nl>
Some const - correctness fixes for collections
facebook/hhvm
c608b6cbd1dbf122e4b679786c793772fbfabbe0
2013-08-05T21:42:16Z
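The commit above threads const through the comparison path: once collectionEquals() and the per-class Equals() take const ObjectData pointers, the lookup helpers they depend on (find, at, get, contains) must be const-qualified as well, which is what lets the old const_cast workaround disappear. Below is a small self-contained sketch of that const-propagation, using invented types rather than HHVM's.

    #include <vector>

    struct Bucket { int key = 0; int data = 0; };

    class MapLike {
     public:
      // const-qualified lookup: callable on const objects, returns const data.
      const Bucket* find(int key) const {
        for (const Bucket& b : buckets_) if (b.key == key) return &b;
        return nullptr;
      }
      // Equals can now take const pointers with no const_cast needed.
      static bool Equals(const MapLike* a, const MapLike* b) {
        if (a->buckets_.size() != b->buckets_.size()) return false;
        for (const Bucket& item : a->buckets_) {
          const Bucket* other = b->find(item.key);
          if (other == nullptr || other->data != item.data) return false;
        }
        return true;
      }
     private:
      std::vector<Bucket> buckets_;
    };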
mmm a / src / caffe / test / test_inner_product_layer . cpp <nl> ppp b / src / caffe / test / test_inner_product_layer . cpp <nl> TYPED_TEST ( InnerProductLayerTest , TestSetUp ) { <nl> EXPECT_EQ ( this - > blob_top_ - > channels ( ) , 10 ) ; <nl> } <nl> <nl> - / * * @ brief TestSetUp while toggling tranpose flag <nl> + / * * @ brief TestSetUp while toggling transpose flag <nl> * / <nl> - TYPED_TEST ( InnerProductLayerTest , TestSetUpTranposeFalse ) { <nl> + TYPED_TEST ( InnerProductLayerTest , TestSetUpTransposeFalse ) { <nl> typedef typename TypeParam : : Dtype Dtype ; <nl> this - > blob_bottom_vec_ . push_back ( this - > blob_bottom_ ) ; <nl> LayerParameter layer_param ; <nl> TYPED_TEST ( InnerProductLayerTest , TestSetUpTranposeFalse ) { <nl> EXPECT_EQ ( 60 , layer - > blobs ( ) [ 0 ] - > shape ( 1 ) ) ; <nl> } <nl> <nl> - / * * @ brief TestSetUp while toggling tranpose flag <nl> + / * * @ brief TestSetUp while toggling transpose flag <nl> * / <nl> - TYPED_TEST ( InnerProductLayerTest , TestSetUpTranposeTrue ) { <nl> + TYPED_TEST ( InnerProductLayerTest , TestSetUpTransposeTrue ) { <nl> typedef typename TypeParam : : Dtype Dtype ; <nl> this - > blob_bottom_vec_ . push_back ( this - > blob_bottom_ ) ; <nl> LayerParameter layer_param ; <nl> TYPED_TEST ( InnerProductLayerTest , TestBackwardTranspose ) { <nl> / / copy bottom diffs <nl> Blob < Dtype > * const bottom_diff = new Blob < Dtype > ( ) ; <nl> bottom_diff - > CopyFrom ( * this - > blob_bottom_vec_ [ 0 ] , true , true ) ; <nl> - / / repeat original top with tranposed ip <nl> + / / repeat original top with transposed ip <nl> this - > blob_top_vec_ . clear ( ) ; <nl> this - > blob_top_vec_ . push_back ( new Blob < Dtype > ( ) ) ; <nl> inner_product_param - > set_transpose ( true ) ; <nl>
Typos in test_inner_product_layer . cpp
BVLC/caffe
1fd8bd0b4a842aa5a9d7ea1ec88d4cdd7eaf3b99
2016-12-30T14:47:20Z
mmm a / Doxygen / Examples . Durham / graph1 <nl> ppp b / Doxygen / Examples . Durham / graph1 <nl> <nl> avocado > var Graph = require ( " graph " ) . Graph ; <nl> <nl> - avocado > new Graph ( db . vertices , edges . edges ) ; <nl> + avocado > new Graph ( " graph " , db . vertices , edges . edges ) ; <nl> Graph ( " vertices " , " edges " ) <nl> <nl> - avocado > new Graph ( " vertices " , " edges " ) ; <nl> + avocado > new Graph ( " graph " , " vertices " , " edges " ) ; <nl> Graph ( " vertices " , " edges " ) <nl> mmm a / Doxygen / Examples . Durham / graph10 <nl> ppp b / Doxygen / Examples . Durham / graph10 <nl> <nl> - avocado > e = g . addEdge ( v1 , v2 , " knows " , { weight : 10 } ) ; <nl> + avocado > e = g . addEdge ( v1 , v2 , null , " knows " , { weight : 10 } ) ; <nl> Edge ( < graph > , " 3999653 : 5570857 " ) <nl> <nl> avocado > e . getLabel ( ) ; <nl> mmm a / Doxygen / Examples . Durham / graph12 <nl> ppp b / Doxygen / Examples . Durham / graph12 <nl> <nl> avocado > v = g . addVertex ( 1 ) ; <nl> Vertex ( 1 ) <nl> <nl> - avocado > e = g . addEdge ( 2 , v , v , " self " , { " weight " : 10 } ) ; <nl> + avocado > e = g . addEdge ( v , v , 2 , " self " , { " weight " : 10 } ) ; <nl> Edge ( 2 ) <nl> <nl> avocado > e . getProperty ( " weight " ) ; <nl> mmm a / Doxygen / Examples . Durham / graph13 <nl> ppp b / Doxygen / Examples . Durham / graph13 <nl> <nl> avocado > v = g . addVertex ( ) ; <nl> Vertex ( < graph > , " 153246 : 2310672 " ) <nl> <nl> - avocado > e = g . addEdge ( 1 , v , v , " self " ) ; <nl> + avocado > e = g . addEdge ( v , v , 1 , " self " ) ; <nl> Edge ( 1 ) <nl> <nl> avocado > e . getId ( ) ; <nl> mmm a / Doxygen / Examples . Durham / graph14 <nl> ppp b / Doxygen / Examples . Durham / graph14 <nl> <nl> avocado > v = g . addVertex ( 1 ) ; <nl> Vertex ( 1 ) <nl> <nl> - avocado > e = g . addEdge ( 2 , v , v , " self " , { weight : 10 } ) <nl> + avocado > e = g . addEdge ( v , v , 2 , " self " , { weight : 10 } ) <nl> Edge ( 2 ) <nl> <nl> avocado > e . getPropert ( " weight " ) <nl> mmm a / Doxygen / Examples . Durham / graph15 <nl> ppp b / Doxygen / Examples . Durham / graph15 <nl> <nl> avocado > var Graph = require ( " graph " ) . Graph ; <nl> <nl> - avocado > g = new Graph ( " vertices " , " edges " ) ; <nl> + avocado > g = new Graph ( " graph " , " vertices " , " edges " ) ; <nl> <nl> avocado > v1 = g . addVertex ( ) ; <nl> Vertex ( < graph > , " 153246 : 2310672 " ) <nl> mmm a / Doxygen / Examples . Durham / graph16 <nl> ppp b / Doxygen / Examples . Durham / graph16 <nl> <nl> avocado > var Graph = require ( " graph " ) . Graph ; <nl> <nl> - avocado > g = new Graph ( " vertices " , " edges " ) ; <nl> + avocado > g = new Graph ( " graph " , " vertices " , " edges " ) ; <nl> <nl> - avocado > v1 = g . addVertex ( { name : " Hugo " } ) ; <nl> + avocado > v1 = g . addVertex ( null , { name : " Hugo " } ) ; <nl> Vertex ( < graph > , " 153246 : 2310672 " ) <nl> <nl> - avocado > v2 = g . addVertex ( { name : " Emil " } ) ; <nl> + avocado > v2 = g . addVertex ( null , { name : " Emil " } ) ; <nl> Vertex ( < graph > , " 153246 : 2310673 " ) <nl> <nl> - avocado > e = g . addEdge ( v1 , v2 , " knows " , { " weight " : 10 } ) ; <nl> + avocado > e = g . addEdge ( v1 , v2 , null , " knows " , { " weight " : 10 } ) ; <nl> Edge ( < graph > , " 3999653 : 7197720 " ) <nl> <nl> avocado > v1 . inbound ( ) ; <nl> mmm a / Doxygen / Examples . Durham / graph17 <nl> ppp b / Doxygen / Examples . Durham / graph17 <nl> <nl> avocado > var Graph = require ( " graph " ) . 
Graph ; <nl> <nl> - avocado > g = new Graph ( " vertices " , " edges " ) ; <nl> + avocado > g = new Graph ( " graph " , " vertices " , " edges " ) ; <nl> <nl> - avocado > v1 = g . addVertex ( { name : " Hugo " } ) ; <nl> + avocado > v1 = g . addVertex ( null , { name : " Hugo " } ) ; <nl> Vertex ( < graph > , " 153246 : 2310672 " ) <nl> <nl> - avocado > v2 = g . addVertex ( { name : " Emil " } ) ; <nl> + avocado > v2 = g . addVertex ( null , { name : " Emil " } ) ; <nl> Vertex ( < graph > , " 153246 : 2310673 " ) <nl> <nl> - avocado > e = g . addEdge ( v1 , v2 , " knows " , { " weight " : 10 } ) ; <nl> + avocado > e = g . addEdge ( v1 , v2 , null , " knows " , { " weight " : 10 } ) ; <nl> Edge ( < graph > , " 3999653 : 7197720 " ) <nl> <nl> avocado > v1 . outbound ( ) ; <nl> mmm a / Doxygen / Examples . Durham / graph18 <nl> ppp b / Doxygen / Examples . Durham / graph18 <nl> <nl> avocado > var Graph = require ( " graph " ) . Graph ; <nl> <nl> - avocado > g = new Graph ( " vertices " , " edges " ) ; <nl> + avocado > g = new Graph ( " graphs " , " vertices " , " edges " ) ; <nl> <nl> - avocado > v1 = g . addVertex ( { name : " Hugo " } ) ; <nl> + avocado > v1 = g . addVertex ( null , { name : " Hugo " } ) ; <nl> Vertex ( < graph > , " 153246 : 2310672 " ) <nl> <nl> - avocado > v2 = g . addVertex ( { name : " Emil " } ) ; <nl> + avocado > v2 = g . addVertex ( null , { name : " Emil " } ) ; <nl> Vertex ( < graph > , " 153246 : 2310673 " ) <nl> <nl> - avocado > e1 = g . addEdge ( v1 , v2 , " knows " ) ; <nl> + avocado > e1 = g . addEdge ( v1 , v2 , null , " knows " ) ; <nl> Edge ( < graph > , " 3999653 : 7360858 " ) <nl> <nl> - avocado > e2 = g . addEdge ( v1 , v2 , " hates " ) ; <nl> + avocado > e2 = g . addEdge ( v1 , v2 , null , " hates " ) ; <nl> Edge ( < graph > , " 3999653 : 7426394 " ) <nl> <nl> avocado > v2 . getInEdges ( ) ; <nl> mmm a / Doxygen / Examples . Durham / graph19 <nl> ppp b / Doxygen / Examples . Durham / graph19 <nl> <nl> avocado > var Graph = require ( " graph " ) . Graph ; <nl> <nl> - avocado > g = new Graph ( " vertices " , " edges " ) ; <nl> + avocado > g = new Graph ( " graph " , " vertices " , " edges " ) ; <nl> <nl> - avocado > v1 = g . addVertex ( { name : " Hugo " } ) ; <nl> + avocado > v1 = g . addVertex ( null , { name : " Hugo " } ) ; <nl> Vertex ( < graph > , " 153246 : 2310672 " ) <nl> <nl> - avocado > v2 = g . addVertex ( { name : " Emil " } ) ; <nl> + avocado > v2 = g . addVertex ( null , { name : " Emil " } ) ; <nl> Vertex ( < graph > , " 153246 : 2310673 " ) <nl> <nl> - avocado > e1 = g . addEdge ( v1 , v2 , " knows " ) ; <nl> + avocado > e1 = g . addEdge ( v1 , v2 , null , " knows " ) ; <nl> Edge ( < graph > , " 3999653 : 7360858 " ) <nl> <nl> - avocado > e2 = g . addEdge ( v1 , v2 , " hates " ) ; <nl> + avocado > e2 = g . addEdge ( v1 , v2 , null , " hates " ) ; <nl> Edge ( < graph > , " 3999653 : 7426394 " ) <nl> <nl> avocado > v1 . getOutEdges ( ) ; <nl> mmm a / Doxygen / Examples . Durham / graph2 <nl> ppp b / Doxygen / Examples . Durham / graph2 <nl> <nl> avocado > var Graph = require ( " graph " ) . Graph ; <nl> <nl> - avocado > g = new Graph ( " vertices " , " edges " ) ; <nl> + avocado > g = new Graph ( " graph " , " vertices " , " edges " ) ; <nl> Graph ( " vertices " , " edges " ) <nl> <nl> avocado > v = g . addVertex ( " hugo " ) ; <nl> mmm a / Doxygen / Examples . Durham / graph20 <nl> ppp b / Doxygen / Examples . Durham / graph20 <nl> <nl> avocado > v = g . addVertex ( 1 ) ; <nl> Vertex ( 1 ) <nl> <nl> - avocado > e = g . 
addEdge ( 2 , v , v , " knows " ) ; <nl> + avocado > e = g . addEdge ( v , v , 2 , " knows " ) ; <nl> Edge ( 2 ) <nl> <nl> avocado > e . getLabel ( ) ; <nl> mmm a / Doxygen / Examples . Durham / graph21 <nl> ppp b / Doxygen / Examples . Durham / graph21 <nl> <nl> avocado > v1 = g . addVertex ( 1 ) ; <nl> Vertex ( 1 ) <nl> <nl> - avocado > e = g . addEdge ( 2 , v , v , " self " ) ; <nl> + avocado > e = g . addEdge ( v , v , 2 , " self " ) ; <nl> Edge ( 2 ) <nl> <nl> avocado > e . getInVertex ( ) ; <nl> mmm a / Doxygen / Examples . Durham / graph22 <nl> ppp b / Doxygen / Examples . Durham / graph22 <nl> <nl> avocado > v = g . addVertex ( 1 ) ; <nl> Vertex ( 1 ) <nl> <nl> - avocado > e = g . addEdge ( 2 , v , v , " self " ) ; <nl> + avocado > e = g . addEdge ( v , v , 2 , " self " ) ; <nl> Edge ( 2 ) <nl> <nl> avocado > e . getOutVertex ( ) ; <nl> mmm a / Doxygen / Examples . Durham / graph23 <nl> ppp b / Doxygen / Examples . Durham / graph23 <nl> <nl> - avocado > v1 . addInEdge ( " K " , v2 , " knows " ) ; <nl> + avocado > v1 . addInEdge ( v2 , " K " , " knows " ) ; <nl> Edge ( " K " ) <nl> <nl> avocado > v1 . getInEdges ( ) ; <nl> mmm a / Doxygen / Examples . Durham / graph24 <nl> ppp b / Doxygen / Examples . Durham / graph24 <nl> <nl> - avocado > v1 . addInEdge ( " D " , v2 , " knows " , { data : 1 } ) ; <nl> + avocado > v1 . addInEdge ( v2 , " D " , " knows " , { data : 1 } ) ; <nl> Edge ( " D " ) <nl> <nl> avocado > v1 . getInEdges ( ) ; <nl> mmm a / Doxygen / Examples . Durham / graph25 <nl> ppp b / Doxygen / Examples . Durham / graph25 <nl> <nl> avocado > var Graph = require ( " graph " ) . Graph ; <nl> <nl> - avocado > g1 = new Graph ( " vertices " , " edges " ) ; <nl> + avocado > g1 = new Graph ( " graph " , " vertices " , " edges " ) ; <nl> Graph ( " vertices " , " edges " ) <nl> mmm a / Doxygen / Examples . Durham / graph26 <nl> ppp b / Doxygen / Examples . Durham / graph26 <nl> <nl> avocado > var Graph = require ( " graph " ) . Graph ; <nl> <nl> - avocado > g2 = new Graph ( " vertices " , " alternativeEdges " ) ; <nl> + avocado > g2 = new Graph ( " graph " , " vertices " , " alternativeEdges " ) ; <nl> Graph ( " vertices " , " alternativeEdges " ) <nl> mmm a / Doxygen / Examples . Durham / graph27 <nl> ppp b / Doxygen / Examples . Durham / graph27 <nl> <nl> avocado > v1 = g . addVertex ( ) ; <nl> Vertex ( < graph > , " 153246 : 8712055 " ) <nl> <nl> - avocado > v1 . addOutEdge ( v2 , " knows " ) ; <nl> + avocado > v1 . addOutEdge ( v2 , null , " knows " ) ; <nl> Edge ( < graph > , " 3999653 : 8974199 " ) <nl> <nl> avocado > v1 . getOutEdges ( ) ; <nl> mmm a / Doxygen / Examples . Durham / graph28 <nl> ppp b / Doxygen / Examples . Durham / graph28 <nl> Vertex ( < graph > , " 153246 : 8712055 " ) <nl> avocado > v2 = g . addVertex ( ) ; <nl> Vertex ( < graph > , " 153246 : 8777591 " ) <nl> <nl> - avocado > v1 . addOutEdge ( v2 , " knows " , { data : 1 } ) ; <nl> + avocado > v1 . addOutEdge ( v2 , null , " knows " , { data : 1 } ) ; <nl> Edge ( < graph > , " 3999653 : 8974199 " ) <nl> <nl> avocado > v1 . getOutEdges ( ) ; <nl> mmm a / Doxygen / Examples . Durham / graph29 <nl> ppp b / Doxygen / Examples . Durham / graph29 <nl> <nl> avocado > var Graph = require ( " graph " ) . Graph ; <nl> <nl> - avocado > g = new Graph ( " vertices " , " edges " ) ; <nl> + avocado > g = new Graph ( " graph " , " vertices " , " edges " ) ; <nl> Graph ( " vertices " , " edges " ) <nl> <nl> avocado > g . addVertex ( 1 ) ; <nl> mmm a / Doxygen / Examples . 
Durham / graph30 <nl> ppp b / Doxygen / Examples . Durham / graph30 <nl> <nl> avocado > var Graph = require ( " graph " ) . Graph ; <nl> <nl> - avocado > g = new Graph ( " vertices " , " edges " ) ; <nl> + avocado > g = new Graph ( " graph " , " vertices " , " edges " ) ; <nl> Graph ( " vertices " , " edges " ) <nl> <nl> avocado > v1 = g . addVertex ( ) ; <nl> mmm a / Doxygen / Examples . Durham / graph31 <nl> ppp b / Doxygen / Examples . Durham / graph31 <nl> <nl> - avocado > e = g . addEdge ( v1 , v2 , { name : " Emil " ) ; <nl> + avocado > e = g . addEdge ( v1 , v2 , null , " " , { name : " Emil " ) ; <nl> Edge ( < graph > , " 3999653 : 5570857 " ) <nl> <nl> avocado > e . getProperty ( " name " ) ; <nl> mmm a / Doxygen / Examples . Durham / graph32 <nl> ppp b / Doxygen / Examples . Durham / graph32 <nl> <nl> avocado > v = g . addVertex ( 1 ) ; <nl> Vertex ( 1 ) <nl> <nl> - avocado > e = g . addEdge ( 2 , v , v , " self " , { weight : 10 } ) <nl> + avocado > e = g . addEdge ( v , v , 2 , " self " , { weight : 10 } ) <nl> Edge ( 2 ) <nl> <nl> avocado > e . getPropertyKeys ( ) <nl> mmm a / Doxygen / Examples . Durham / graph33 <nl> ppp b / Doxygen / Examples . Durham / graph33 <nl> Vertex ( 1 ) <nl> avocado > v2 = g . addVertex ( 2 ) ; <nl> Vertex ( 2 ) <nl> <nl> - avocado > v1 . addInEdge ( " 2 - > 1 " , v2 ) ; <nl> + avocado > v1 . addInEdge ( v2 , null , " 2 - > 1 " ) ; <nl> Edge ( " 2 - > 1 " ) <nl> <nl> avocado > v1 . getInEdges ( ) ; <nl> mmm a / Doxygen / Examples . Durham / graph34 <nl> ppp b / Doxygen / Examples . Durham / graph34 <nl> <nl> avocado > var Graph = require ( " graph " ) . Graph ; <nl> <nl> - avocado > g = new Graph ( " vertices " , " edges " ) ; <nl> + avocado > g = new Graph ( " graph " , " vertices " , " edges " ) ; <nl> Graph ( " vertices " , " edges " ) <nl> <nl> avocado > v1 = g . addVertex ( ) ; <nl> mmm a / Doxygen / Examples . Durham / graph35 <nl> ppp b / Doxygen / Examples . Durham / graph35 <nl> <nl> avocado > var Graph = require ( " graph " ) . Graph ; <nl> <nl> - avocado > g = new Graph ( " vertices " , " edges " ) ; <nl> + avocado > g = new Graph ( " graph " , " vertices " , " edges " ) ; <nl> Graph ( " vertices " , " edges " ) <nl> <nl> avocado > f = g . getVertices ( ) ; <nl> mmm a / Doxygen / Examples . Durham / graph36 <nl> ppp b / Doxygen / Examples . Durham / graph36 <nl> <nl> avocado > var Graph = require ( " graph " ) . Graph ; <nl> <nl> - avocado > g = new Graph ( " vertices " , " edges " ) ; <nl> + avocado > g = new Graph ( " graph " , " vertices " , " edges " ) ; <nl> Graph ( " vertices " , " edges " ) <nl> <nl> avocado > f = g . getEdges ( ) ; <nl> mmm a / Doxygen / Examples . Durham / graph37 <nl> ppp b / Doxygen / Examples . Durham / graph37 <nl> <nl> avocado > var Graph = require ( " graph " ) . Graph ; <nl> <nl> - avocado > g = new Graph ( " vertices " , " edges " ) ; <nl> + avocado > g = new Graph ( " graph " , " vertices " , " edges " ) ; <nl> Graph ( " vertices " , " edges " ) <nl> <nl> avocado > v1 = g . addVertex ( ) ; <nl> mmm a / Doxygen / Examples . Durham / graph38 <nl> ppp b / Doxygen / Examples . Durham / graph38 <nl> <nl> avocado > var Graph = require ( " graph " ) . Graph ; <nl> <nl> - avocado > g = new Graph ( " vertices " , " edges " ) ; <nl> + avocado > g = new Graph ( " graph " , " vertices " , " edges " ) ; <nl> Graph ( " vertices " , " edges " ) <nl> <nl> avocado > v1 = g . addVertex ( ) ; <nl> mmm a / Doxygen / Examples . Durham / graph39 <nl> ppp b / Doxygen / Examples . 
Durham / graph39 <nl> <nl> avocado > var Graph = require ( " graph " ) . Graph ; <nl> <nl> - avocado > g = new Graph ( " vertices " , " edges " ) ; <nl> + avocado > g = new Graph ( " graph " , " vertices " , " edges " ) ; <nl> Graph ( " vertices " , " edges " ) <nl> mmm a / Doxygen / Examples . Durham / graph4 <nl> ppp b / Doxygen / Examples . Durham / graph4 <nl> <nl> avocado > var Graph = require ( " graph " ) . Graph ; <nl> <nl> - avocado > g = new Graph ( " vertices " , " edges " ) ; <nl> + avocado > g = new Graph ( " graph " , " vertices " , " edges " ) ; <nl> Graph ( " vertices " , " edges " ) <nl> <nl> - avocado > v = g . addVertex ( { name : " Hugo " } ) ; <nl> + avocado > v = g . addVertex ( null , { name : " Hugo " } ) ; <nl> Vertex ( < graph > , " 153246 : 2310672 " ) <nl> <nl> avocado > v . properties ( ) ; <nl> mmm a / Doxygen / Examples . Durham / graph5 <nl> ppp b / Doxygen / Examples . Durham / graph5 <nl> <nl> avocado > var Graph = require ( " graph " ) . Graph ; <nl> <nl> - avocado > g = new Graph ( " vertices " , " edges " ) ; <nl> + avocado > g = new Graph ( " graph " , " vertices " , " edges " ) ; <nl> Graph ( " vertices " , " edges " ) <nl> <nl> - avocado > v = g . addVertex ( { name : " Hugo " } ) ; <nl> + avocado > v = g . addVertex ( null , { name : " Hugo " } ) ; <nl> Vertex ( < graph > , " 153246 : 2310672 " ) <nl> <nl> avocado > v . getProperty ( " name " ) ; <nl> mmm a / Doxygen / Examples . Durham / graph6 <nl> ppp b / Doxygen / Examples . Durham / graph6 <nl> <nl> avocado > var Graph = require ( " graph " ) . Graph ; <nl> <nl> - avocado > g = new Graph ( " vertices " , " edges " ) ; <nl> + avocado > g = new Graph ( " graph " , " vertices " , " edges " ) ; <nl> Graph ( " vertices " , " edges " ) <nl> <nl> - avocado > v = g . addVertex ( { name : " Hugo " } ) ; <nl> + avocado > v = g . addVertex ( null , { name : " Hugo " } ) ; <nl> Vertex ( < graph > , " 153246 : 2310672 " ) <nl> <nl> avocado > v . getProperty ( " name " ) ; <nl> mmm a / Doxygen / Examples . Durham / graph7 <nl> ppp b / Doxygen / Examples . Durham / graph7 <nl> <nl> avocado > var Graph = require ( " graph " ) . Graph ; <nl> <nl> - avocado > g = new Graph ( " vertices " , " edges " ) ; <nl> + avocado > g = new Graph ( " graph " , " vertices " , " edges " ) ; <nl> Graph ( " vertices " , " edges " ) <nl> <nl> - avocado > v = g . addVertex ( { name : " Hugo " } ) ; <nl> + avocado > v = g . addVertex ( null , { name : " Hugo " } ) ; <nl> Vertex ( < graph > , " 153246 : 2310672 " ) <nl> <nl> avocado > v . getPropertyKeys ( ) ; <nl> mmm a / Doxygen / Examples . Durham / graph8 <nl> ppp b / Doxygen / Examples . Durham / graph8 <nl> <nl> avocado > var Graph = require ( " graph " ) . Graph ; <nl> <nl> - avocado > g = new Graph ( " vertices " , " edges " ) ; <nl> + avocado > g = new Graph ( " graph " , " vertices " , " edges " ) ; <nl> Graph ( " vertices " , " edges " ) <nl> <nl> - avocado > v = g . addVertex ( { name : " Hugo " } ) ; <nl> + avocado > v = g . addVertex ( null , { name : " Hugo " } ) ; <nl> Vertex ( < graph > , " 153246 : 2310672 " ) <nl> <nl> avocado > v . getId ( ) ; <nl> mmm a / Doxygen / Examples . Durham / graph9 <nl> ppp b / Doxygen / Examples . Durham / graph9 <nl> <nl> - avocado > e = g . addEdge ( v1 , v2 , " knows " ) ; <nl> + avocado > e = g . addEdge ( v1 , v2 , null , " knows " ) ; <nl> Edge ( < graph > , " 3999653 : 5570857 " ) <nl> <nl> avocado > e . getLabel ( ) ; <nl> mmm a / VERSION <nl> ppp b / VERSION <nl> @ @ - 1 + 1 @ @ <nl> - 0 . 3 . 12 <nl> + 0 . 3 . 
13 <nl> mmm a / configure <nl> ppp b / configure <nl> <nl> # ! / bin / sh <nl> # Guess values for system - dependent variables and create Makefiles . <nl> - # Generated by GNU Autoconf 2 . 68 for triAGENS AvocadoDB 0 . 3 . 12 . <nl> + # Generated by GNU Autoconf 2 . 68 for triAGENS AvocadoDB 0 . 3 . 13 . <nl> # <nl> # Report bugs to < info @ triagens . de > . <nl> # <nl> MAKEFLAGS = <nl> # Identity of this package . <nl> PACKAGE_NAME = ' triAGENS AvocadoDB ' <nl> PACKAGE_TARNAME = ' avocado ' <nl> - PACKAGE_VERSION = ' 0 . 3 . 12 ' <nl> - PACKAGE_STRING = ' triAGENS AvocadoDB 0 . 3 . 12 ' <nl> + PACKAGE_VERSION = ' 0 . 3 . 13 ' <nl> + PACKAGE_STRING = ' triAGENS AvocadoDB 0 . 3 . 13 ' <nl> PACKAGE_BUGREPORT = ' info @ triagens . de ' <nl> PACKAGE_URL = ' http : / / www . avocadodb . org ' <nl> <nl> if test " $ ac_init_help " = " long " ; then <nl> # Omit some internal or obsolete options to make the list less imposing . <nl> # This message is too long to be a string in the A / UX 3 . 1 sh . <nl> cat < < _ACEOF <nl> - \ ` configure ' configures triAGENS AvocadoDB 0 . 3 . 12 to adapt to many kinds of systems . <nl> + \ ` configure ' configures triAGENS AvocadoDB 0 . 3 . 13 to adapt to many kinds of systems . <nl> <nl> Usage : $ 0 [ OPTION ] . . . [ VAR = VALUE ] . . . <nl> <nl> fi <nl> <nl> if test - n " $ ac_init_help " ; then <nl> case $ ac_init_help in <nl> - short | recursive ) echo " Configuration of triAGENS AvocadoDB 0 . 3 . 12 : " ; ; <nl> + short | recursive ) echo " Configuration of triAGENS AvocadoDB 0 . 3 . 13 : " ; ; <nl> esac <nl> cat < < \ _ACEOF <nl> <nl> fi <nl> test - n " $ ac_init_help " & & exit $ ac_status <nl> if $ ac_init_version ; then <nl> cat < < \ _ACEOF <nl> - triAGENS AvocadoDB configure 0 . 3 . 12 <nl> + triAGENS AvocadoDB configure 0 . 3 . 13 <nl> generated by GNU Autoconf 2 . 68 <nl> <nl> Copyright ( C ) 2010 Free Software Foundation , Inc . <nl> cat > config . log < < _ACEOF <nl> This file contains any messages produced by compilers while <nl> running configure , to aid debugging if configure makes a mistake . <nl> <nl> - It was created by triAGENS AvocadoDB $ as_me 0 . 3 . 12 , which was <nl> + It was created by triAGENS AvocadoDB $ as_me 0 . 3 . 13 , which was <nl> generated by GNU Autoconf 2 . 68 . Invocation command line was <nl> <nl> $ $ 0 $ @ <nl> fi <nl> <nl> # Define the identity of the package . <nl> PACKAGE = ' avocado ' <nl> - VERSION = ' 0 . 3 . 12 ' <nl> + VERSION = ' 0 . 3 . 13 ' <nl> <nl> <nl> cat > > confdefs . h < < _ACEOF <nl> cat > > $ CONFIG_STATUS < < \ _ACEOF | | ac_write_fail = 1 <nl> # report actual input values of CONFIG_FILES etc . instead of their <nl> # values after options handling . <nl> ac_log = " <nl> - This file was extended by triAGENS AvocadoDB $ as_me 0 . 3 . 12 , which was <nl> + This file was extended by triAGENS AvocadoDB $ as_me 0 . 3 . 13 , which was <nl> generated by GNU Autoconf 2 . 68 . Invocation command line was <nl> <nl> CONFIG_FILES = $ CONFIG_FILES <nl> _ACEOF <nl> cat > > $ CONFIG_STATUS < < _ACEOF | | ac_write_fail = 1 <nl> ac_cs_config = " ` $ as_echo " $ ac_configure_args " | sed ' s / ^ / / ; s / [ \ \ " " \ ` \ $ ] / \ \ \ \ & / g ' ` " <nl> ac_cs_version = " \ \ <nl> - triAGENS AvocadoDB config . status 0 . 3 . 12 <nl> + triAGENS AvocadoDB config . status 0 . 3 . 13 <nl> configured by $ 0 , generated by GNU Autoconf 2 . 68 , <nl> with options \ \ " \ $ ac_cs_config \ \ " <nl> <nl> mmm a / configure . ac <nl> ppp b / configure . 
ac <nl> dnl = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> dnl PREAMBLE triAGENS GmbH Build Environment <nl> dnl = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> <nl> - AC_INIT ( [ triAGENS AvocadoDB ] , [ 0 . 3 . 12 ] , [ info @ triagens . de ] , [ avocado ] , [ http : / / www . avocadodb . org ] ) <nl> + AC_INIT ( [ triAGENS AvocadoDB ] , [ 0 . 3 . 13 ] , [ info @ triagens . de ] , [ avocado ] , [ http : / / www . avocadodb . org ] ) <nl> <nl> dnl mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> dnl auxillary directory for install - sh and missing <nl> mmm a / js / common / modules / graph . js <nl> ppp b / js / common / modules / graph . js <nl> var internal = require ( " internal " ) , <nl> AvocadoCollection = internal . AvocadoCollection , <nl> AvocadoEdgesCollection = internal . AvocadoEdgesCollection , <nl> shallowCopy , <nl> - propertyKeys ; <nl> + propertyKeys , <nl> + findOrCreateCollectionByName , <nl> + findOrCreateEdgeCollectionByName ; <nl> <nl> / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> / / - - SECTION - - private methods <nl> propertyKeys = function ( props ) { <nl> return keys ; <nl> } ; <nl> <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief find or create a collection by name <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + findOrCreateCollectionByName = function ( name ) { <nl> + var col = internal . db . _collection ( name ) ; <nl> + <nl> + if ( col = = = null ) { <nl> + col = internal . db . _create ( name ) ; <nl> + } else if ( ! ( col instanceof AvocadoCollection ) ) { <nl> + throw " < " + name + " > must be a document collection " ; <nl> + } <nl> + <nl> + if ( col = = = null ) { <nl> + throw " collection ' " + name + " ' has vanished " ; <nl> + } <nl> + <nl> + return col ; <nl> + } ; <nl> + <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief find or create an edge collection by name <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + findOrCreateEdgeCollectionByName = function ( name ) { <nl> + var col = internal . edges . _collection ( name ) ; <nl> + <nl> + if ( col = = = null ) { <nl> + col = internal . edges . _create ( name ) ; <nl> + } else if ( ! 
( col instanceof AvocadoEdgesCollection ) ) { <nl> + throw " < " + name + " > must be a document collection " ; <nl> + } <nl> + <nl> + if ( col = = = null ) { <nl> + throw " collection ' " + name + " ' has vanished " ; <nl> + } <nl> + <nl> + return col ; <nl> + } ; <nl> + <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / / @ } <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> function Edge ( graph , id ) { <nl> if ( props ) { <nl> / / extract the custom identifier , label , edges <nl> this . _properties = props ; <nl> - } <nl> - else { <nl> + } else { <nl> / / deleted <nl> throw " accessing a deleted edge " ; <nl> } <nl> Edge . prototype . _PRINT = function ( seen , path , names ) { <nl> <nl> if ( ! this . _id ) { <nl> internal . output ( " [ deleted Edge ] " ) ; <nl> - } <nl> - else if ( this . _properties . $ id ! = = undefined ) { <nl> + } else if ( this . _properties . $ id ! = = undefined ) { <nl> if ( typeof this . _properties . $ id = = = " string " ) { <nl> internal . output ( " Edge ( \ " " , this . _properties . $ id , " \ " ) " ) ; <nl> - } <nl> - else { <nl> + } else { <nl> internal . output ( " Edge ( " , this . _properties . $ id , " ) " ) ; <nl> } <nl> - } <nl> - else { <nl> + } else { <nl> internal . output ( " Edge ( < " , this . _id , " > ) " ) ; <nl> } <nl> } ; <nl> function Vertex ( graph , id ) { <nl> if ( props ) { <nl> / / extract the custom identifier <nl> this . _properties = props ; <nl> - } <nl> - else { <nl> + } else { <nl> / / deleted <nl> throw " accessing a deleted edge " ; <nl> } <nl> function Vertex ( graph , id ) { <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / / @ brief adds an inbound edge <nl> / / / <nl> - / / / @ FUN { @ FA { vertex } . addInEdge ( @ FA { id } , @ FA { peer } ) } <nl> + / / / @ FUN { @ FA { vertex } . addInEdge ( @ FA { peer } , @ FA { id } ) } <nl> / / / <nl> / / / Creates a new edge from @ FA { peer } to @ FA { vertex } and returns the edge <nl> / / / object . The identifier @ FA { id } must be a unique identifier or null . <nl> / / / <nl> - / / / @ FUN { @ FA { vertex } . addInEdge ( @ FA { id } , @ FA { peer } , @ FA { label } ) } <nl> + / / / @ FUN { @ FA { vertex } . addInEdge ( @ FA { peer } , @ FA { id } , @ FA { label } ) } <nl> / / / <nl> / / / Creates a new edge from @ FA { peer } to @ FA { vertex } with given label and <nl> / / / returns the edge object . <nl> / / / <nl> - / / / @ FUN { @ FA { vertex } . addInEdge ( @ FA { id } , @ FA { peer } , @ FA { label } , @ FA { data } ) } <nl> + / / / @ FUN { @ FA { vertex } . addInEdge ( @ FA { peer } , @ FA { id } , @ FA { label } , @ FA { data } ) } <nl> / / / <nl> / / / Creates a new edge from @ FA { peer } to @ FA { vertex } with given label and <nl> / / / properties defined in @ FA { data } . Returns the edge object . <nl> function Vertex ( graph , id ) { <nl> / / / @ verbinclude graph24 <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> - Vertex . prototype . addInEdge = function ( id , out , label , data ) { <nl> - return this . _graph . 
addEdge ( id , out , this , label , data ) ; <nl> + Vertex . prototype . addInEdge = function ( out , id , label , data ) { <nl> + return this . _graph . addEdge ( out , this , id , label , data ) ; <nl> } ; <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> Vertex . prototype . addInEdge = function ( id , out , label , data ) { <nl> / / / @ verbinclude graph28 <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> - Vertex . prototype . addOutEdge = function ( id , ine , label , data ) { <nl> - return this . _graph . addEdge ( id , this , ine , label , data ) ; <nl> + Vertex . prototype . addOutEdge = function ( ine , id , label , data ) { <nl> + return this . _graph . addEdge ( this , ine , id , label , data ) ; <nl> } ; <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> Vertex . prototype . getInEdges = function ( ) { <nl> <nl> if ( arguments . length = = = 0 ) { <nl> result = this . inbound ( ) ; <nl> - } <nl> - else { <nl> + } else { <nl> labels = { } ; <nl> <nl> for ( i = 0 ; i < arguments . length ; + + i ) { <nl> Vertex . prototype . getOutEdges = function ( ) { <nl> <nl> if ( arguments . length = = = 0 ) { <nl> result = this . outbound ( ) ; <nl> - } <nl> - else { <nl> + } else { <nl> labels = { } ; <nl> for ( i = 0 ; i < arguments . length ; + + i ) { <nl> labels [ arguments [ i ] ] = true ; <nl> Vertex . prototype . _PRINT = function ( seen , path , names ) { <nl> <nl> if ( ! this . _id ) { <nl> internal . output ( " [ deleted Vertex ] " ) ; <nl> - } <nl> - else if ( this . _properties . $ id ! = = undefined ) { <nl> + } else if ( this . _properties . $ id ! = = undefined ) { <nl> if ( typeof this . _properties . $ id = = = " string " ) { <nl> internal . output ( " Vertex ( \ " " , this . _properties . $ id , " \ " ) " ) ; <nl> - } <nl> - else { <nl> + } else { <nl> internal . output ( " Vertex ( " , this . _properties . $ id , " ) " ) ; <nl> } <nl> - } <nl> - else { <nl> + } else { <nl> internal . output ( " Vertex ( < " , this . _id , " > ) " ) ; <nl> } <nl> } ; <nl> Vertex . prototype . _PRINT = function ( seen , path , names ) { <nl> / / / @ verbinclude graph1 <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> - function Graph ( name , vertices , edges ) { <nl> - var gdb ; <nl> - var col ; <nl> - var props ; <nl> + function Graph ( name , vertices , edges ) { <nl> + var gdb , <nl> + graphProperties , <nl> + graphPropertiesId , <nl> + optionsForGraphCreation ; <nl> <nl> gdb = internal . db . _collection ( " _graph " ) ; <nl> <nl> if ( gdb = = = null ) { <nl> - gdb = internal . db . _create ( " _graph " , { waitForSync : true , isSystem : true } ) ; <nl> + optionsForGraphCreation = { waitForSync : true , isSystem : true } ; <nl> + gdb = internal . db . _create ( " _graph " , optionsForGraphCreation ) ; <nl> <nl> / / gdb . ensureUniqueConstraint ( " name " ) ; <nl> } <nl> <nl> - / / @ FUN { Graph ( @ FA { name } ) } <nl> - if ( vertices = = = undefined & & edges = = undefined ) { <nl> - props = gdb . 
firstExample ( ' name ' , name ) ; <nl> - <nl> - if ( props = = = null ) { <nl> - throw " no graph named ' " + name + " ' found " ; <nl> - } <nl> - <nl> - vertices = internal . db . _collection ( props . vertices ) ; <nl> - <nl> - if ( vertices = = null ) { <nl> - throw " vertex collection ' " + props . vertices + " ' has vanished " ; <nl> - } <nl> - <nl> - edges = internal . edges . _collection ( props . edges ) ; <nl> - <nl> - if ( edges = = null ) { <nl> - throw " edge collection ' " + props . edges + " ' has vanished " ; <nl> - } <nl> + if ( typeof name ! = = " string " | | name = = = " " ) { <nl> + throw " < name > must be a string " ; <nl> } <nl> <nl> - / / @ FUN { Graph ( @ FA { name } , @ FA { vertices } , @ FA { edges } ) } <nl> - else { <nl> - <nl> - / / get the vertices collection <nl> - if ( typeof vertices = = = " string " ) { <nl> - col = internal . db . _collection ( vertices ) ; <nl> + if ( vertices = = = undefined & & edges = = = undefined ) { <nl> + / / Find an existing graph <nl> <nl> - if ( col = = = null ) { <nl> - col = internal . db . _create ( vertices ) ; <nl> - } <nl> + graphProperties = gdb . firstExample ( ' name ' , name ) ; <nl> <nl> - if ( col = = null ) { <nl> - throw " vertex collection ' " + vertices + " ' has vanished " ; <nl> - } <nl> - <nl> - / / col . ensureUniqueConstraint ( " $ id " ) ; <nl> - <nl> - vertices = col ; <nl> + if ( graphProperties = = = null ) { <nl> + throw " no graph named ' " + name + " ' found " ; <nl> } <nl> <nl> - / / get the edges collection <nl> - if ( typeof edges = = = " string " ) { <nl> - col = internal . edges . _collection ( edges ) ; <nl> + vertices = internal . db . _collection ( graphProperties . vertices ) ; <nl> <nl> - if ( col = = = null ) { <nl> - col = internal . edges . _create ( edges ) ; <nl> - } <nl> - <nl> - if ( col = = null ) { <nl> - throw " edge collection ' " + edges + " ' has vanished " ; <nl> - } <nl> + if ( vertices = = = null ) { <nl> + throw " vertex collection ' " + graphProperties . vertices + " ' has vanished " ; <nl> + } <nl> <nl> - / / col . ensureUniqueConstraint ( " $ id " ) ; <nl> + edges = internal . edges . _collection ( graphProperties . edges ) ; <nl> <nl> - edges = col ; <nl> + if ( edges = = = null ) { <nl> + throw " edge collection ' " + graphProperties . edges + " ' has vanished " ; <nl> } <nl> + } else if ( typeof vertices ! = = " string " | | vertices = = = " " ) { <nl> + throw " < vertices > must be a string or null " ; <nl> + } else if ( typeof edges ! = = " string " | | edges = = = " " ) { <nl> + throw " < edges > must be a string or null " ; <nl> + } else { <nl> + / / Create a new graph or get an existing graph <nl> + vertices = findOrCreateCollectionByName ( vertices ) ; <nl> + edges = findOrCreateEdgeCollectionByName ( edges ) ; <nl> <nl> - / / find graph by name <nl> - if ( typeof name ! = = " string " | | name = = = " " ) { <nl> - throw " < name > must be a string " ; <nl> - } <nl> + / / Currently buggy : <nl> + / / edges . ensureUniqueConstraint ( " $ id " ) ; <nl> + / / vertices . ensureUniqueConstraint ( " $ id " ) ; <nl> <nl> - props = gdb . firstExample ( ' name ' , name ) ; <nl> + graphProperties = gdb . firstExample ( ' name ' , name ) ; <nl> <nl> - / / name is unknown <nl> - if ( props = = = null ) { <nl> + if ( graphProperties = = = null ) { <nl> + / / Graph doesn ' t exist yet <nl> <nl> / / check if know that graph <nl> - props = gdb . firstExample ( ' vertices ' , vertices . _id , ' edges ' , edges . _id ) ; <nl> - <nl> - if ( props = = = null ) { <nl> - d = gdb . 
save ( { ' vertices ' : vertices . _id , <nl> + graphProperties = gdb . firstExample ( ' vertices ' , <nl> + vertices . _id , <nl> + ' edges ' , <nl> + edges . _id <nl> + ) ; <nl> + <nl> + if ( graphProperties = = = null ) { <nl> + graphPropertiesId = gdb . save ( { ' vertices ' : vertices . _id , <nl> ' verticesName ' : vertices . name ( ) , <nl> ' edges ' : edges . _id , <nl> ' edgesName ' : edges . name ( ) , <nl> ' name ' : name } ) ; <nl> <nl> - props = gdb . document ( d ) ; <nl> - } <nl> - else { <nl> + graphProperties = gdb . document ( graphPropertiesId ) ; <nl> + } else { <nl> throw " found graph but has different < name > " ; <nl> } <nl> - } <nl> - else { <nl> - if ( props . vertices ! = = vertices . _id ) { <nl> + } else { <nl> + if ( graphProperties . vertices ! = = vertices . _id ) { <nl> throw " found graph but has different < vertices > " ; <nl> } <nl> <nl> - if ( props . edges ! = = edges . _id ) { <nl> + if ( graphProperties . edges ! = = edges . _id ) { <nl> throw " found graph but has different < edges > " ; <nl> } <nl> } <nl> } <nl> <nl> - if ( ! ( vertices instanceof AvocadoCollection ) ) { <nl> - throw " < vertices > must be a document collection " ; <nl> - } <nl> - <nl> - if ( ! ( edges instanceof AvocadoEdgesCollection ) ) { <nl> - throw " < edges > must be an edges collection " ; <nl> - } <nl> - <nl> - this . _properties = props ; <nl> + this . _properties = graphProperties ; <nl> <nl> / / and store the collections <nl> this . _vertices = vertices ; <nl> Graph . prototype . drop = function ( ) { <nl> <nl> this . _vertices . drop ( ) ; <nl> this . _edges . drop ( ) ; <nl> - } <nl> + } ; <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / / @ brief adds an edge to the graph <nl> / / / <nl> - / / / @ FUN { @ FA { graph } . addEdge ( @ FA { id } , @ FA { out } , @ FA { in } ) } <nl> + / / / @ FUN { @ FA { graph } . addEdge ( @ FA { out } , @ FA { in } , @ FA { id } ) } <nl> / / / <nl> / / / Creates a new edge from @ FA { out } to @ FA { in } and returns the edge object . The <nl> / / / identifier @ FA { id } must be a unique identifier or null . <nl> / / / <nl> - / / / @ FUN { @ FA { graph } . addEdge ( @ FA { id } , @ FA { out } , @ FA { in } , @ FA { label } ) } <nl> + / / / @ FUN { @ FA { graph } . addEdge ( @ FA { out } , @ FA { in } , @ FA { id } , @ FA { label } ) } <nl> / / / <nl> / / / Creates a new edge from @ FA { out } to @ FA { in } with @ FA { label } and returns the <nl> / / / edge object . <nl> / / / <nl> - / / / @ FUN { @ FA { graph } . addEdge ( @ FA { id } , @ FA { out } , @ FA { in } , @ FA { data } ) } <nl> + / / / @ FUN { @ FA { graph } . addEdge ( @ FA { out } , @ FA { in } , @ FA { id } , @ FA { data } ) } <nl> / / / <nl> / / / Creates a new edge and returns the edge object . The edge contains the <nl> / / / properties defined in @ FA { data } . <nl> / / / <nl> - / / / @ FUN { @ FA { graph } . addEdge ( @ FA { id } , @ FA { out } , @ FA { in } , @ FA { label } , @ FA { data } ) } <nl> + / / / @ FUN { @ FA { graph } . addEdge ( @ FA { out } , @ FA { in } , @ FA { id } , @ FA { label } , @ FA { data } ) } <nl> / / / <nl> / / / Creates a new edge and returns the edge object . The edge has the <nl> / / / label @ FA { label } and contains the properties defined in @ FA { data } . <nl> Graph . prototype . 
drop = function ( ) { <nl> / / / @ verbinclude graph10 <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> - Graph . prototype . addEdge = function ( id , out , ine , label , data ) { <nl> + Graph . prototype . addEdge = function ( out_vertex , in_vertex , id , label , data ) { <nl> var ref , <nl> shallow ; <nl> <nl> Graph . prototype . addEdge = function ( id , out , ine , label , data ) { <nl> shallow . $ id = id | | null ; <nl> shallow . $ label = label | | null ; <nl> <nl> - ref = this . _edges . save ( out . _id , ine . _id , shallow ) ; <nl> + ref = this . _edges . save ( out_vertex . _id , in_vertex . _id , shallow ) ; <nl> <nl> return this . constructEdge ( ref . _id ) ; <nl> } ; <nl> Graph . prototype . getVertex = function ( id ) { <nl> <nl> if ( ref ! = = null ) { <nl> vertex = this . constructVertex ( ref . _id ) ; <nl> - } <nl> - else { <nl> + } else { <nl> vertex = null ; <nl> } <nl> <nl> Graph . prototype . constructVertex = function ( id ) { <nl> Graph . prototype . constructEdge = function ( id ) { <nl> var edge = this . _weakEdges [ id ] ; <nl> <nl> - if ( edge = = = null ) { <nl> + if ( edge = = = undefined ) { <nl> this . _weakEdges [ id ] = edge = new Edge ( this , id ) ; <nl> } <nl> <nl> exports . Vertex = Vertex ; <nl> <nl> / / Local Variables : <nl> / / mode : outline - minor <nl> - / / outline - regexp : " ^ \ \ ( / / / @ brief \ \ | / / / @ addtogroup \ \ | / / - - SECTION - - \ \ | / / / @ page \ \ | / / / @ } \ \ ) " <nl> + / / outline - regexp : <nl> + / / " ^ \ \ ( / / / @ brief \ \ | / / / @ addtogroup \ \ | / / - - SECTION - - \ \ | / / / @ page \ \ | / / / @ } \ \ ) " <nl> / / End : <nl> mmm a / js / common / tests / shell - graph . js <nl> ppp b / js / common / tests / shell - graph . js <nl> <nl> / * jslint indent : 2 , <nl> nomen : true , <nl> maxlen : 80 * / <nl> - / * global require , db , assertEqual * / <nl> + / * global require , <nl> + db , <nl> + assertEqual , assertTrue , <nl> + print , <nl> + PRINT_OBJECT , <nl> + console , <nl> + AvocadoCollection , AvocadoEdgesCollection * / <nl> ( function ( ) { <nl> " use strict " ; <nl> <nl> <nl> / / - - SECTION - - collection methods <nl> / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief test suite : Graph Creation <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + function graphCreationSuite ( ) { <nl> + <nl> + return { <nl> + <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief test : Graph Creation <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + testCreation : function ( ) { <nl> + var Graph = require ( " graph " ) . 
Graph , <nl> + graph_name = " UnitTestsCollectionGraph " , <nl> + vertex = " UnitTestsCollectionVertex " , <nl> + edge = " UnitTestsCollectionEdge " , <nl> + graph = null ; <nl> + <nl> + graph = new Graph ( graph_name , vertex , edge ) ; <nl> + <nl> + assertEqual ( graph_name , graph . _properties . name ) ; <nl> + assertTrue ( graph . _vertices instanceof AvocadoCollection ) ; <nl> + assertTrue ( graph . _edges instanceof AvocadoEdgesCollection ) ; <nl> + <nl> + graph . drop ( ) ; <nl> + } , <nl> + <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief test : Find Graph <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + testFindGraph : function ( ) { <nl> + var Graph = require ( " graph " ) . Graph , <nl> + graph_name = " UnitTestsCollectionGraph " , <nl> + vertex = " UnitTestsCollectionVertex " , <nl> + edge = " UnitTestsCollectionEdge " , <nl> + graph1 = null , <nl> + graph2 = null ; <nl> + <nl> + graph1 = new Graph ( graph_name , vertex , edge ) ; <nl> + graph2 = new Graph ( graph_name ) ; <nl> + <nl> + assertEqual ( graph1 . _properties . name , graph2 . _properties . name ) ; <nl> + assertEqual ( graph1 . _vertices , graph2 . _vertices ) ; <nl> + assertEqual ( graph1 . _edges , graph2 . _edges ) ; <nl> + <nl> + graph1 . drop ( ) ; <nl> + } <nl> + } ; <nl> + } <nl> + <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / / @ brief test suite : Graph Basics <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> function graphBasicsSuite ( ) { <nl> / / var ERRORS = require ( " internal " ) . errors ; <nl> - var Graph = require ( " graph " ) . Graph ; <nl> - var graph_name = " UnitTestsCollectionGraph " ; <nl> - var vertex = " UnitTestsCollectionVertex " ; <nl> - var edge = " UnitTestsCollectionEdge " ; <nl> - var graph = null ; <nl> + var Graph = require ( " graph " ) . Graph , <nl> + graph_name = " UnitTestsCollectionGraph " , <nl> + vertex = " UnitTestsCollectionVertex " , <nl> + edge = " UnitTestsCollectionEdge " , <nl> + graph = null ; <nl> <nl> return { <nl> <nl> <nl> print ( " FOUND : " ) ; <nl> PRINT_OBJECT ( graph ) ; <nl> graph . drop ( ) ; <nl> - } <nl> - catch ( err ) { <nl> + } catch ( err1 ) { <nl> } <nl> <nl> graph = new Graph ( graph_name , vertex , edge ) ; <nl> - } <nl> - catch ( err ) { <nl> - console . error ( " [ FAILED ] setup failed : " + err ) ; <nl> + } catch ( err2 ) { <nl> + console . error ( " [ FAILED ] setup failed : " + err2 ) ; <nl> } <nl> } , <nl> <nl> <nl> <nl> tearDown : function ( ) { <nl> try { <nl> - if ( graph ! = null ) { <nl> + if ( graph ! = = null ) { <nl> graph . drop ( ) ; <nl> } <nl> - } <nl> - catch ( err ) { <nl> + } catch ( err ) { <nl> console . error ( " [ FAILED ] tear - down failed : " + err ) ; <nl> } <nl> } , <nl> <nl> <nl> assertEqual ( " name2 " , v . getId ( ) ) ; <nl> assertEqual ( 23 , v . 
getProperty ( " age " ) ) ; <nl> + } , <nl> + <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief change a property <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + testAddEdgeWithoutInfo : function ( ) { <nl> + var v1 , <nl> + v2 , <nl> + edge ; <nl> + <nl> + v1 = graph . addVertex ( " vertex1 " ) ; <nl> + v2 = graph . addVertex ( " vertex2 " ) ; <nl> + <nl> + edge = graph . addEdge ( v1 , <nl> + v2 ) ; <nl> + <nl> + assertEqual ( null , edge . getId ( ) ) ; <nl> + } , <nl> + <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief change a property <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + testAddEdge : function ( ) { <nl> + var v1 , <nl> + v2 , <nl> + edge ; <nl> + <nl> + v1 = graph . addVertex ( " vertex1 " ) ; <nl> + v2 = graph . addVertex ( " vertex2 " ) ; <nl> + <nl> + edge = graph . addEdge ( v1 , <nl> + v2 , <nl> + " edge1 " , <nl> + " label " , <nl> + { testProperty : " testValue " } ) ; <nl> + <nl> + assertEqual ( " edge1 " , edge . getId ( ) ) ; <nl> + assertEqual ( " label " , edge . getLabel ( ) ) ; <nl> + assertEqual ( " testValue " , edge . getProperty ( " testProperty " ) ) ; <nl> + } , <nl> + <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief change a property <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + testGetEdges : function ( ) { <nl> + var v1 , <nl> + v2 , <nl> + edge1 , <nl> + edge2 ; <nl> + <nl> + v1 = graph . addVertex ( " vertex1 " ) ; <nl> + v2 = graph . addVertex ( " vertex2 " ) ; <nl> + <nl> + edge1 = graph . addEdge ( v1 , <nl> + v2 , <nl> + " edge1 " , <nl> + " label " , <nl> + { testProperty : " testValue " } ) ; <nl> + <nl> + edge2 = graph . getEdges ( ) . next ( ) ; <nl> + assertEqual ( true , graph . getEdges ( ) . hasNext ( ) ) ; <nl> + assertEqual ( edge1 . getId ( ) , edge2 . getId ( ) ) ; <nl> + } , <nl> + <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief remove an edge <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + testRemoveEdges : function ( ) { <nl> + var v1 , <nl> + v2 , <nl> + edge ; <nl> + <nl> + v1 = graph . addVertex ( " vertex1 " ) ; <nl> + v2 = graph . addVertex ( " vertex2 " ) ; <nl> + <nl> + edge = graph . addEdge ( v1 , <nl> + v2 , <nl> + " edge1 " , <nl> + " label " , <nl> + { testProperty : " testValue " } ) ; <nl> + <nl> + graph . removeEdge ( edge ) ; <nl> + assertEqual ( false , graph . getEdges ( ) . 
hasNext ( ) ) ; <nl> + } , <nl> + <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief remove a vertex <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + testRemoveVertex : function ( ) { <nl> + var v1 , <nl> + v1_id , <nl> + v2 , <nl> + edge ; <nl> + <nl> + v1 = graph . addVertex ( " vertex1 " ) ; <nl> + v1_id = v1 . getId ( ) ; <nl> + v2 = graph . addVertex ( " vertex2 " ) ; <nl> + <nl> + edge = graph . addEdge ( v1 , <nl> + v2 , <nl> + " edge1 " , <nl> + " label " , <nl> + { testProperty : " testValue " } ) ; <nl> + <nl> + graph . removeVertex ( v1 ) ; <nl> + <nl> + assertEqual ( null , graph . getVertex ( v1_id ) ) ; <nl> + assertEqual ( false , graph . getEdges ( ) . hasNext ( ) ) ; <nl> + } <nl> + <nl> + } ; <nl> + } <nl> + <nl> + <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief test suite : Vertex <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + function vertexSuite ( ) { <nl> + var Graph = require ( " graph " ) . Graph , <nl> + graph_name = " UnitTestsCollectionGraph " , <nl> + vertex = " UnitTestsCollectionVertex " , <nl> + edge = " UnitTestsCollectionEdge " , <nl> + graph = null ; <nl> + <nl> + return { <nl> + <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief set up <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + setUp : function ( ) { <nl> + try { <nl> + try { <nl> + graph = new Graph ( graph_name ) ; <nl> + print ( " FOUND : " ) ; <nl> + PRINT_OBJECT ( graph ) ; <nl> + graph . drop ( ) ; <nl> + } catch ( err1 ) { <nl> + } <nl> + <nl> + graph = new Graph ( graph_name , vertex , edge ) ; <nl> + } catch ( err2 ) { <nl> + console . error ( " [ FAILED ] setup failed : " + err2 ) ; <nl> + } <nl> + } , <nl> + <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief tear down <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + tearDown : function ( ) { <nl> + try { <nl> + if ( graph ! = = null ) { <nl> + graph . drop ( ) ; <nl> + } <nl> + } catch ( err ) { <nl> + console . 
error ( " [ FAILED ] tear - down failed : " + err ) ; <nl> + } <nl> + } , <nl> + <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief add edges <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + testAddEdges : function ( ) { <nl> + var v1 , <nl> + v2 , <nl> + v3 , <nl> + edge1 , <nl> + edge2 ; <nl> + <nl> + v1 = graph . addVertex ( graph ) ; <nl> + v2 = graph . addVertex ( graph ) ; <nl> + v3 = graph . addVertex ( graph ) ; <nl> + <nl> + edge1 = v1 . addInEdge ( v2 ) ; <nl> + edge2 = v1 . addOutEdge ( v3 ) ; <nl> + <nl> + assertEqual ( v1 . getId ( ) , edge1 . getInVertex ( ) . getId ( ) ) ; <nl> + assertEqual ( v2 . getId ( ) , edge1 . getOutVertex ( ) . getId ( ) ) ; <nl> + assertEqual ( v3 . getId ( ) , edge2 . getInVertex ( ) . getId ( ) ) ; <nl> + assertEqual ( v1 . getId ( ) , edge2 . getOutVertex ( ) . getId ( ) ) ; <nl> + } , <nl> + <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief get edges <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + testGetEdges : function ( ) { <nl> + var v1 , <nl> + v2 , <nl> + edge ; <nl> + <nl> + v1 = graph . addVertex ( graph ) ; <nl> + v2 = graph . addVertex ( graph ) ; <nl> + <nl> + edge = graph . addEdge ( v1 , v2 ) ; <nl> + <nl> + assertEqual ( edge . getId ( ) , v1 . getOutEdges ( ) [ 0 ] . getId ( ) ) ; <nl> + assertEqual ( edge . getId ( ) , v2 . getInEdges ( ) [ 0 ] . getId ( ) ) ; <nl> + assertEqual ( [ ] , v1 . getInEdges ( ) ) ; <nl> + assertEqual ( [ ] , v2 . getOutEdges ( ) ) ; <nl> + assertEqual ( edge . getId ( ) , v1 . edges ( ) [ 0 ] . getId ( ) ) ; <nl> + assertEqual ( edge . getId ( ) , v2 . edges ( ) [ 0 ] . getId ( ) ) ; <nl> + } , <nl> + <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief get edges with labels <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + testGetEdgesWithLabels : function ( ) { <nl> + var v1 , <nl> + v2 , <nl> + edge1 , <nl> + edge2 ; <nl> + <nl> + v1 = graph . addVertex ( graph ) ; <nl> + v2 = graph . addVertex ( graph ) ; <nl> + <nl> + edge1 = graph . addEdge ( v1 , v2 , null , " label_1 " ) ; <nl> + edge2 = graph . addEdge ( v1 , v2 , null , " label_2 " ) ; <nl> + <nl> + assertEqual ( edge2 . getId ( ) , v1 . getOutEdges ( " label_2 " ) [ 0 ] . getId ( ) ) ; <nl> + assertEqual ( 1 , v2 . getInEdges ( " label_2 " ) . length ) ; <nl> + } , <nl> + <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief properties <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + testProperties : function ( ) { <nl> + var v1 ; <nl> + <nl> + v1 = graph . 
addVertex ( graph ) ; <nl> + <nl> + v1 . setProperty ( " myProperty " , " myValue " ) ; <nl> + assertEqual ( " myValue " , v1 . getProperty ( " myProperty " ) ) ; <nl> + assertEqual ( " myProperty " , v1 . getPropertyKeys ( ) [ 0 ] ) ; <nl> + assertEqual ( 1 , v1 . getPropertyKeys ( ) . length ) ; <nl> + assertEqual ( { myProperty : " myValue " } , v1 . properties ( ) ) ; <nl> } <nl> + <nl> + <nl> } ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief test suite : Edges <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + function edgeSuite ( ) { <nl> + var Graph = require ( " graph " ) . Graph , <nl> + graph_name = " UnitTestsCollectionGraph " , <nl> + vertex = " UnitTestsCollectionVertex " , <nl> + edge = " UnitTestsCollectionEdge " , <nl> + graph = null ; <nl> + <nl> + return { <nl> + <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief set up <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + setUp : function ( ) { <nl> + try { <nl> + try { <nl> + graph = new Graph ( graph_name ) ; <nl> + print ( " FOUND : " ) ; <nl> + PRINT_OBJECT ( graph ) ; <nl> + graph . drop ( ) ; <nl> + } catch ( err1 ) { <nl> + } <nl> + <nl> + graph = new Graph ( graph_name , vertex , edge ) ; <nl> + } catch ( err2 ) { <nl> + console . error ( " [ FAILED ] setup failed : " + err2 ) ; <nl> + } <nl> + } , <nl> + <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief tear down <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + tearDown : function ( ) { <nl> + try { <nl> + if ( graph ! = = null ) { <nl> + graph . drop ( ) ; <nl> + } <nl> + } catch ( err ) { <nl> + console . error ( " [ FAILED ] tear - down failed : " + err ) ; <nl> + } <nl> + } , <nl> + <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief get Vertices <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + testGetVertices : function ( ) { <nl> + var v1 , <nl> + v2 , <nl> + edge ; <nl> + <nl> + v1 = graph . addVertex ( ) ; <nl> + v2 = graph . addVertex ( ) ; <nl> + edge = graph . addEdge ( v1 , v2 ) ; <nl> + <nl> + assertEqual ( v1 . getId ( ) , edge . getInVertex ( ) . getId ( ) ) ; <nl> + assertEqual ( v2 . getId ( ) , edge . getOutVertex ( ) . 
getId ( ) ) ; <nl> + } , <nl> + <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief get Vertices <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + testGetLabel : function ( ) { <nl> + var v1 , <nl> + v2 , <nl> + edge ; <nl> + <nl> + v1 = graph . addVertex ( ) ; <nl> + v2 = graph . addVertex ( ) ; <nl> + edge = graph . addEdge ( v1 , v2 , null , " my_label " ) ; <nl> + <nl> + assertEqual ( " my_label " , edge . getLabel ( ) ) ; <nl> + } , <nl> + <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief Properties <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + testProperties : function ( ) { <nl> + var v1 , <nl> + v2 , <nl> + edge , <nl> + properties ; <nl> + <nl> + v1 = graph . addVertex ( ) ; <nl> + v2 = graph . addVertex ( ) ; <nl> + properties = { myProperty : " myValue " } ; <nl> + edge = graph . addEdge ( v1 , v2 , null , " my_label " , properties ) ; <nl> + <nl> + assertEqual ( properties , edge . properties ( ) ) ; <nl> + assertEqual ( " myValue " , edge . getProperty ( " myProperty " ) ) ; <nl> + edge . setProperty ( " foo " , " bar " ) ; <nl> + assertEqual ( " bar " , edge . getProperty ( " foo " ) ) ; <nl> + assertEqual ( [ " foo " , " myProperty " ] , edge . getPropertyKeys ( ) ) ; <nl> + } <nl> + <nl> + <nl> + } ; <nl> + } <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / / @ brief executes the test suites <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> + jsunity . run ( graphCreationSuite ) ; <nl> jsunity . run ( graphBasicsSuite ) ; <nl> + jsunity . run ( vertexSuite ) ; <nl> + jsunity . run ( edgeSuite ) ; <nl> jsunity . done ( ) ; <nl> <nl> } ( ) ) ; <nl>
merged
arangodb/arangodb
797950b8b79439fa7660f81337c67263e8032201
2012-04-25T07:13:13Z
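The commit above reorders the Graph module's constructor and edge/vertex parameters. The following is a minimal sketch of the resulting call order, written as plain shell-style JavaScript; the graph and collection names ("example_graph", "example_vertices", "example_edges") and the property values are illustrative only, not taken from the commit.
// Assumes the "graph" module from js/common/modules/graph.js is loadable.
var Graph = require("graph").Graph;
// New constructor order: graph name first, then vertex and edge collection names.
var g = new Graph("example_graph", "example_vertices", "example_edges");
// addVertex takes an optional id first, then the property document.
var v1 = g.addVertex(null, { name: "Hugo" });
var v2 = g.addVertex(null, { name: "Emil" });
// addEdge now takes (out, in, id, label, data) instead of (id, out, in, label, data).
var e = g.addEdge(v1, v2, null, "knows", { since: 2012 });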
mmm a / ios / sdk / WeexSDK / Sources / Component / WXScrollerComponent . m <nl> ppp b / ios / sdk / WeexSDK / Sources / Component / WXScrollerComponent . m <nl> @ implementation WXScrollerComponent <nl> CGSize _contentSize ; <nl> BOOL _listenLoadMore ; <nl> BOOL _scrollEvent ; <nl> + BOOL _scrollStartEvent ; <nl> + BOOL _scrollEndEvent ; <nl> CGFloat _loadMoreOffset ; <nl> CGFloat _previousLoadMoreContentHeight ; <nl> CGFloat _offsetAccuracy ; <nl> - ( instancetype ) initWithRef : ( NSString * ) ref type : ( NSString * ) type styles : ( NSDicti <nl> _stickyArray = [ NSMutableArray array ] ; <nl> _listenerArray = [ NSMutableArray array ] ; <nl> _scrollEvent = NO ; <nl> + _scrollStartEvent = NO ; <nl> + _scrollEndEvent = NO ; <nl> _lastScrollEventFiredOffset = CGPointMake ( 0 , 0 ) ; <nl> _scrollDirection = attributes [ @ " scrollDirection " ] ? [ WXConvert WXScrollDirection : attributes [ @ " scrollDirection " ] ] : WXScrollDirectionVertical ; <nl> _showScrollBar = attributes [ @ " showScrollbar " ] ? [ WXConvert BOOL : attributes [ @ " showScrollbar " ] ] : YES ; <nl> - ( void ) addEvent : ( NSString * ) eventName <nl> if ( [ eventName isEqualToString : @ " scroll " ] ) { <nl> _scrollEvent = YES ; <nl> } <nl> + if ( [ eventName isEqualToString : @ " scrollStart " ] ) { <nl> + _scrollStartEvent = YES ; <nl> + } <nl> + if ( [ eventName isEqualToString : @ " scrollEnd " ] ) { <nl> + _scrollEndEvent = YES ; <nl> + } <nl> } <nl> <nl> - ( void ) removeEvent : ( NSString * ) eventName <nl> - ( void ) removeEvent : ( NSString * ) eventName <nl> if ( [ eventName isEqualToString : @ " scroll " ] ) { <nl> _scrollEvent = NO ; <nl> } <nl> + if ( [ eventName isEqualToString : @ " scrollStart " ] ) { <nl> + _scrollStartEvent = NO ; <nl> + } <nl> + if ( [ eventName isEqualToString : @ " scrollEnd " ] ) { <nl> + _scrollEndEvent = NO ; <nl> + } <nl> } <nl> <nl> # pragma mark WXScrollerProtocol <nl> - ( WXScrollDirection ) scrollDirection <nl> # pragma mark UIScrollViewDelegate <nl> - ( void ) scrollViewWillBeginDragging : ( UIScrollView * ) scrollView <nl> { <nl> - [ self fireEvent : @ " scrollstart " params : nil domChanges : nil ] ; <nl> + if ( _scrollStartEvent ) { <nl> + CGFloat scaleFactor = self . weexInstance . pixelScaleFactor ; <nl> + NSDictionary * contentSizeData = @ { @ " width " : [ NSNumber numberWithFloat : scrollView . contentSize . width / scaleFactor ] , @ " height " : [ NSNumber numberWithFloat : scrollView . contentSize . height / scaleFactor ] } ; <nl> + NSDictionary * contentOffsetData = @ { @ " x " : [ NSNumber numberWithFloat : - scrollView . contentOffset . x / scaleFactor ] , @ " y " : [ NSNumber numberWithFloat : - scrollView . contentOffset . y / scaleFactor ] } ; <nl> + [ self fireEvent : @ " scrollstart " params : @ { @ " contentSize " : contentSizeData , @ " contentOffset " : contentOffsetData } domChanges : nil ] ; <nl> + } <nl> } <nl> <nl> - ( void ) scrollViewDidScroll : ( UIScrollView * ) scrollView <nl> - ( void ) scrollViewDidEndScrollingAnimation : ( UIScrollView * ) scrollView <nl> } <nl> <nl> [ scrollView setContentInset : inset ] ; <nl> - [ self fireEvent : @ " scrollend " params : nil domChanges : nil ] ; <nl> - <nl> + if ( _scrollEndEvent ) { <nl> + CGFloat scaleFactor = self . weexInstance . pixelScaleFactor ; <nl> + NSDictionary * contentSizeData = @ { @ " width " : [ NSNumber numberWithFloat : scrollView . contentSize . width / scaleFactor ] , @ " height " : [ NSNumber numberWithFloat : scrollView . contentSize . 
height / scaleFactor ] } ; <nl> + NSDictionary * contentOffsetData = @ { @ " x " : [ NSNumber numberWithFloat : - scrollView . contentOffset . x / scaleFactor ] , @ " y " : [ NSNumber numberWithFloat : - scrollView . contentOffset . y / scaleFactor ] } ; <nl> + [ self fireEvent : @ " scrollend " params : @ { @ " contentSize " : contentSizeData , @ " contentOffset " : contentOffsetData } domChanges : nil ] ; <nl> + } <nl> } <nl> <nl> - ( void ) scrollViewDidEndDragging : ( UIScrollView * ) scrollView willDecelerate : ( BOOL ) decelerate <nl>
* [ ios ] fix some parameters with fireEvent of scrollstart and scrollend
apache/incubator-weex
180b9748d39b698d44e9a0bd64897f3ef42c18b9
2017-11-13T09:09:16Z
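The commit above fires scrollstart/scrollend only when a listener is registered and attaches a payload with contentSize and contentOffset, both divided by pixelScaleFactor on the native side. A rough sketch of what a JavaScript handler would receive follows; the handler name and the way the listener is wired up depend on the Weex front-end framework and are assumptions, only the payload keys come from the diff.
function onScrollEnd(event) {
  // Sizes and offsets arrive already divided by pixelScaleFactor.
  var width = event.contentSize.width;
  var height = event.contentSize.height;
  var x = event.contentOffset.x; // native side negates contentOffset.x
  var y = event.contentOffset.y; // native side negates contentOffset.y
  console.log('scrollend at', x, y, 'content', width, 'x', height);
}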
mmm a / lib / ClangImporter / ImportName . h <nl> ppp b / lib / ClangImporter / ImportName . h <nl> <nl> / / ClangImporter . <nl> / / <nl> / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> - <nl> - / / . . . include <nl> - <nl> # ifndef SWIFT_IMPORT_NAME_H <nl> # define SWIFT_IMPORT_NAME_H <nl> <nl>
Delete stray comment
apple/swift
6854f739b859bc1602b38d34580c97575f8d1e9a
2016-09-13T04:01:47Z
mmm a / emcc . py <nl> ppp b / emcc . py <nl> <nl> ' emscripten_idb_load_blob ' , ' emscripten_idb_store_blob ' , ' SDL_Delay ' , <nl> ' emscripten_scan_registers ' , ' emscripten_lazy_load_code ' , <nl> ' emscripten_fiber_swap ' , <nl> - ' wasi_snapshot_preview1 . fd_sync ' , ' __wasi_fd_sync ' ] <nl> + ' wasi_snapshot_preview1 . fd_sync ' , ' __wasi_fd_sync ' , ' _emval_await ' ] <nl> <nl> # Mapping of emcc opt levels to llvm opt levels . We use llvm opt level 3 in emcc <nl> # opt levels 2 and 3 ( emcc 3 is unsafe opts , so unsuitable for the only level to <nl> mmm a / site / source / docs / api_reference / val . h . rst <nl> ppp b / site / source / docs / api_reference / val . h . rst <nl> Guide material for this class can be found in : ref : ` embind - val - guide ` . <nl> : returns : * * HamishW * * - Replace with description . <nl> <nl> <nl> + . . cpp : function : : val await ( ) const <nl> + <nl> + Pauses the C + + to ` ` await ` ` the ` ` Promise ` ` / thenable . <nl> + <nl> + : returns : The fulfilled value . <nl> + <nl> + This method requires : ref : ` Asyncify ` to be enabled . <nl> + <nl> + <nl> . . cpp : type : EMSCRIPTEN_SYMBOL ( name ) <nl> <nl> * * HamishW * * - Replace with description . <nl> mmm a / site / source / docs / porting / asyncify . rst <nl> ppp b / site / source / docs / porting / asyncify . rst <nl> function ; another is to use ` ` EM_JS ` ` , which we ' ll use in this next example : <nl> # include < stdio . h > <nl> <nl> EM_JS ( void , do_fetch , ( ) , { <nl> - Asyncify . handleSleep ( function ( wakeUp ) { <nl> + Asyncify . handleAsync ( async ( ) = > { <nl> + out ( " waiting for a fetch " ) ; <nl> + const response = await fetch ( " a . html " ) ; <nl> + out ( " got the fetch response " ) ; <nl> + / / ( normally you would do something with the fetch here ) <nl> + } ) ; <nl> + } ) ; <nl> + <nl> + int main ( ) { <nl> + puts ( " before " ) ; <nl> + do_fetch ( ) ; <nl> + puts ( " after " ) ; <nl> + } <nl> + <nl> + If you can ' t use the modern ` ` async ` ` - ` ` await ` ` syntax , there is a variant with an explicit ` ` wakeUp ` ` callback too : <nl> + <nl> + . . code - block : : cpp <nl> + <nl> + / / example . c <nl> + # include < emscripten . h > <nl> + # include < stdio . h > <nl> + <nl> + EM_JS ( void , do_fetch , ( ) , { <nl> + Asyncify . handleSleep ( wakeUp = > { <nl> out ( " waiting for a fetch " ) ; <nl> fetch ( " a . html " ) . then ( response = > { <nl> out ( " got the fetch response " ) ; <nl> function ; another is to use ` ` EM_JS ` ` , which we ' ll use in this next example : <nl> puts ( " after " ) ; <nl> } <nl> <nl> - <nl> The async operation happens in the ` ` EM_JS ` ` function ` ` do_fetch ( ) ` ` , which <nl> - calls ` ` Asyncify . handleSleep ` ` . It gives that function the code to be run , and <nl> - gets a ` ` wakeUp ` ` function that it calls in the asynchronous future at the right <nl> - time . After we call ` ` wakeUp ( ) ` ` the compiled C code resumes normally . <nl> + calls ` ` Asyncify . handleAsync ` ` or ` ` Asyncify . handleSleep ` ` . It gives that <nl> + function the code to be run , and gets a ` ` wakeUp ` ` function that it calls in the <nl> + asynchronous future at the right time . After we call ` ` wakeUp ( ) ` ` the compiled C <nl> + code resumes normally . <nl> <nl> In this example the async operation is a ` ` fetch ` ` , which means we need to wait <nl> for a Promise . While that is async , note how the C code in ` ` main ( ) ` ` is <nl> You can also return values from async JS functions . 
Here is an example : <nl> # include < stdio . h > <nl> <nl> EM_JS ( int , get_digest_size , ( const char * str ) , { <nl> - / / Note how we return the output of handleSleep ( ) here . <nl> - return Asyncify . handleSleep ( function ( wakeUp ) { <nl> + / / Note how we return the output of handleAsync ( ) here . <nl> + return Asyncify . handleAsync ( async ( ) = > { <nl> const text = UTF8ToString ( str ) ; <nl> const encoder = new TextEncoder ( ) ; <nl> const data = encoder . encode ( text ) ; <nl> out ( " ask for digest for " + text ) ; <nl> - window . crypto . subtle . digest ( " SHA - 256 " , data ) . then ( digestValue = > { <nl> - out ( " got digest of length " + digestValue . byteLength ) ; <nl> - / / Return the value by sending it to wakeUp ( ) . It will then be returned <nl> - / / from handleSleep ( ) on the outside . <nl> - wakeUp ( digestValue . byteLength ) ; <nl> - } ) ; <nl> + const digestValue = await window . crypto . subtle . digest ( " SHA - 256 " , data ) ; <nl> + out ( " got digest of length " + digestValue . byteLength ) ; <nl> + / / Return the value as you normally would . <nl> + return digestValue . byteLength ; <nl> } ) ; <nl> } ) ; <nl> <nl> You can build this with <nl> This example calls the Promise - returning ` ` window . crypto . subtle ( ) ` ` API ( the <nl> example is based off of <nl> ` this MDN example < https : / / developer . mozilla . org / en - US / docs / Web / API / SubtleCrypto / digest # Basic_example > ` _ <nl> - ) . Note how we pass the value to be returned into ` ` wakeUp ( ) ` ` . We must also <nl> - return the value returned from ` ` handleSleep ( ) ` ` . The calling C code then <nl> + ) . <nl> + <nl> + Note that we must propagate the value returned from ` ` handleSleep ( ) ` ` . The calling C code then <nl> gets it normally , after the Promise completes . <nl> <nl> + If you ' re using the ` ` handleSleep ` ` API , the value needs to be also passed to the ` ` wakeUp ` ` callback , instead of being returned from our handler : <nl> + <nl> + . . code - block : : cpp <nl> + <nl> + / / . . . <nl> + return Asyncify . handleSleep ( wakeUp = > { <nl> + const text = UTF8ToString ( str ) ; <nl> + const encoder = new TextEncoder ( ) ; <nl> + const data = encoder . encode ( text ) ; <nl> + out ( " ask for digest for " + text ) ; <nl> + window . crypto . subtle . digest ( " SHA - 256 " , data ) . then ( digestValue = > { <nl> + out ( " got digest of length " + digestValue . byteLength ) ; <nl> + / / Return the value by sending it to wakeUp ( ) . It will then be returned <nl> + / / from handleSleep ( ) on the outside . <nl> + wakeUp ( digestValue . byteLength ) ; <nl> + } ) ; <nl> + } ) ; <nl> + / / . . . <nl> + <nl> + Usage with Embind <nl> + # # # # # # # # # # # # # # # # # <nl> + <nl> + If you ' re using : ref : ` Embind < embind - val - guide > ` for interaction with JavaScript <nl> + and want to ` ` await ` ` a dynamically retrieved ` ` Promise ` ` , you can call an <nl> + ` ` await ( ) ` ` method directly on the ` ` val ` ` instance : <nl> + <nl> + . . code - block : : cpp <nl> + <nl> + val my_object = / * . . . * / ; <nl> + val result = my_object . call ( " someAsyncMethod " ) . await ( ) ; <nl> + <nl> + In this case you don ' t need to worry about ` ` ASYNCIFY_IMPORTS ` ` , since it ' s an <nl> + internal implementation detail of ` ` val : : await ` ` and Emscripten takes care of it <nl> + automatically . <nl> + <nl> Optimizing <nl> # # # # # # # # # # <nl> <nl> mmm a / src / embind / emval . js <nl> ppp b / src / embind / emval . 
js <nl> var LibraryEmVal = { <nl> constructor = requireHandle ( constructor ) ; <nl> return object instanceof constructor ; <nl> } , <nl> - <nl> + <nl> _emval_is_number__deps : [ ' $ requireHandle ' ] , <nl> _emval_is_number : function ( handle ) { <nl> handle = requireHandle ( handle ) ; <nl> var LibraryEmVal = { <nl> throw object ; <nl> } , <nl> <nl> + # if ASYNCIFY <nl> + _emval_await__deps : [ ' $ requireHandle ' , ' _emval_register ' , ' $ Asyncify ' ] , <nl> + _emval_await : function ( promise ) { <nl> + return Asyncify . handleAsync ( function ( ) { <nl> + promise = requireHandle ( promise ) ; <nl> + return promise . then ( __emval_register ) ; <nl> + } ) ; <nl> + } , <nl> + # endif <nl> } ; <nl> <nl> mergeInto ( LibraryManager . library , LibraryEmVal ) ; <nl> mmm a / src / library_async . js <nl> ppp b / src / library_async . js <nl> mergeInto ( LibraryManager . library , { <nl> abort ( ' invalid state : ' + Asyncify . state ) ; <nl> } <nl> return Asyncify . handleSleepReturnValue ; <nl> - } <nl> + } , <nl> + <nl> + / / Unlike ` handleSleep ` , accepts a function returning a ` Promise ` <nl> + / / and uses the fulfilled value instead of passing in a separate callback . <nl> + / / <nl> + / / This is particularly useful for native JS ` async ` functions where the <nl> + / / returned value will " just work " and be passed back to C + + . <nl> + handleAsync : function ( startAsync ) { <nl> + return Asyncify . handleSleep ( function ( wakeUp ) { <nl> + / / TODO : add error handling as a second param when handleSleep implements it . <nl> + startAsync ( ) . then ( wakeUp ) ; <nl> + } ) ; <nl> + } , <nl> } , <nl> <nl> emscripten_sleep : function ( ms ) { <nl> mergeInto ( LibraryManager . library , { <nl> if ( WASM_BACKEND & & ASYNCIFY ) { <nl> DEFAULT_LIBRARY_FUNCS_TO_INCLUDE . push ( ' $ Asyncify ' ) ; <nl> } <nl> - <nl> mmm a / system / include / emscripten / val . h <nl> ppp b / system / include / emscripten / val . h <nl> namespace emscripten { <nl> bool _emval_in ( EM_VAL item , EM_VAL object ) ; <nl> bool _emval_delete ( EM_VAL object , EM_VAL property ) ; <nl> bool _emval_throw ( EM_VAL object ) ; <nl> + EM_VAL _emval_await ( EM_VAL promise ) ; <nl> } <nl> <nl> template < const char * address > <nl> namespace emscripten { <nl> internal : : _emval_throw ( handle ) ; <nl> } <nl> <nl> + val await ( ) const { <nl> + return val ( internal : : _emval_await ( handle ) ) ; <nl> + } <nl> + <nl> private : <nl> / / takes ownership , assumes handle already incref ' d <nl> explicit val ( internal : : EM_VAL handle ) <nl> new file mode 100644 <nl> index 00000000000 . . 490dff4f020 <nl> mmm / dev / null <nl> ppp b / tests / embind_with_asyncify . cpp <nl> <nl> + # include < assert . h > <nl> + <nl> + # include < string > <nl> + <nl> + # include < emscripten . h > <nl> + # include < emscripten / val . h > <nl> + <nl> + using namespace emscripten ; <nl> + <nl> + int main ( ) { <nl> + val fetch = val : : global ( " fetch " ) ; <nl> + std : : string url = " data : text / plain , foo " ; <nl> + val async_response = fetch ( url ) ; <nl> + val response = async_response . await ( ) ; <nl> + val async_text = response . call < val > ( " text " ) ; <nl> + std : : string text = async_text . await ( ) . as < std : : string > ( ) ; <nl> + REPORT_RESULT ( text = = " foo " ) ; <nl> + } <nl> mmm a / tests / test_browser . py <nl> ppp b / tests / test_browser . py <nl> def test_emscripten_performance_now ( self ) : <nl> def test_embind_with_pthreads ( self ) : <nl> self . btest ( ' embind_with_pthreads . 
cpp ' , ' 1 ' , args = [ ' - - bind ' , ' - s ' , ' USE_PTHREADS = 1 ' , ' - s ' , ' PROXY_TO_PTHREAD = 1 ' ] ) <nl> <nl> + @ no_fastcomp ( " no asyncify support " ) <nl> + def test_embind_with_asyncify ( self ) : <nl> + self . btest ( ' embind_with_asyncify . cpp ' , ' 1 ' , args = [ ' - - bind ' ] + self . get_async_args ( ) ) <nl> + <nl> # Test emscripten_console_log ( ) , emscripten_console_warn ( ) and emscripten_console_error ( ) <nl> def test_emscripten_console_log ( self ) : <nl> self . btest ( path_from_root ( ' tests ' , ' emscripten_console_log . c ' ) , ' 0 ' , args = [ ' - - pre - js ' , path_from_root ( ' tests ' , ' emscripten_console_log_pre . js ' ) ] ) <nl>
Add support for Embind + Asyncify integration ( )
emscripten-core/emscripten
8715a1bb4a6b614e3de6b842f8e57e054c2ba265
2020-06-18T19:14:37Z
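The commit above introduces Asyncify.handleAsync, which wraps handleSleep so a JS library function can simply return the fulfilled value of a Promise. A minimal sketch of using it from a user JS library follows; the function name my_async_fetch and the fetched URL are illustrative, and whether such a function must also be listed in ASYNCIFY_IMPORTS depends on how it is declared, so treat that as an assumption.
// Sketch only: a JS library function whose async result is returned to C/C++.
mergeInto(LibraryManager.library, {
  my_async_fetch__deps: ['$Asyncify'],
  my_async_fetch: function () {
    // handleAsync takes an async function; its fulfilled value becomes the
    // return value seen by the calling compiled code once execution resumes.
    return Asyncify.handleAsync(async () => {
      const response = await fetch('a.html');
      return response.status;
    });
  },
});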
mmm a / src / app / ui / document_view . cpp <nl> ppp b / src / app / ui / document_view . cpp <nl> class AppEditor : public Editor , <nl> <nl> / / EditorObserver implementation <nl> void dispose ( ) override { <nl> - App : : instance ( ) - > getMainWindow ( ) - > getPreviewEditor ( ) - > updateUsingEditor ( NULL ) ; <nl> + PreviewEditorWindow * preview = <nl> + App : : instance ( ) - > getMainWindow ( ) - > getPreviewEditor ( ) ; <nl> + <nl> + if ( preview - > relatedEditor ( ) = = this ) <nl> + updatePreviewEditor ( nullptr ) ; <nl> } <nl> <nl> void onScrollChanged ( Editor * editor ) override { <nl> - if ( current_editor = = this ) <nl> - App : : instance ( ) - > getMainWindow ( ) - > getPreviewEditor ( ) - > updateUsingEditor ( this ) ; <nl> + updatePreviewEditor ( this ) ; <nl> } <nl> <nl> void onAfterFrameChanged ( Editor * editor ) override { <nl> - App : : instance ( ) - > getMainWindow ( ) - > getPreviewEditor ( ) - > updateUsingEditor ( this ) ; <nl> + updatePreviewEditor ( this ) ; <nl> <nl> set_current_palette ( editor - > sprite ( ) - > palette ( editor - > frame ( ) ) , true ) ; <nl> } <nl> <nl> void onAfterLayerChanged ( Editor * editor ) override { <nl> - App : : instance ( ) - > getMainWindow ( ) - > getPreviewEditor ( ) - > updateUsingEditor ( this ) ; <nl> + updatePreviewEditor ( this ) ; <nl> } <nl> <nl> / / EditorCustomizationDelegate implementation <nl> class AppEditor : public Editor , <nl> } <nl> <nl> private : <nl> + <nl> + void updatePreviewEditor ( Editor * editor ) { <nl> + App : : instance ( ) - > getMainWindow ( ) - > getPreviewEditor ( ) - > updateUsingEditor ( editor ) ; <nl> + } <nl> + <nl> bool isKeyActionPressed ( KeyAction action ) { <nl> if ( Key * key = KeyboardShortcuts : : instance ( ) - > action ( action ) ) <nl> return key - > checkFromAllegroKeyArray ( ) ; <nl> mmm a / src / app / ui / preview_editor . cpp <nl> ppp b / src / app / ui / preview_editor . cpp <nl> PreviewEditorWindow : : PreviewEditorWindow ( ) <nl> , m_playButton ( new MiniPlayButton ( ) ) <nl> , m_refFrame ( 0 ) <nl> , m_aniSpeed ( 1 . 0 ) <nl> + , m_relatedEditor ( nullptr ) <nl> { <nl> child_spacing = 0 ; <nl> setAutoRemap ( false ) ; <nl> void PreviewEditorWindow : : updateUsingEditor ( Editor * editor ) <nl> { <nl> if ( ! m_isEnabled | | ! editor ) { <nl> hideWindow ( ) ; <nl> + m_relatedEditor = nullptr ; <nl> return ; <nl> } <nl> <nl> - if ( editor ! = current_editor ) <nl> + if ( ! editor - > isActive ( ) ) <nl> return ; <nl> <nl> + m_relatedEditor = editor ; <nl> + <nl> Document * document = editor - > document ( ) ; <nl> Editor * miniEditor = ( m_docView ? m_docView - > getEditor ( ) : NULL ) ; <nl> <nl> mmm a / src / app / ui / preview_editor . h <nl> ppp b / src / app / ui / preview_editor . h <nl> namespace app { <nl> void updateUsingEditor ( Editor * editor ) ; <nl> void uncheckCenterButton ( ) ; <nl> <nl> + Editor * relatedEditor ( ) const { return m_relatedEditor ; } <nl> + <nl> protected : <nl> bool onProcessMessage ( ui : : Message * msg ) override ; <nl> void onClose ( ui : : CloseEvent & ev ) override ; <nl> namespace app { <nl> MiniPlayButton * m_playButton ; <nl> doc : : frame_t m_refFrame ; <nl> double m_aniSpeed ; <nl> + Editor * m_relatedEditor ; <nl> } ; <nl> <nl> } / / namespace app <nl>
Don ' t close the Preview window if we close an unrelated Editor
aseprite/aseprite
e973aa0cc77e35cb4f6a3144b1cc5d968e097800
2015-05-19T00:28:30Z
mmm a / lib / IRGen / IRGen . cpp <nl> ppp b / lib / IRGen / IRGen . cpp <nl> static bool needsRecompile ( StringRef OutputFilename , ArrayRef < uint8_t > HashData , <nl> return true ; <nl> <nl> auto BinaryOwner = object : : createBinary ( OutputFilename ) ; <nl> - if ( ! BinaryOwner ) <nl> + if ( ! BinaryOwner ) { <nl> + consumeError ( BinaryOwner . takeError ( ) ) ; <nl> return true ; <nl> + } <nl> auto * ObjectFile = dyn_cast < object : : ObjectFile > ( BinaryOwner - > getBinary ( ) ) ; <nl> if ( ! ObjectFile ) <nl> return true ; <nl> mmm a / tools / driver / autolink_extract_main . cpp <nl> ppp b / tools / driver / autolink_extract_main . cpp <nl> int autolink_extract_main ( ArrayRef < const char * > Args , const char * Argv0 , <nl> for ( const auto & BinaryFileName : Invocation . getInputFilenames ( ) ) { <nl> auto BinaryOwner = llvm : : object : : createBinary ( BinaryFileName ) ; <nl> if ( ! BinaryOwner ) { <nl> + std : : string message ; <nl> + { <nl> + llvm : : raw_string_ostream os ( message ) ; <nl> + logAllUnhandledErrors ( BinaryOwner . takeError ( ) , os , " " ) ; <nl> + } <nl> + <nl> Instance . getDiags ( ) . diagnose ( SourceLoc ( ) , diag : : error_open_input_file , <nl> - BinaryFileName , <nl> - BinaryOwner . getError ( ) . message ( ) ) ; <nl> + BinaryFileName , message ) ; <nl> return 1 ; <nl> } <nl> <nl> mmm a / tools / swift - reflection - dump / swift - reflection - dump . cpp <nl> ppp b / tools / swift - reflection - dump / swift - reflection - dump . cpp <nl> getSectionRef ( const Binary * binaryFile , StringRef arch , <nl> static int doDumpReflectionSections ( std : : string BinaryFilename , <nl> StringRef arch ) { <nl> auto binaryOrError = llvm : : object : : createBinary ( BinaryFilename ) ; <nl> - guardError ( binaryOrError . getError ( ) ) ; <nl> + if ( ! binaryOrError ) { <nl> + logAllUnhandledErrors ( binaryOrError . takeError ( ) , llvm : : errs ( ) , <nl> + " swift - reflection - test error : " ) ; <nl> + exit ( EXIT_FAILURE ) ; <nl> + } <nl> <nl> const auto binary = binaryOrError . get ( ) . getBinary ( ) ; <nl> <nl>
Adjust for use of llvm : : Expected as llvm : : object : : createBinary result .
apple/swift
55f72405f507b73299fd9be0a177ac7d4d1aa749
2016-04-14T17:13:10Z
mmm a / tensorflow / contrib / framework / __init__ . py <nl> ppp b / tensorflow / contrib / framework / __init__ . py <nl> <nl> <nl> @ @ BoundedTensorSpec <nl> @ @ TensorSpec <nl> + <nl> + @ @ RecordInput <nl> " " " <nl> <nl> from __future__ import absolute_import <nl> <nl> from tensorflow . python . framework . smart_cond import smart_constant_value <nl> from tensorflow . python . framework . tensor_spec import BoundedTensorSpec <nl> from tensorflow . python . framework . tensor_spec import TensorSpec <nl> + from tensorflow . python . ops . data_flow_ops import RecordInput <nl> from tensorflow . python . ops . init_ops import convolutional_delta_orthogonal <nl> from tensorflow . python . ops . init_ops import convolutional_orthogonal_1d <nl> from tensorflow . python . ops . init_ops import convolutional_orthogonal_2d <nl>
Merge pull request from yongtang : 10622 - RecordInput - doc
tensorflow/tensorflow
fe7430adaa3b51ad17a1f8d0aed0fc7018b1a416
2018-08-11T01:39:02Z
mmm a / src / node / src / credentials . js <nl> ppp b / src / node / src / credentials . js <nl> var Metadata = require ( ' . / metadata . js ' ) ; <nl> <nl> var common = require ( ' . / common . js ' ) ; <nl> <nl> + var _ = require ( ' lodash ' ) ; <nl> + <nl> / * * <nl> * Create an SSL Credentials object . If using a client - side certificate , both <nl> * the second and third arguments must be passed . <nl> exports . createFromMetadataGenerator = function ( metadata_generator ) { <nl> var message = ' ' ; <nl> if ( error ) { <nl> message = error . message ; <nl> - if ( error . hasOwnProperty ( ' code ' ) ) { <nl> + if ( error . hasOwnProperty ( ' code ' ) & & _ . isFinite ( error . code ) ) { <nl> code = error . code ; <nl> } else { <nl> code = grpc . status . UNAUTHENTICATED ; <nl> mmm a / src / node / test / credentials_test . js <nl> ppp b / src / node / test / credentials_test . js <nl> var fakeSuccessfulGoogleCredentials = { <nl> var fakeFailingGoogleCredentials = { <nl> getRequestMetadata : function ( service_url , callback ) { <nl> setTimeout ( function ( ) { <nl> - callback ( new Error ( ' Authentication failure ' ) ) ; <nl> + / / Google credentials currently adds string error codes to auth errors <nl> + var error = new Error ( ' Authentication failure ' ) ; <nl> + error . code = ' ENOENT ' ; <nl> + callback ( error ) ; <nl> } , 0 ) ; <nl> } <nl> } ; <nl>
Merge pull request from murgatroid99 / node_credentials_error_code_fix
grpc/grpc
2106cd3e03e53655ede98cd77e174bd5ef5134cc
2016-08-05T22:11:58Z
mmm a / tensorflow / compiler / xla / tests / hlo_test_base . cc <nl> ppp b / tensorflow / compiler / xla / tests / hlo_test_base . cc <nl> : : testing : : AssertionResult HloTestBase : : RunAndCompare ( <nl> auto module_or_status = <nl> HloRunner : : CreateModuleFromString ( hlo_string , GetDebugOptionsForTest ( ) ) ; <nl> if ( ! module_or_status . ok ( ) ) { <nl> - return : : testing : : AssertionFailure ( ) < < " failed parsing hlo textual IR " ; <nl> + return : : testing : : AssertionFailure ( ) <nl> + < < " Error while parsing HLO text format : " <nl> + < < module_or_status . status ( ) . ToString ( ) ; <nl> } <nl> return RunAndCompare ( module_or_status . ConsumeValueOrDie ( ) , error , <nl> reference_preprocessor ) ; <nl> : : testing : : AssertionResult HloTestBase : : RunAndCompareNoHloPasses ( <nl> auto module_or_status = <nl> HloRunner : : CreateModuleFromString ( hlo_string , GetDebugOptionsForTest ( ) ) ; <nl> if ( ! module_or_status . ok ( ) ) { <nl> - return : : testing : : AssertionFailure ( ) < < " failed parsing hlo textual IR " ; <nl> + return : : testing : : AssertionFailure ( ) <nl> + < < " Error while parsing HLO text format : " <nl> + < < module_or_status . status ( ) . ToString ( ) ; <nl> } <nl> return RunAndCompareNoHloPasses ( module_or_status . ConsumeValueOrDie ( ) , error , <nl> reference_preprocessor ) ; <nl> mmm a / tensorflow / compiler / xla / tests / test_utils . cc <nl> ppp b / tensorflow / compiler / xla / tests / test_utils . cc <nl> std : : vector < HloInstruction * > FindConstrainedUses ( <nl> StatusOr < std : : unique_ptr < Literal > > CreateLiteralForConstrainedUses ( <nl> const tensorflow : : gtl : : ArraySlice < HloInstruction * > constrained_uses , <nl> const HloInstruction & param ) { <nl> - const auto count = constrained_uses . size ( ) ; <nl> - if ( count > 1 ) { <nl> - return Unimplemented ( " multiple constrained uses not yet supported " ) ; <nl> - } <nl> + HloInstruction * needs_index = nullptr ; <nl> + HloInstruction * needs_zero = nullptr ; <nl> + for ( HloInstruction * use : constrained_uses ) { <nl> + switch ( use - > opcode ( ) ) { <nl> + case HloOpcode : : kDynamicSlice : <nl> + case HloOpcode : : kDynamicUpdateSlice : <nl> + TF_RET_CHECK ( ShapeUtil : : Equal ( param . shape ( ) , use - > operand ( 0 ) - > shape ( ) ) ) ; <nl> + if ( needs_index ! = nullptr & & <nl> + ! ShapeUtil : : Equal ( needs_index - > shape ( ) , use - > shape ( ) ) ) { <nl> + return Unimplemented ( <nl> + " Conflicting operand generation slice index constraints \ n " ) ; <nl> + } <nl> + needs_index = use ; <nl> + break ; <nl> <nl> - if ( count = = 0 ) { <nl> - return MakeFakeLiteral ( param . shape ( ) ) ; <nl> - } <nl> + case HloOpcode : : kReduce : <nl> + case HloOpcode : : kReduceWindow : <nl> + case HloOpcode : : kSelectAndScatter : <nl> + needs_zero = use ; <nl> + break ; <nl> <nl> - const HloInstruction * const use = constrained_uses [ 0 ] ; <nl> - switch ( use - > opcode ( ) ) { <nl> - case HloOpcode : : kDynamicSlice : <nl> - case HloOpcode : : kDynamicUpdateSlice : <nl> - return MakeRandomNonwrappingSliceIndex ( use - > operand ( 0 ) - > shape ( ) , <nl> - use - > shape ( ) ) ; <nl> - case HloOpcode : : kReduce : <nl> - case HloOpcode : : kReduceWindow : <nl> - case HloOpcode : : kSelectAndScatter : <nl> - return Literal : : CreateFromShape ( param . 
shape ( ) ) ; <nl> - default : <nl> - return Unimplemented ( " constrained use given ; no equivalent literal " ) ; <nl> + default : <nl> + return Unimplemented ( <nl> + " Constrained operand generation not implemented for % s . " , <nl> + use - > ToString ( ) . c_str ( ) ) ; <nl> + } <nl> + } <nl> + if ( needs_index ! = nullptr & & needs_zero ! = nullptr ) { <nl> + return Unimplemented ( <nl> + " Conflicting operand generation constraints . \ nNeeds index : % s \ nNeeds " <nl> + " zero : % s \ n " , <nl> + needs_index - > ToString ( ) . c_str ( ) , needs_zero - > ToString ( ) . c_str ( ) ) ; <nl> + } <nl> + if ( needs_index ! = nullptr ) { <nl> + return MakeRandomNonwrappingSliceIndex ( param . shape ( ) , needs_index - > shape ( ) ) ; <nl> + } else if ( needs_zero ! = nullptr ) { <nl> + return Literal : : CreateFromShape ( param . shape ( ) ) ; <nl> + } else { <nl> + return MakeFakeLiteral ( param . shape ( ) ) ; <nl> } <nl> } <nl> <nl>
* HloTestBase now prints out the HLO parser error message when there is one .
tensorflow/tensorflow
f379d3369505457928ccda5adf72583f95709026
2017-12-12T06:35:08Z
mmm a / Code / CryEngine / CryAudioSystem / Common . h <nl> ppp b / Code / CryEngine / CryAudioSystem / Common . h <nl> enum class ESystemStates : EnumFlagsType <nl> { <nl> None = 0 , <nl> ImplShuttingDown = BIT ( 0 ) , <nl> + IsMuted = BIT ( 1 ) , <nl> # if defined ( INCLUDE_AUDIO_PRODUCTION_CODE ) <nl> - PoolsAllocated = BIT ( 1 ) , <nl> - IsMuted = BIT ( 2 ) , <nl> + IsPaused = BIT ( 2 ) , <nl> + PoolsAllocated = BIT ( 3 ) , <nl> # endif / / INCLUDE_AUDIO_PRODUCTION_CODE <nl> } ; <nl> CRY_CREATE_ENUM_FLAG_OPERATORS ( ESystemStates ) ; <nl> mmm a / Code / CryEngine / CryAudioSystem / Common / IImpl . h <nl> ppp b / Code / CryEngine / CryAudioSystem / Common / IImpl . h <nl> struct IImpl <nl> <nl> / * * <nl> * This method is called every time the main Game ( or Editor ) window loses focus . <nl> - * @ return ERequestStatus : : Success if the action was successful , ERequestStatus : : Failure otherwise . <nl> + * @ return void <nl> * @ see OnGetFocus <nl> * / <nl> - virtual ERequestStatus OnLoseFocus ( ) = 0 ; <nl> + virtual void OnLoseFocus ( ) = 0 ; <nl> <nl> / * * <nl> * This method is called every time the main Game ( or Editor ) window gets focus . <nl> - * @ return ERequestStatus : : Success if the action was successful , ERequestStatus : : Failure otherwise . <nl> + * @ return void <nl> * @ see OnLoseFocus <nl> * / <nl> - virtual ERequestStatus OnGetFocus ( ) = 0 ; <nl> + virtual void OnGetFocus ( ) = 0 ; <nl> <nl> / * * <nl> * Mute all sounds , after this call there should be no audio coming from the audio middleware . <nl> - * @ return ERequestStatus : : Success if the action was successful , ERequestStatus : : Failure otherwise . <nl> + * @ return void <nl> * @ see UnmuteAll , StopAllSounds <nl> * / <nl> - virtual ERequestStatus MuteAll ( ) = 0 ; <nl> + virtual void MuteAll ( ) = 0 ; <nl> <nl> / * * <nl> * Restore the audio output of the audio middleware after a call to MuteAll ( ) . <nl> - * @ return ERequestStatus : : Success if the action was successful , ERequestStatus : : Failure otherwise . <nl> + * @ return void <nl> * @ see MuteAll <nl> * / <nl> - virtual ERequestStatus UnmuteAll ( ) = 0 ; <nl> + virtual void UnmuteAll ( ) = 0 ; <nl> <nl> / * * <nl> * Pauses playback of all audio events . <nl> - * @ return ERequestStatus : : Success if the action was successful , ERequestStatus : : Failure otherwise . <nl> + * @ return void <nl> * @ see ResumeAll <nl> * / <nl> - virtual ERequestStatus PauseAll ( ) = 0 ; <nl> + virtual void PauseAll ( ) = 0 ; <nl> <nl> / * * <nl> * Resumes playback of all audio events . <nl> - * @ return ERequestStatus : : Success if the action was successful , ERequestStatus : : Failure otherwise . <nl> + * @ return void <nl> * @ see PauseAll <nl> * / <nl> - virtual ERequestStatus ResumeAll ( ) = 0 ; <nl> + virtual void ResumeAll ( ) = 0 ; <nl> <nl> / * * <nl> * Stop all currently playing sounds . Has no effect on anything triggered after this method is called . <nl> mmm a / Code / CryEngine / CryAudioSystem / Impl . cpp <nl> ppp b / Code / CryEngine / CryAudioSystem / Impl . 
cpp <nl> void CImpl : : OnAfterLibraryDataChanged ( ) <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : OnLoseFocus ( ) <nl> + void CImpl : : OnLoseFocus ( ) <nl> { <nl> - return ERequestStatus : : Success ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : OnGetFocus ( ) <nl> + void CImpl : : OnGetFocus ( ) <nl> { <nl> - return ERequestStatus : : Success ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : MuteAll ( ) <nl> + void CImpl : : MuteAll ( ) <nl> { <nl> - return ERequestStatus : : Success ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : UnmuteAll ( ) <nl> + void CImpl : : UnmuteAll ( ) <nl> { <nl> - return ERequestStatus : : Success ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : PauseAll ( ) <nl> + void CImpl : : PauseAll ( ) <nl> { <nl> - return ERequestStatus : : Success ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : ResumeAll ( ) <nl> + void CImpl : : ResumeAll ( ) <nl> { <nl> - return ERequestStatus : : Success ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> mmm a / Code / CryEngine / CryAudioSystem / Impl . h <nl> ppp b / Code / CryEngine / CryAudioSystem / Impl . h <nl> class CImpl final : public IImpl <nl> virtual void SetLibraryData ( XmlNodeRef const pNode , bool const isLevelSpecific ) override ; <nl> virtual void OnBeforeLibraryDataChanged ( ) override ; <nl> virtual void OnAfterLibraryDataChanged ( ) override ; <nl> - virtual ERequestStatus OnLoseFocus ( ) override ; <nl> - virtual ERequestStatus OnGetFocus ( ) override ; <nl> - virtual ERequestStatus MuteAll ( ) override ; <nl> - virtual ERequestStatus UnmuteAll ( ) override ; <nl> - virtual ERequestStatus PauseAll ( ) override ; <nl> - virtual ERequestStatus ResumeAll ( ) override ; <nl> + virtual void OnLoseFocus ( ) override ; <nl> + virtual void OnGetFocus ( ) override ; <nl> + virtual void MuteAll ( ) override ; <nl> + virtual void UnmuteAll ( ) override ; <nl> + virtual void PauseAll ( ) override ; <nl> + virtual void ResumeAll ( ) override ; <nl> virtual ERequestStatus StopAllSounds ( ) override ; <nl> virtual void SetGlobalParameter ( IParameter const * const pIParameter , float const value ) override ; <nl> virtual void SetGlobalSwitchState ( ISwitchState const * const pISwitchState ) override ; <nl> mmm a / Code / CryEngine / CryAudioSystem / MuteAllTrigger . cpp <nl> ppp b / Code / CryEngine / CryAudioSystem / MuteAllTrigger . 
cpp <nl> void CMuteAllTrigger : : Execute ( ) const <nl> { <nl> g_pIImpl - > MuteAll ( ) ; <nl> } <nl> - <nl> - # if defined ( INCLUDE_AUDIO_PRODUCTION_CODE ) <nl> - g_systemStates | = ESystemStates : : IsMuted ; <nl> - # endif / / INCLUDE_AUDIO_PRODUCTION_CODE <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> mmm a / Code / CryEngine / CryAudioSystem / System . cpp <nl> ppp b / Code / CryEngine / CryAudioSystem / System . cpp <nl> void CSystem : : StopTrigger ( ControlId const triggerId / * = CryAudio : : InvalidContro <nl> void CSystem : : ExecutePreviewTrigger ( ControlId const triggerId ) <nl> { <nl> # if defined ( INCLUDE_AUDIO_PRODUCTION_CODE ) <nl> - SSystemRequestData < ESystemRequestType : : ExecutePreviewTrigger > const requestData ( triggerId ) ; <nl> - CRequest const request ( & requestData ) ; <nl> - PushRequest ( request ) ; <nl> + switch ( triggerId ) <nl> + { <nl> + case LoseFocusTriggerId : <nl> + { <nl> + SSystemRequestData < ESystemRequestType : : ExecuteDefaultTrigger > const requestData ( EDefaultTriggerType : : LoseFocus ) ; <nl> + CRequest const request ( & requestData ) ; <nl> + PushRequest ( request ) ; <nl> + <nl> + break ; <nl> + } <nl> + case GetFocusTriggerId : <nl> + { <nl> + SSystemRequestData < ESystemRequestType : : ExecuteDefaultTrigger > const requestData ( EDefaultTriggerType : : GetFocus ) ; <nl> + CRequest const request ( & requestData ) ; <nl> + PushRequest ( request ) ; <nl> + <nl> + break ; <nl> + } <nl> + case MuteAllTriggerId : <nl> + { <nl> + SSystemRequestData < ESystemRequestType : : ExecuteDefaultTrigger > const requestData ( EDefaultTriggerType : : MuteAll ) ; <nl> + CRequest const request ( & requestData ) ; <nl> + PushRequest ( request ) ; <nl> + <nl> + break ; <nl> + } <nl> + case UnmuteAllTriggerId : <nl> + { <nl> + SSystemRequestData < ESystemRequestType : : ExecuteDefaultTrigger > const requestData ( EDefaultTriggerType : : UnmuteAll ) ; <nl> + CRequest const request ( & requestData ) ; <nl> + PushRequest ( request ) ; <nl> + <nl> + break ; <nl> + } <nl> + case PauseAllTriggerId : <nl> + { <nl> + SSystemRequestData < ESystemRequestType : : ExecuteDefaultTrigger > const requestData ( EDefaultTriggerType : : PauseAll ) ; <nl> + CRequest const request ( & requestData ) ; <nl> + PushRequest ( request ) ; <nl> + <nl> + break ; <nl> + } <nl> + case ResumeAllTriggerId : <nl> + { <nl> + SSystemRequestData < ESystemRequestType : : ExecuteDefaultTrigger > const requestData ( EDefaultTriggerType : : ResumeAll ) ; <nl> + CRequest const request ( & requestData ) ; <nl> + PushRequest ( request ) ; <nl> + <nl> + break ; <nl> + } <nl> + default : <nl> + { <nl> + SSystemRequestData < ESystemRequestType : : ExecutePreviewTrigger > const requestData ( triggerId ) ; <nl> + CRequest const request ( & requestData ) ; <nl> + PushRequest ( request ) ; <nl> + <nl> + break ; <nl> + } <nl> + } <nl> # endif / / INCLUDE_AUDIO_PRODUCTION_CODE <nl> } <nl> <nl> ERequestStatus CSystem : : ProcessSystemRequest ( CRequest const & request ) <nl> { <nl> case EDefaultTriggerType : : LoseFocus : <nl> { <nl> - g_loseFocusTrigger . Execute ( ) ; <nl> + if ( ( g_systemStates & ESystemStates : : IsMuted ) = = 0 ) <nl> + { <nl> + g_loseFocusTrigger . Execute ( ) ; <nl> + } <nl> + <nl> result = ERequestStatus : : Success ; <nl> <nl> break ; <nl> } <nl> case EDefaultTriggerType : : GetFocus : <nl> { <nl> - g_getFocusTrigger . 
Execute ( ) ; <nl> + if ( ( g_systemStates & ESystemStates : : IsMuted ) = = 0 ) <nl> + { <nl> + g_getFocusTrigger . Execute ( ) ; <nl> + } <nl> + <nl> result = ERequestStatus : : Success ; <nl> <nl> break ; <nl> ERequestStatus CSystem : : ProcessSystemRequest ( CRequest const & request ) <nl> { <nl> g_muteAllTrigger . Execute ( ) ; <nl> result = ERequestStatus : : Success ; <nl> + g_systemStates | = ESystemStates : : IsMuted ; <nl> <nl> break ; <nl> } <nl> ERequestStatus CSystem : : ProcessSystemRequest ( CRequest const & request ) <nl> { <nl> g_unmuteAllTrigger . Execute ( ) ; <nl> result = ERequestStatus : : Success ; <nl> + g_systemStates & = ~ ESystemStates : : IsMuted ; <nl> <nl> break ; <nl> } <nl> ERequestStatus CSystem : : ProcessSystemRequest ( CRequest const & request ) <nl> g_pauseAllTrigger . Execute ( ) ; <nl> result = ERequestStatus : : Success ; <nl> <nl> + # if defined ( INCLUDE_AUDIO_PRODUCTION_CODE ) <nl> + g_systemStates | = ESystemStates : : IsPaused ; <nl> + # endif / / INCLUDE_AUDIO_PRODUCTION_CODE <nl> + <nl> break ; <nl> } <nl> case EDefaultTriggerType : : ResumeAll : <nl> ERequestStatus CSystem : : ProcessSystemRequest ( CRequest const & request ) <nl> g_resumeAllTrigger . Execute ( ) ; <nl> result = ERequestStatus : : Success ; <nl> <nl> + # if defined ( INCLUDE_AUDIO_PRODUCTION_CODE ) <nl> + g_systemStates & = ~ ESystemStates : : IsPaused ; <nl> + # endif / / INCLUDE_AUDIO_PRODUCTION_CODE <nl> + <nl> break ; <nl> } <nl> default : <nl> void CSystem : : HandleDrawDebug ( ) <nl> memInfoString . Format ( " % u KiB " , memAlloc > > 10 ) ; <nl> } <nl> <nl> + char const * const szMuted = ( ( g_systemStates & ESystemStates : : IsMuted ) ! = 0 ) ? " - Muted " : " " ; <nl> + char const * const szPaused = ( ( g_systemStates & ESystemStates : : IsPaused ) ! = 0 ) ? " - Paused " : " " ; <nl> + <nl> pAuxGeom - > Draw2dLabel ( posX , posY , Debug : : g_systemHeaderFontSize , Debug : : g_globalColorHeader . data ( ) , false , <nl> - " Audio System ( Total Memory : % s ) " , memInfoString . c_str ( ) ) ; <nl> + " Audio System ( Total Memory : % s ) % s % s " , memInfoString . c_str ( ) , szMuted , szPaused ) ; <nl> <nl> if ( ( g_cvars . m_drawDebug & Debug : : EDrawFilter : : DetailedMemoryInfo ) ! = 0 ) <nl> { <nl> void CSystem : : HandleRetriggerControls ( ) <nl> { <nl> ExecuteDefaultTrigger ( EDefaultTriggerType : : MuteAll ) ; <nl> } <nl> + <nl> + if ( ( g_systemStates & ESystemStates : : IsPaused ) ! = 0 ) <nl> + { <nl> + ExecuteDefaultTrigger ( EDefaultTriggerType : : PauseAll ) ; <nl> + } <nl> } <nl> # endif / / INCLUDE_AUDIO_PRODUCTION_CODE <nl> } / / namespace CryAudio <nl> mmm a / Code / CryEngine / CryAudioSystem / UnmuteAllTrigger . cpp <nl> ppp b / Code / CryEngine / CryAudioSystem / UnmuteAllTrigger . cpp <nl> void CUnmuteAllTrigger : : Execute ( ) const <nl> { <nl> g_pIImpl - > UnmuteAll ( ) ; <nl> } <nl> - <nl> - # if defined ( INCLUDE_AUDIO_PRODUCTION_CODE ) <nl> - g_systemStates & = ~ ESystemStates : : IsMuted ; <nl> - # endif / / INCLUDE_AUDIO_PRODUCTION_CODE <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> mmm a / Code / CryEngine / CryAudioSystem / implementations / CryAudioImplAdx2 / Impl . cpp <nl> ppp b / Code / CryEngine / CryAudioSystem / implementations / CryAudioImplAdx2 / Impl . 
cpp <nl> void userFree ( void * const pObj , void * const pMem ) <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> CImpl : : CImpl ( ) <nl> - : m_isMuted ( false ) <nl> - , m_pAcfBuffer ( nullptr ) <nl> + : m_pAcfBuffer ( nullptr ) <nl> , m_dbasId ( CRIATOMEXDBAS_ILLEGAL_ID ) <nl> # if defined ( INCLUDE_ADX2_IMPL_PRODUCTION_CODE ) <nl> , m_name ( " Adx2 ( " CRI_ATOM_VER_NUM " ) " ) <nl> void CImpl : : OnAfterLibraryDataChanged ( ) <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : OnLoseFocus ( ) <nl> + void CImpl : : OnLoseFocus ( ) <nl> { <nl> - if ( ! m_isMuted ) <nl> - { <nl> - MuteAllObjects ( CRI_TRUE ) ; <nl> - } <nl> - <nl> - return ERequestStatus : : Success ; <nl> + MuteAllObjects ( CRI_TRUE ) ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : OnGetFocus ( ) <nl> + void CImpl : : OnGetFocus ( ) <nl> { <nl> - if ( ! m_isMuted ) <nl> - { <nl> - MuteAllObjects ( CRI_FALSE ) ; <nl> - } <nl> - <nl> - return ERequestStatus : : Success ; <nl> + MuteAllObjects ( CRI_FALSE ) ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : MuteAll ( ) <nl> + void CImpl : : MuteAll ( ) <nl> { <nl> MuteAllObjects ( CRI_TRUE ) ; <nl> - m_isMuted = true ; <nl> - return ERequestStatus : : Success ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : UnmuteAll ( ) <nl> + void CImpl : : UnmuteAll ( ) <nl> { <nl> MuteAllObjects ( CRI_FALSE ) ; <nl> - m_isMuted = false ; <nl> - return ERequestStatus : : Success ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : PauseAll ( ) <nl> + void CImpl : : PauseAll ( ) <nl> { <nl> PauseAllObjects ( CRI_TRUE ) ; <nl> - return ERequestStatus : : Success ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : ResumeAll ( ) <nl> + void CImpl : : ResumeAll ( ) <nl> { <nl> PauseAllObjects ( CRI_FALSE ) ; <nl> - return ERequestStatus : : Success ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> mmm a / Code / CryEngine / CryAudioSystem / implementations / CryAudioImplAdx2 / Impl . h <nl> ppp b / Code / CryEngine / CryAudioSystem / implementations / CryAudioImplAdx2 / Impl . 
h <nl> class CImpl final : public IImpl <nl> virtual void SetLibraryData ( XmlNodeRef const pNode , bool const isLevelSpecific ) override ; <nl> virtual void OnBeforeLibraryDataChanged ( ) override ; <nl> virtual void OnAfterLibraryDataChanged ( ) override ; <nl> - virtual ERequestStatus OnLoseFocus ( ) override ; <nl> - virtual ERequestStatus OnGetFocus ( ) override ; <nl> - virtual ERequestStatus MuteAll ( ) override ; <nl> - virtual ERequestStatus UnmuteAll ( ) override ; <nl> - virtual ERequestStatus PauseAll ( ) override ; <nl> - virtual ERequestStatus ResumeAll ( ) override ; <nl> + virtual void OnLoseFocus ( ) override ; <nl> + virtual void OnGetFocus ( ) override ; <nl> + virtual void MuteAll ( ) override ; <nl> + virtual void UnmuteAll ( ) override ; <nl> + virtual void PauseAll ( ) override ; <nl> + virtual void ResumeAll ( ) override ; <nl> virtual ERequestStatus StopAllSounds ( ) override ; <nl> virtual void SetGlobalParameter ( IParameter const * const pIParameter , float const value ) override ; <nl> virtual void SetGlobalSwitchState ( ISwitchState const * const pISwitchState ) override ; <nl> class CImpl final : public IImpl <nl> void MuteAllObjects ( CriBool const shouldMute ) ; <nl> void PauseAllObjects ( CriBool const shouldPause ) ; <nl> <nl> - bool m_isMuted ; <nl> - <nl> CryFixedStringT < MaxFilePathLength > m_regularSoundBankFolder ; <nl> CryFixedStringT < MaxFilePathLength > m_localizedSoundBankFolder ; <nl> CryFixedStringT < MaxControlNameLength > m_language ; <nl> mmm a / Code / CryEngine / CryAudioSystem / implementations / CryAudioImplFmod / Impl . cpp <nl> ppp b / Code / CryEngine / CryAudioSystem / implementations / CryAudioImplFmod / Impl . cpp <nl> CImpl : : CImpl ( ) <nl> , m_pMasterAssetsBank ( nullptr ) <nl> , m_pMasterStreamsBank ( nullptr ) <nl> , m_pMasterStringsBank ( nullptr ) <nl> - , m_isMuted ( false ) <nl> { <nl> } <nl> <nl> void CImpl : : OnAfterLibraryDataChanged ( ) <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : OnLoseFocus ( ) <nl> + void CImpl : : OnLoseFocus ( ) <nl> { <nl> - if ( ! m_isMuted ) <nl> - { <nl> - MuteMasterBus ( true ) ; <nl> - } <nl> - <nl> - return ERequestStatus : : Success ; <nl> + MuteMasterBus ( true ) ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : OnGetFocus ( ) <nl> + void CImpl : : OnGetFocus ( ) <nl> { <nl> - if ( ! 
m_isMuted ) <nl> - { <nl> - MuteMasterBus ( false ) ; <nl> - } <nl> - <nl> - return ERequestStatus : : Success ; <nl> + MuteMasterBus ( false ) ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : MuteAll ( ) <nl> + void CImpl : : MuteAll ( ) <nl> { <nl> MuteMasterBus ( true ) ; <nl> - m_isMuted = true ; <nl> - return ERequestStatus : : Success ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : UnmuteAll ( ) <nl> + void CImpl : : UnmuteAll ( ) <nl> { <nl> MuteMasterBus ( false ) ; <nl> - m_isMuted = false ; <nl> - return ERequestStatus : : Success ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : PauseAll ( ) <nl> + void CImpl : : PauseAll ( ) <nl> { <nl> PauseMasterBus ( true ) ; <nl> - return ERequestStatus : : Success ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : ResumeAll ( ) <nl> + void CImpl : : ResumeAll ( ) <nl> { <nl> PauseMasterBus ( false ) ; <nl> - return ERequestStatus : : Success ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> mmm a / Code / CryEngine / CryAudioSystem / implementations / CryAudioImplFmod / Impl . h <nl> ppp b / Code / CryEngine / CryAudioSystem / implementations / CryAudioImplFmod / Impl . h <nl> class CImpl final : public IImpl <nl> virtual void SetLibraryData ( XmlNodeRef const pNode , bool const isLevelSpecific ) override ; <nl> virtual void OnBeforeLibraryDataChanged ( ) override ; <nl> virtual void OnAfterLibraryDataChanged ( ) override ; <nl> - virtual ERequestStatus OnLoseFocus ( ) override ; <nl> - virtual ERequestStatus OnGetFocus ( ) override ; <nl> - virtual ERequestStatus MuteAll ( ) override ; <nl> - virtual ERequestStatus UnmuteAll ( ) override ; <nl> - virtual ERequestStatus PauseAll ( ) override ; <nl> - virtual ERequestStatus ResumeAll ( ) override ; <nl> + virtual void OnLoseFocus ( ) override ; <nl> + virtual void OnGetFocus ( ) override ; <nl> + virtual void MuteAll ( ) override ; <nl> + virtual void UnmuteAll ( ) override ; <nl> + virtual void PauseAll ( ) override ; <nl> + virtual void ResumeAll ( ) override ; <nl> virtual ERequestStatus StopAllSounds ( ) override ; <nl> virtual void SetGlobalParameter ( IParameter const * const pIParameter , float const value ) override ; <nl> virtual void SetGlobalSwitchState ( ISwitchState const * const pISwitchState ) override ; <nl> class CImpl final : public IImpl <nl> <nl> FMOD_RESULT LoadBankCustom ( char const * const szFileName , FMOD : : Studio : : Bank * * ppBank ) ; <nl> <nl> - bool m_isMuted ; <nl> - <nl> CryFixedStringT < MaxFilePathLength > m_regularSoundBankFolder ; <nl> CryFixedStringT < MaxFilePathLength > m_localizedSoundBankFolder ; <nl> <nl> mmm a / Code / CryEngine / CryAudioSystem / implementations / CryAudioImplPortAudio / Impl . cpp <nl> ppp b / Code / CryEngine / CryAudioSystem / implementations / CryAudioImplPortAudio / Impl . 
cpp <nl> void CImpl : : OnAfterLibraryDataChanged ( ) <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : OnLoseFocus ( ) <nl> + void CImpl : : OnLoseFocus ( ) <nl> { <nl> - return ERequestStatus : : Success ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : OnGetFocus ( ) <nl> + void CImpl : : OnGetFocus ( ) <nl> { <nl> - return ERequestStatus : : Success ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : MuteAll ( ) <nl> + void CImpl : : MuteAll ( ) <nl> { <nl> - return ERequestStatus : : Success ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : UnmuteAll ( ) <nl> + void CImpl : : UnmuteAll ( ) <nl> { <nl> - return ERequestStatus : : Success ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : PauseAll ( ) <nl> + void CImpl : : PauseAll ( ) <nl> { <nl> - return ERequestStatus : : Success ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : ResumeAll ( ) <nl> + void CImpl : : ResumeAll ( ) <nl> { <nl> - return ERequestStatus : : Success ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> mmm a / Code / CryEngine / CryAudioSystem / implementations / CryAudioImplPortAudio / Impl . h <nl> ppp b / Code / CryEngine / CryAudioSystem / implementations / CryAudioImplPortAudio / Impl . h <nl> class CImpl final : public IImpl <nl> virtual void SetLibraryData ( XmlNodeRef const pNode , bool const isLevelSpecific ) override ; <nl> virtual void OnBeforeLibraryDataChanged ( ) override ; <nl> virtual void OnAfterLibraryDataChanged ( ) override ; <nl> - virtual ERequestStatus OnLoseFocus ( ) override ; <nl> - virtual ERequestStatus OnGetFocus ( ) override ; <nl> - virtual ERequestStatus MuteAll ( ) override ; <nl> - virtual ERequestStatus UnmuteAll ( ) override ; <nl> - virtual ERequestStatus PauseAll ( ) override ; <nl> - virtual ERequestStatus ResumeAll ( ) override ; <nl> + virtual void OnLoseFocus ( ) override ; <nl> + virtual void OnGetFocus ( ) override ; <nl> + virtual void MuteAll ( ) override ; <nl> + virtual void UnmuteAll ( ) override ; <nl> + virtual void PauseAll ( ) override ; <nl> + virtual void ResumeAll ( ) override ; <nl> virtual ERequestStatus StopAllSounds ( ) override ; <nl> virtual void SetGlobalParameter ( IParameter const * const pIParameter , float const value ) override ; <nl> virtual void SetGlobalSwitchState ( ISwitchState const * const pISwitchState ) override ; <nl> mmm a / Code / CryEngine / CryAudioSystem / implementations / CryAudioImplSDLMixer / Impl . cpp <nl> ppp b / Code / CryEngine / CryAudioSystem / implementations / CryAudioImplSDLMixer / Impl . 
cpp <nl> void OnStandaloneFileFinished ( CryAudio : : CStandaloneFile & standaloneFile , const c <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> CImpl : : CImpl ( ) <nl> : m_pCVarFileExtension ( nullptr ) <nl> - , m_isMuted ( false ) <nl> # if defined ( INCLUDE_SDLMIXER_IMPL_PRODUCTION_CODE ) <nl> , m_name ( " SDL Mixer 2 . 0 . 2 " ) <nl> # endif / / INCLUDE_SDLMIXER_IMPL_PRODUCTION_CODE <nl> void CImpl : : OnAfterLibraryDataChanged ( ) <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : OnLoseFocus ( ) <nl> + void CImpl : : OnLoseFocus ( ) <nl> { <nl> - if ( ! m_isMuted ) <nl> - { <nl> - SoundEngine : : Mute ( ) ; <nl> - } <nl> - <nl> - return ERequestStatus : : Success ; <nl> + SoundEngine : : Mute ( ) ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : OnGetFocus ( ) <nl> + void CImpl : : OnGetFocus ( ) <nl> { <nl> - if ( ! m_isMuted ) <nl> - { <nl> - SoundEngine : : UnMute ( ) ; <nl> - } <nl> - <nl> - return ERequestStatus : : Success ; <nl> + SoundEngine : : UnMute ( ) ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : MuteAll ( ) <nl> + void CImpl : : MuteAll ( ) <nl> { <nl> SoundEngine : : Mute ( ) ; <nl> - m_isMuted = true ; <nl> - return ERequestStatus : : Success ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : UnmuteAll ( ) <nl> + void CImpl : : UnmuteAll ( ) <nl> { <nl> SoundEngine : : UnMute ( ) ; <nl> - m_isMuted = false ; <nl> - return ERequestStatus : : Success ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : PauseAll ( ) <nl> + void CImpl : : PauseAll ( ) <nl> { <nl> SoundEngine : : Pause ( ) ; <nl> - return ERequestStatus : : Success ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : ResumeAll ( ) <nl> + void CImpl : : ResumeAll ( ) <nl> { <nl> SoundEngine : : Resume ( ) ; <nl> - return ERequestStatus : : Success ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> mmm a / Code / CryEngine / CryAudioSystem / implementations / CryAudioImplSDLMixer / Impl . h <nl> ppp b / Code / CryEngine / CryAudioSystem / implementations / CryAudioImplSDLMixer / Impl . 
h <nl> class CImpl final : public IImpl <nl> virtual void SetLibraryData ( XmlNodeRef const pNode , bool const isLevelSpecific ) override ; <nl> virtual void OnBeforeLibraryDataChanged ( ) override ; <nl> virtual void OnAfterLibraryDataChanged ( ) override ; <nl> - virtual ERequestStatus OnLoseFocus ( ) override ; <nl> - virtual ERequestStatus OnGetFocus ( ) override ; <nl> - virtual ERequestStatus MuteAll ( ) override ; <nl> - virtual ERequestStatus UnmuteAll ( ) override ; <nl> - virtual ERequestStatus PauseAll ( ) override ; <nl> - virtual ERequestStatus ResumeAll ( ) override ; <nl> + virtual void OnLoseFocus ( ) override ; <nl> + virtual void OnGetFocus ( ) override ; <nl> + virtual void MuteAll ( ) override ; <nl> + virtual void UnmuteAll ( ) override ; <nl> + virtual void PauseAll ( ) override ; <nl> + virtual void ResumeAll ( ) override ; <nl> virtual ERequestStatus StopAllSounds ( ) override ; <nl> virtual void SetGlobalParameter ( IParameter const * const pIParameter , float const value ) override ; <nl> virtual void SetGlobalSwitchState ( ISwitchState const * const pISwitchState ) override ; <nl> class CImpl final : public IImpl <nl> size_t m_memoryAlignment ; <nl> string m_language ; <nl> <nl> - bool m_isMuted ; <nl> - <nl> ICVar * m_pCVarFileExtension ; <nl> <nl> # if defined ( INCLUDE_SDLMIXER_IMPL_PRODUCTION_CODE ) <nl> mmm a / Code / CryEngine / CryAudioSystem / implementations / CryAudioImplWwise / Impl . cpp <nl> ppp b / Code / CryEngine / CryAudioSystem / implementations / CryAudioImplWwise / Impl . cpp <nl> void CImpl : : OnAfterLibraryDataChanged ( ) <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : OnLoseFocus ( ) <nl> + void CImpl : : OnLoseFocus ( ) <nl> { <nl> / / With Wwise we drive this via events . <nl> - return ERequestStatus : : Success ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : OnGetFocus ( ) <nl> + void CImpl : : OnGetFocus ( ) <nl> { <nl> / / With Wwise we drive this via events . <nl> - return ERequestStatus : : Success ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : MuteAll ( ) <nl> + void CImpl : : MuteAll ( ) <nl> { <nl> / / With Wwise we drive this via events . <nl> - return ERequestStatus : : Success ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : UnmuteAll ( ) <nl> + void CImpl : : UnmuteAll ( ) <nl> { <nl> / / With Wwise we drive this via events . <nl> - return ERequestStatus : : Success ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : PauseAll ( ) <nl> + void CImpl : : PauseAll ( ) <nl> { <nl> / / With Wwise we drive this via events . 
<nl> - return ERequestStatus : : Success ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - ERequestStatus CImpl : : ResumeAll ( ) <nl> + void CImpl : : ResumeAll ( ) <nl> { <nl> / / With Wwise we drive this via events . <nl> - return ERequestStatus : : Success ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> mmm a / Code / CryEngine / CryAudioSystem / implementations / CryAudioImplWwise / Impl . h <nl> ppp b / Code / CryEngine / CryAudioSystem / implementations / CryAudioImplWwise / Impl . h <nl> class CImpl final : public IImpl <nl> virtual void SetLibraryData ( XmlNodeRef const pNode , bool const isLevelSpecific ) override ; <nl> virtual void OnBeforeLibraryDataChanged ( ) override ; <nl> virtual void OnAfterLibraryDataChanged ( ) override ; <nl> - virtual ERequestStatus OnLoseFocus ( ) override ; <nl> - virtual ERequestStatus OnGetFocus ( ) override ; <nl> - virtual ERequestStatus MuteAll ( ) override ; <nl> - virtual ERequestStatus UnmuteAll ( ) override ; <nl> - virtual ERequestStatus PauseAll ( ) override ; <nl> - virtual ERequestStatus ResumeAll ( ) override ; <nl> + virtual void OnLoseFocus ( ) override ; <nl> + virtual void OnGetFocus ( ) override ; <nl> + virtual void MuteAll ( ) override ; <nl> + virtual void UnmuteAll ( ) override ; <nl> + virtual void PauseAll ( ) override ; <nl> + virtual void ResumeAll ( ) override ; <nl> virtual ERequestStatus StopAllSounds ( ) override ; <nl> virtual void SetGlobalParameter ( IParameter const * const pIParameter , float const value ) override ; <nl> virtual void SetGlobalSwitchState ( ISwitchState const * const pISwitchState ) override ; <nl>
! XB ( Audio ) ( CE - 18880 ) AUDIO : Audio will be played again after you mute it .
CRYTEK/CRYENGINE
926fab38b3e4f1c2460751db3efd7a35d4858fd4
2018-11-07T14:59:08Z
mmm a / lib / Sema / ConstraintSystem . cpp <nl> ppp b / lib / Sema / ConstraintSystem . cpp <nl> void ConstraintSystem : : openGeneric ( <nl> if ( sig = = nullptr ) <nl> return ; <nl> <nl> - auto locatorPtr = getConstraintLocator ( locator ) ; <nl> - <nl> - / / Create the type variables for the generic parameters . <nl> - for ( auto gp : sig - > getGenericParams ( ) ) { <nl> - locatorPtr = getConstraintLocator ( <nl> - locator . withPathElement ( LocatorPathElt ( gp ) ) ) ; <nl> - <nl> - auto typeVar = createTypeVariable ( locatorPtr , <nl> - TVO_PrefersSubtypeBinding ) ; <nl> - auto result = replacements . insert ( <nl> - std : : make_pair ( cast < GenericTypeParamType > ( gp - > getCanonicalType ( ) ) , <nl> - typeVar ) ) ; <nl> - assert ( result . second ) ; <nl> - ( void ) result ; <nl> - } <nl> - <nl> - / / Remember that any new constraints generated by opening this generic are <nl> - / / due to the opening . <nl> - locatorPtr = getConstraintLocator ( <nl> - locator . withPathElement ( LocatorPathElt : : getOpenedGeneric ( sig ) ) ) ; <nl> - <nl> - bindArchetypesFromContext ( * this , outerDC , locatorPtr , replacements ) ; <nl> + openGenericParameters ( outerDC , sig , replacements , locator ) ; <nl> <nl> if ( skipGenericRequirements ) <nl> return ; <nl> void ConstraintSystem : : openGeneric ( <nl> [ & ] ( Type type ) { return openType ( type , replacements ) ; } ) ; <nl> } <nl> <nl> + void ConstraintSystem : : openGenericParameters ( DeclContext * outerDC , <nl> + GenericSignature * sig , <nl> + OpenedTypeMap & replacements , <nl> + ConstraintLocatorBuilder locator ) { <nl> + assert ( sig ) ; <nl> + <nl> + / / Create the type variables for the generic parameters . <nl> + for ( auto gp : sig - > getGenericParams ( ) ) { <nl> + auto * paramLocator = <nl> + getConstraintLocator ( locator . withPathElement ( LocatorPathElt ( gp ) ) ) ; <nl> + <nl> + auto typeVar = createTypeVariable ( paramLocator , TVO_PrefersSubtypeBinding ) ; <nl> + auto result = replacements . insert ( std : : make_pair ( <nl> + cast < GenericTypeParamType > ( gp - > getCanonicalType ( ) ) , typeVar ) ) ; <nl> + <nl> + assert ( result . second ) ; <nl> + ( void ) result ; <nl> + } <nl> + <nl> + auto * baseLocator = getConstraintLocator ( <nl> + locator . withPathElement ( LocatorPathElt : : getOpenedGeneric ( sig ) ) ) ; <nl> + <nl> + bindArchetypesFromContext ( * this , outerDC , baseLocator , replacements ) ; <nl> + } <nl> + <nl> void ConstraintSystem : : openGenericRequirements ( <nl> DeclContext * outerDC , GenericSignature * signature , <nl> bool skipProtocolSelfConstraint , ConstraintLocatorBuilder locator , <nl> mmm a / lib / Sema / ConstraintSystem . h <nl> ppp b / lib / Sema / ConstraintSystem . h <nl> class ConstraintSystem { <nl> OpenedTypeMap & replacements , <nl> bool skipGenericRequirements = false ) ; <nl> <nl> + / / / Open the generic parameter list creating type variables for each of the <nl> + / / / type parameters . <nl> + void openGenericParameters ( DeclContext * outerDC , <nl> + GenericSignature * signature , <nl> + OpenedTypeMap & replacements , <nl> + ConstraintLocatorBuilder locator ) ; <nl> + <nl> / / / Given generic signature open its generic requirements , <nl> / / / using substitution function , and record them in the <nl> / / / constraint system for further processing . <nl>
[ ConstraintSystem ] Extract logic for opening generic parameters into its own method
apple/swift
ef1e5425d4f7f5eabbe88d142077b8340db30cab
2019-06-05T00:50:44Z
mmm a / dlib / dnn / loss . h <nl> ppp b / dlib / dnn / loss . h <nl> namespace dlib <nl> typename SUB_TYPE , <nl> typename label_iterator <nl> > <nl> - void to_label ( <nl> + static void to_label ( <nl> const tensor & input_tensor , <nl> const SUB_TYPE & sub , <nl> label_iterator iter <nl> - ) const <nl> + ) <nl> { <nl> DLIB_CASSERT ( sub . sample_expansion_factor ( ) = = 1 ) ; <nl> <nl> namespace dlib <nl> std : : string version ; <nl> deserialize ( version , in ) ; <nl> if ( version ! = " loss_multiclass_log_per_pixel_ " ) <nl> - throw serialization_error ( " Unexpected version found while deserializing dlib : : loss_multiclass_log_ . " ) ; <nl> + throw serialization_error ( " Unexpected version found while deserializing dlib : : loss_multiclass_log_per_pixel_ . " ) ; <nl> } <nl> <nl> friend std : : ostream & operator < < ( std : : ostream & out , const loss_multiclass_log_per_pixel_ & ) <nl> namespace dlib <nl> template < typename SUBNET > <nl> using loss_multiclass_log_per_pixel = add_loss_layer < loss_multiclass_log_per_pixel_ , SUBNET > ; <nl> <nl> + / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + <nl> + class loss_multiclass_log_per_pixel_weighted_ <nl> + { <nl> + public : <nl> + <nl> + struct weighted_label <nl> + { <nl> + weighted_label ( ) <nl> + { } <nl> + <nl> + weighted_label ( uint16_t label , float weight = 1 . f ) <nl> + : label ( label ) , weight ( weight ) <nl> + { } <nl> + <nl> + / / In semantic segmentation , 65536 classes ought to be enough for anybody . <nl> + uint16_t label = 0 ; <nl> + float weight = 1 . f ; <nl> + } ; <nl> + <nl> + typedef matrix < weighted_label > training_label_type ; <nl> + typedef matrix < uint16_t > output_label_type ; <nl> + <nl> + template < <nl> + typename SUB_TYPE , <nl> + typename label_iterator <nl> + > <nl> + static void to_label ( <nl> + const tensor & input_tensor , <nl> + const SUB_TYPE & sub , <nl> + label_iterator iter <nl> + ) <nl> + { <nl> + loss_multiclass_log_per_pixel_ : : to_label ( input_tensor , sub , iter ) ; <nl> + } <nl> + <nl> + template < <nl> + typename const_label_iterator , <nl> + typename SUBNET <nl> + > <nl> + double compute_loss_value_and_gradient ( <nl> + const tensor & input_tensor , <nl> + const_label_iterator truth , <nl> + SUBNET & sub <nl> + ) const <nl> + { <nl> + const tensor & output_tensor = sub . get_output ( ) ; <nl> + tensor & grad = sub . get_gradient_input ( ) ; <nl> + <nl> + DLIB_CASSERT ( sub . sample_expansion_factor ( ) = = 1 ) ; <nl> + DLIB_CASSERT ( input_tensor . num_samples ( ) ! = 0 ) ; <nl> + DLIB_CASSERT ( input_tensor . num_samples ( ) % sub . sample_expansion_factor ( ) = = 0 ) ; <nl> + DLIB_CASSERT ( input_tensor . num_samples ( ) = = grad . num_samples ( ) ) ; <nl> + DLIB_CASSERT ( input_tensor . num_samples ( ) = = output_tensor . num_samples ( ) ) ; <nl> + DLIB_CASSERT ( output_tensor . k ( ) > = 1 ) ; <nl> + DLIB_CASSERT ( output_tensor . k ( ) < std : : numeric_limits < uint16_t > : : max ( ) ) ; <nl> + DLIB_CASSERT ( output_tensor . nr ( ) = = grad . nr ( ) & & <nl> + output_tensor . nc ( ) = = grad . nc ( ) & & <nl> + output_tensor . k ( ) = = grad . k ( ) ) ; <nl> + for ( long idx = 0 ; idx < output_tensor . num_samples ( ) ; + + idx ) <nl> + { <nl> + const_label_iterator truth_matrix_ptr = ( truth + idx ) ; <nl> + DLIB_CASSERT ( truth_matrix_ptr - > nr ( ) = = output_tensor . nr ( ) & & <nl> + truth_matrix_ptr - > nc ( ) = = output_tensor . 
nc ( ) , <nl> + " truth size = " < < truth_matrix_ptr - > nr ( ) < < " x " < < truth_matrix_ptr - > nc ( ) < < " , " <nl> + " output size = " < < output_tensor . nr ( ) < < " x " < < output_tensor . nc ( ) ) ; <nl> + } <nl> + <nl> + tt : : softmax ( grad , output_tensor ) ; <nl> + <nl> + / / The loss we output is the weighted average loss over the mini - batch , and also over each element of the matrix output . <nl> + const double scale = 1 . 0 / ( output_tensor . num_samples ( ) * output_tensor . nr ( ) * output_tensor . nc ( ) ) ; <nl> + double loss = 0 ; <nl> + float * const g = grad . host ( ) ; <nl> + for ( long i = 0 ; i < output_tensor . num_samples ( ) ; + + i , + + truth ) <nl> + { <nl> + for ( long r = 0 ; r < output_tensor . nr ( ) ; + + r ) <nl> + { <nl> + for ( long c = 0 ; c < output_tensor . nc ( ) ; + + c ) <nl> + { <nl> + const weighted_label & weighted_label = truth - > operator ( ) ( r , c ) ; <nl> + const uint16_t y = weighted_label . label ; <nl> + const float weight = weighted_label . weight ; <nl> + / / The network must produce a number of outputs that is equal to the number <nl> + / / of labels when using this type of loss . <nl> + DLIB_CASSERT ( static_cast < long > ( y ) < output_tensor . k ( ) | | weight = = 0 . f , <nl> + " y : " < < y < < " , output_tensor . k ( ) : " < < output_tensor . k ( ) ) ; <nl> + for ( long k = 0 ; k < output_tensor . k ( ) ; + + k ) <nl> + { <nl> + const size_t idx = tensor_index ( output_tensor , i , r , c , k ) ; <nl> + if ( k = = y ) <nl> + { <nl> + loss + = weight * scale * - std : : log ( g [ idx ] ) ; <nl> + g [ idx ] = weight * scale * ( g [ idx ] - 1 ) ; <nl> + } <nl> + else <nl> + { <nl> + g [ idx ] = weight * scale * g [ idx ] ; <nl> + } <nl> + } <nl> + } <nl> + } <nl> + } <nl> + return loss ; <nl> + } <nl> + <nl> + friend void serialize ( const loss_multiclass_log_per_pixel_weighted_ & , std : : ostream & out ) <nl> + { <nl> + serialize ( " loss_multiclass_log_per_pixel_weighted_ " , out ) ; <nl> + } <nl> + <nl> + friend void deserialize ( loss_multiclass_log_per_pixel_weighted_ & , std : : istream & in ) <nl> + { <nl> + std : : string version ; <nl> + deserialize ( version , in ) ; <nl> + if ( version ! = " loss_multiclass_log_per_pixel_weighted_ " ) <nl> + throw serialization_error ( " Unexpected version found while deserializing dlib : : loss_multiclass_log_per_pixel_weighted_ . " ) ; <nl> + } <nl> + <nl> + friend std : : ostream & operator < < ( std : : ostream & out , const loss_multiclass_log_per_pixel_weighted_ & ) <nl> + { <nl> + out < < " loss_multiclass_log_per_pixel_weighted " ; <nl> + return out ; <nl> + } <nl> + <nl> + friend void to_xml ( const loss_multiclass_log_per_pixel_weighted_ & / * item * / , std : : ostream & out ) <nl> + { <nl> + out < < " < loss_multiclass_log_per_pixel_weighted / > " ; <nl> + } <nl> + <nl> + private : <nl> + static size_t tensor_index ( const tensor & t , long sample , long row , long column , long k ) <nl> + { <nl> + / / See : https : / / github . com / davisking / dlib / blob / 4dfeb7e186dd1bf6ac91273509f687293bd4230a / dlib / dnn / tensor_abstract . h # L38 <nl> + return ( ( sample * t . k ( ) + k ) * t . nr ( ) + row ) * t . 
nc ( ) + column ; <nl> + } <nl> + <nl> + } ; <nl> + <nl> + template < typename SUBNET > <nl> + using loss_multiclass_log_per_pixel_weighted = add_loss_layer < loss_multiclass_log_per_pixel_weighted_ , SUBNET > ; <nl> + <nl> / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> <nl> } <nl> mmm a / dlib / dnn / loss_abstract . h <nl> ppp b / dlib / dnn / loss_abstract . h <nl> namespace dlib <nl> template < typename SUBNET > <nl> using loss_multiclass_log_per_pixel = add_loss_layer < loss_multiclass_log_per_pixel_ , SUBNET > ; <nl> <nl> + / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + <nl> + class loss_multiclass_log_per_pixel_weighted_ <nl> + { <nl> + / * ! <nl> + WHAT THIS OBJECT REPRESENTS <nl> + This object implements the loss layer interface defined above by <nl> + EXAMPLE_LOSS_LAYER_ . In particular , it implements the multiclass logistic <nl> + regression loss ( e . g . negative log - likelihood loss ) , which is appropriate <nl> + for multiclass classification problems . It is basically just like <nl> + loss_multiclass_log_per_pixel_ except that it lets you define per - pixel <nl> + weights , which may be useful e . g . if you want to emphasize rare classes <nl> + while training . ( If the classification problem is difficult , a flat weight <nl> + structure may lead the network to always predict the most common label , in <nl> + particular if the degree of imbalance is high . To emphasize a certain <nl> + class or classes , simply increase the weights of the corresponding pixels , <nl> + relative to the weights of the other pixels . ) <nl> + <nl> + Note that if you set the weight to 0 whenever a pixel ' s label is equal to <nl> + loss_multiclass_log_per_pixel_ : : label_to_ignore , and to 1 otherwise , then <nl> + you essentially get loss_multiclass_log_per_pixel_ as a special case . <nl> + ! * / <nl> + public : <nl> + <nl> + struct weighted_label <nl> + { <nl> + / * ! <nl> + WHAT THIS OBJECT REPRESENTS <nl> + This object represents the truth label of a single pixel , together with <nl> + an associated weight ( the higher the weight , the more emphasis the <nl> + corresponding pixel is given during the training ) . <nl> + ! * / <nl> + <nl> + weighted_label ( ) ; <nl> + weighted_label ( uint16_t label , float weight = 1 . f ) ; <nl> + <nl> + / / The ground - truth label . In semantic segmentation , 65536 classes ought to be <nl> + / / enough for anybody . <nl> + uint16_t label = 0 ; <nl> + <nl> + / / The weight of the corresponding pixel . <nl> + float weight = 1 . f ; <nl> + } ; <nl> + <nl> + typedef matrix < weighted_label > training_label_type ; <nl> + typedef matrix < uint16_t > output_label_type ; <nl> + <nl> + template < <nl> + typename SUB_TYPE , <nl> + typename label_iterator <nl> + > <nl> + void to_label ( <nl> + const tensor & input_tensor , <nl> + const SUB_TYPE & sub , <nl> + label_iterator iter <nl> + ) const ; <nl> + / * ! <nl> + This function has the same interface as EXAMPLE_LOSS_LAYER_ : : to_label ( ) except <nl> + it has the additional calling requirements that : <nl> + - sub . get_output ( ) . num_samples ( ) = = input_tensor . num_samples ( ) <nl> + - sub . sample_expansion_factor ( ) = = 1 <nl> + and the output label is the predicted class for each classified element . The number <nl> + of possible output classes is sub . get_output ( ) . k ( ) . <nl> + ! 
* / <nl> + <nl> + template < <nl> + typename const_label_iterator , <nl> + typename SUBNET <nl> + > <nl> + double compute_loss_value_and_gradient ( <nl> + const tensor & input_tensor , <nl> + const_label_iterator truth , <nl> + SUBNET & sub <nl> + ) const ; <nl> + / * ! <nl> + This function has the same interface as EXAMPLE_LOSS_LAYER_ : : compute_loss_value_and_gradient ( ) <nl> + except it has the additional calling requirements that : <nl> + - sub . get_output ( ) . num_samples ( ) = = input_tensor . num_samples ( ) <nl> + - sub . sample_expansion_factor ( ) = = 1 <nl> + - all labels pointed to by truth are < sub . get_output ( ) . k ( ) , or the corresponding weight <nl> + is zero . <nl> + ! * / <nl> + <nl> + } ; <nl> + <nl> + template < typename SUBNET > <nl> + using loss_multiclass_log_per_pixel_weighted = add_loss_layer < loss_multiclass_log_per_pixel_weighted_ , SUBNET > ; <nl> + <nl> / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> <nl> } <nl> mmm a / dlib / test / dnn . cpp <nl> ppp b / dlib / test / dnn . cpp <nl> namespace <nl> <nl> / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> <nl> - void test_tensor_resize_bilienar ( long samps , long k , long nr , long nc , long onr , long onc ) <nl> + void test_loss_multiclass_per_pixel_weighted ( ) <nl> + { <nl> + / / Train with pixel - specific weights <nl> + <nl> + print_spinner ( ) ; <nl> + <nl> + constexpr int input_height = 5 ; <nl> + constexpr int input_width = 7 ; <nl> + constexpr int output_height = input_height ; <nl> + constexpr int output_width = input_width ; <nl> + const int num_samples = 1000 ; <nl> + const int num_classes = 6 ; <nl> + <nl> + : : std : : default_random_engine generator ( 16 ) ; <nl> + : : std : : uniform_real_distribution < double > u01 ( 0 . 0 , 1 . 0 ) ; <nl> + : : std : : uniform_int_distribution < uint16_t > noisy_label ( 0 , num_classes - 1 ) ; <nl> + <nl> + : : std : : vector < matrix < double > > x ( num_samples ) ; <nl> + : : std : : vector < matrix < uint16_t > > y ( num_samples ) ; <nl> + <nl> + matrix < double > xtmp ( input_height , input_width ) ; <nl> + matrix < uint16_t > ytmp ( output_height , output_width ) ; <nl> + <nl> + / / Generate input data <nl> + for ( int ii = 0 ; ii < num_samples ; + + ii ) { <nl> + for ( int jj = 0 ; jj < input_height ; + + jj ) { <nl> + for ( int kk = 0 ; kk < input_width ; + + kk ) { <nl> + xtmp ( jj , kk ) = u01 ( generator ) ; <nl> + ytmp ( jj , kk ) = noisy_label ( generator ) ; <nl> + } <nl> + } <nl> + x [ ii ] = xtmp ; <nl> + y [ ii ] = ytmp ; <nl> + } <nl> + <nl> + using net_type = loss_multiclass_log_per_pixel_weighted < con < num_classes , 1 , 1 , 1 , 1 , input < matrix < double > > > > ; <nl> + using weighted_label = loss_multiclass_log_per_pixel_weighted_ : : weighted_label ; <nl> + <nl> + : : std : : vector < matrix < weighted_label > > y_weighted ( num_samples ) ; <nl> + <nl> + for ( int weighted_class = 0 ; weighted_class < num_classes ; + + weighted_class ) { <nl> + <nl> + print_spinner ( ) ; <nl> + <nl> + / / Assign weights <nl> + for ( int ii = 0 ; ii < num_samples ; + + ii ) { <nl> + if ( weighted_class = = 0 ) { <nl> + y_weighted [ ii ] . set_size ( input_height , input_width ) ; <nl> + } <nl> + for ( int jj = 0 ; jj < input_height ; + + jj ) { <nl> + for ( int kk = 0 ; kk < input_width ; + + kk ) { <nl> + const uint16_t label = y [ ii ] ( jj , kk ) ; <nl> + const float weight <nl> + = label = = weighted_class <nl> + ? 1 . 1f <nl> + : 0 . 
9f ; <nl> + y_weighted [ ii ] ( jj , kk ) = weighted_label ( label , weight ) ; <nl> + } <nl> + } <nl> + } <nl> + <nl> + net_type net ; <nl> + sgd defsolver ( 0 , 0 . 9 ) ; <nl> + dnn_trainer < net_type > trainer ( net , defsolver ) ; <nl> + trainer . set_learning_rate ( 0 . 1 ) ; <nl> + trainer . set_min_learning_rate ( 0 . 01 ) ; <nl> + trainer . set_mini_batch_size ( 10 ) ; <nl> + trainer . set_max_num_epochs ( 10 ) ; <nl> + trainer . train ( x , y_weighted ) ; <nl> + <nl> + const : : std : : vector < matrix < uint16_t > > predictions = net ( x ) ; <nl> + <nl> + int num_weighted_class = 0 ; <nl> + int num_not_weighted_class = 0 ; <nl> + <nl> + for ( int ii = 0 ; ii < num_samples ; + + ii ) { <nl> + const matrix < uint16_t > & prediction = predictions [ ii ] ; <nl> + DLIB_TEST ( prediction . nr ( ) = = output_height ) ; <nl> + DLIB_TEST ( prediction . nc ( ) = = output_width ) ; <nl> + for ( int jj = 0 ; jj < output_height ; + + jj ) <nl> + for ( int kk = 0 ; kk < output_width ; + + kk ) <nl> + if ( prediction ( jj , kk ) = = weighted_class ) <nl> + + + num_weighted_class ; <nl> + else <nl> + + + num_not_weighted_class ; <nl> + } <nl> + <nl> + DLIB_TEST_MSG ( num_weighted_class > num_not_weighted_class , <nl> + " The weighted class ( " < < weighted_class < < " ) does not dominate : " <nl> + < < num_weighted_class < < " < = " < < num_not_weighted_class ) ; <nl> + } <nl> + } <nl> + <nl> + / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + <nl> + void test_tensor_resize_bilinear ( long samps , long k , long nr , long nc , long onr , long onc ) <nl> { <nl> resizable_tensor img ( samps , k , nr , nc ) ; <nl> resizable_tensor out ( samps , k , onr , onc ) ; <nl> namespace <nl> compare_adam ( ) ; <nl> test_copy_tensor_gpu ( ) ; <nl> # endif <nl> - test_tensor_resize_bilienar ( 2 , 3 , 6 , 6 , 11 , 11 ) ; <nl> - test_tensor_resize_bilienar ( 2 , 3 , 6 , 6 , 3 , 4 ) ; <nl> - test_tensor_resize_bilienar ( 2 , 3 , 5 , 6 , 12 , 21 ) ; <nl> + test_tensor_resize_bilinear ( 2 , 3 , 6 , 6 , 11 , 11 ) ; <nl> + test_tensor_resize_bilinear ( 2 , 3 , 6 , 6 , 3 , 4 ) ; <nl> + test_tensor_resize_bilinear ( 2 , 3 , 5 , 6 , 12 , 21 ) ; <nl> test_max_pool ( 1 , 1 , 2 , 3 , 0 , 0 ) ; <nl> test_max_pool ( 3 , 3 , 1 , 1 , 0 , 0 ) ; <nl> test_max_pool ( 3 , 3 , 2 , 2 , 0 , 0 ) ; <nl> namespace <nl> test_loss_multiclass_per_pixel_activations_on_trivial_single_pixel_task ( ) ; <nl> test_loss_multiclass_per_pixel_outputs_on_trivial_task ( ) ; <nl> test_loss_multiclass_per_pixel_with_noise_and_pixels_to_ignore ( ) ; <nl> + test_loss_multiclass_per_pixel_weighted ( ) ; <nl> } <nl> <nl> void perform_test ( ) <nl> mmm a / setup . py <nl> ppp b / setup . py <nl> def build_dlib ( ) : <nl> # this checks the sysconfig and will correctly pick up a brewed python lib <nl> # e . g . in / usr / local / Cellar <nl> py_ver = get_python_version ( ) <nl> + # check : in some virtual environments the libpython has the form " libpython_ # m . dylib <nl> py_lib = os . path . join ( get_config_var ( ' LIBDIR ' ) , ' libpython ' + py_ver + ' . dylib ' ) <nl> + if not os . path . isfile ( py_lib ) : <nl> + py_lib = os . path . join ( get_config_var ( ' LIBDIR ' ) , ' libpython ' + py_ver + ' m . dylib ' ) <nl> + <nl> cmake_extra_arch + = [ ' - DPYTHON_LIBRARY = { lib } ' . format ( lib = py_lib ) ] <nl> <nl> if sys . platform = = " win32 " : <nl>
merged
davisking/dlib
ebdc064c61b446caf6db18b112d9e4ac5f0ca52d
2017-07-07T14:31:26Z
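Editor's note on the dlib record above: the commit introduces loss_multiclass_log_per_pixel_weighted_ and its weighted_label type, and the bundled test shows the intended call pattern. Below is a minimal C++ sketch of that pattern, assuming only what the diff itself shows; the 1x1 convolutional net, the 5x7 toy sample and the 2x weight on class 0 are illustrative choices, not part of the library.

// Minimal sketch, assuming only what the dlib test in the diff shows.
#include <dlib/dnn.h>
#include <vector>

using namespace dlib;

int main()
{
    const int num_classes = 6;

    using net_type = loss_multiclass_log_per_pixel_weighted<
        con<num_classes, 1, 1, 1, 1, input<matrix<double>>>>;
    using weighted_label = loss_multiclass_log_per_pixel_weighted_::weighted_label;

    // One 5x7 training sample with a per-pixel label and weight.
    matrix<double> image(5, 7);
    matrix<weighted_label> labels(5, 7);
    for (long r = 0; r < image.nr(); ++r)
    {
        for (long c = 0; c < image.nc(); ++c)
        {
            image(r, c) = (r + c) % 2;                            // dummy pixel value
            const uint16_t y = (r + c) % num_classes;             // dummy ground truth
            labels(r, c) = weighted_label(y, y == 0 ? 2.f : 1.f); // emphasize class 0
        }
    }

    net_type net;
    dnn_trainer<net_type> trainer(net, sgd(0, 0.9));
    trainer.set_learning_rate(0.1);
    trainer.set_mini_batch_size(1);
    trainer.set_max_num_epochs(1);
    trainer.train(std::vector<matrix<double>>{image},
                  std::vector<matrix<weighted_label>>{labels});

    // Predictions come back as a matrix of per-pixel class indices.
    const matrix<uint16_t> prediction = net(image);
    return prediction.nr() == image.nr() ? 0 : 1;
}

Setting every weight to 1 reproduces the plain per-pixel loss; raising the weight of a rare class, as in the loop above, is the documented way to counter class imbalance.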
mmm a / src / bindings / csharp / openalpr - net / openalpr - net . cpp <nl> ppp b / src / bindings / csharp / openalpr - net / openalpr - net . cpp <nl> namespace openalprnet { <nl> <nl> ~ AlprMotionDetectionNet ( ) <nl> { <nl> - if ( this - > m_Disposed ) <nl> + if ( this - > m_disposed ) <nl> { <nl> return ; <nl> } <nl> <nl> this - > ! AlprMotionDetectionNet ( ) ; <nl> - this - > m_Disposed = true ; <nl> + this - > m_disposed = true ; <nl> } <nl> <nl> ! AlprMotionDetectionNet ( ) <nl> namespace openalprnet { <nl> <nl> private : <nl> MotionDetector * m_motionDetector ; <nl> - bool m_Disposed ; <nl> + bool m_disposed ; <nl> } ; <nl> <nl> <nl> namespace openalprnet { <nl> } <nl> } <nl> <nl> - property float minPlateSizeHeightPx { <nl> + property float MinPlateSizeHeightPx { <nl> float get ( ) <nl> { <nl> return this - > m_config - > minPlateSizeHeightPx ; <nl> namespace openalprnet { <nl> m_matches_template = plate . matches_template ; <nl> } <nl> <nl> - property System : : String ^ characters { <nl> + property System : : String ^ Characters { <nl> System : : String ^ get ( ) { <nl> return m_characters ; <nl> } <nl> } <nl> <nl> - property float overall_confidence { <nl> + property float OverallConfidence { <nl> float get ( ) { <nl> return m_overall_confidence ; <nl> } <nl> } <nl> <nl> - property bool matches_template { <nl> + property bool MatchesTemplate { <nl> bool get ( ) { <nl> return m_matches_template ; <nl> } <nl> namespace openalprnet { <nl> } <nl> } <nl> <nl> - property int requested_topn { <nl> + property int RequestedTopN { <nl> int get ( ) { <nl> return m_requested_topn ; <nl> } <nl> } <nl> <nl> - property int regionConfidence { <nl> + property int RegionConfidence { <nl> int get ( ) { <nl> return m_regionConfidence ; <nl> } <nl> } <nl> <nl> - property int plate_index { <nl> + property int PlateIndex { <nl> int get ( ) { <nl> return m_plate_index ; <nl> } <nl> } <nl> <nl> - property System : : String ^ region { <nl> + property System : : String ^ Region { <nl> System : : String ^ get ( ) { <nl> return m_region ; <nl> } <nl> } <nl> <nl> - property AlprPlateNet ^ bestPlate { <nl> + property AlprPlateNet ^ BestPlate { <nl> AlprPlateNet ^ get ( ) { <nl> return m_bestPlate ; <nl> } <nl> } <nl> <nl> - property List < System : : Drawing : : Point > ^ plate_points { <nl> + property List < System : : Drawing : : Point > ^ PlatePoints { <nl> List < System : : Drawing : : Point > ^ get ( ) { <nl> return m_plate_points ; <nl> } <nl> } <nl> <nl> - property List < AlprPlateNet ^ > ^ topNPlates { <nl> + property List < AlprPlateNet ^ > ^ TopNPlates { <nl> List < AlprPlateNet ^ > ^ get ( ) { <nl> return m_topNPlates ; <nl> } <nl> } <nl> <nl> - property float processing_time_ms { <nl> + property float ProcessingTimeMs { <nl> float get ( ) { <nl> return m_processing_time_ms ; <nl> } <nl> namespace openalprnet { <nl> m_json = AlprHelper : : ToManagedString ( json ) ; <nl> } <nl> <nl> - property long epoch_time { <nl> + property long EpochTime { <nl> long get ( ) { <nl> return m_epoch_time ; <nl> } <nl> } <nl> <nl> - property int img_width { <nl> + property int ImageWidth { <nl> int get ( ) { <nl> return m_img_width ; <nl> } <nl> } <nl> <nl> - property int img_height { <nl> + property int ImageHeight { <nl> int get ( ) { <nl> return m_img_height ; <nl> } <nl> } <nl> <nl> - property float total_processing_time_ms { <nl> + property float TotalProcessingTimeMs { <nl> float get ( ) { <nl> return m_total_processing_time_ms ; <nl> } <nl> } <nl> <nl> - property List < System : : Drawing : : Rectangle > ^ 
regionsOfInterest { <nl> + property List < System : : Drawing : : Rectangle > ^ RegionsOfInterest { <nl> List < System : : Drawing : : Rectangle > ^ get ( ) { <nl> return m_regionsOfInterest ; <nl> } <nl> } <nl> <nl> - property List < AlprPlateResultNet ^ > ^ plates { <nl> + property List < AlprPlateResultNet ^ > ^ Plates { <nl> List < AlprPlateResultNet ^ > ^ get ( ) { <nl> return m_plates ; <nl> } <nl> } <nl> <nl> - property System : : String ^ json { <nl> + property System : : String ^ Json { <nl> System : : String ^ get ( ) { <nl> return m_json ; <nl> } <nl> namespace openalprnet { <nl> } <nl> <nl> ~ AlprNet ( ) { <nl> - if ( this - > m_Disposed ) <nl> + if ( this - > m_disposed ) <nl> { <nl> return ; <nl> } <nl> <nl> this - > ! AlprNet ( ) ; <nl> - this - > m_Disposed = true ; <nl> + this - > m_disposed = true ; <nl> } <nl> <nl> property AlprConfigNet ^ Configuration { <nl> namespace openalprnet { <nl> / / / < summary > <nl> / / / Recognize from an image on disk <nl> / / / < / summary > <nl> - AlprResultsNet ^ recognize ( System : : String ^ filepath ) { <nl> + AlprResultsNet ^ Recognize ( System : : String ^ filepath ) { <nl> AlprResults results = m_Impl - > recognize ( marshal_as < std : : string > ( filepath ) ) ; <nl> return gcnew AlprResultsNet ( results ) ; <nl> } <nl> namespace openalprnet { <nl> / / / < summary > <nl> / / / Recognize from an image on disk <nl> / / / < / summary > <nl> - AlprResultsNet ^ recognize ( System : : String ^ filepath , List < System : : Drawing : : Rectangle > ^ regionsOfInterest ) { <nl> + AlprResultsNet ^ Recognize ( System : : String ^ filepath , List < System : : Drawing : : Rectangle > ^ regionsOfInterest ) { <nl> cv : : Mat frame = cv : : imread ( marshal_as < std : : string > ( filepath ) ) ; <nl> std : : vector < AlprRegionOfInterest > rois = AlprHelper : : ToVector ( regionsOfInterest ) ; <nl> AlprResults results = m_Impl - > recognize ( frame . data , frame . elemSize ( ) , frame . cols , frame . rows , rois ) ; <nl> namespace openalprnet { <nl> / / / < summary > <nl> / / / Recognize from a bitmap <nl> / / / < / summary > <nl> - AlprResultsNet ^ recognize ( Bitmap ^ bitmap ) <nl> + AlprResultsNet ^ Recognize ( Bitmap ^ bitmap ) <nl> { <nl> - return recognize ( bitmap , gcnew List < System : : Drawing : : Rectangle > ( ) ) ; <nl> + return Recognize ( bitmap , gcnew List < System : : Drawing : : Rectangle > ( ) ) ; <nl> } <nl> <nl> / / / < summary > <nl> / / / Recognize from a bitmap <nl> / / / < / summary > <nl> - AlprResultsNet ^ recognize ( Bitmap ^ bitmap , List < System : : Drawing : : Rectangle > ^ regionsOfInterest ) <nl> + AlprResultsNet ^ Recognize ( Bitmap ^ bitmap , List < System : : Drawing : : Rectangle > ^ regionsOfInterest ) <nl> { <nl> cv : : Mat frame = AlprHelper : : BitmapToMat ( bitmap ) ; <nl> std : : vector < AlprRegionOfInterest > rois = AlprHelper : : ToVector ( regionsOfInterest ) ; <nl> namespace openalprnet { <nl> / / / < summary > <nl> / / / Recognize from MemoryStream representing an encoded image ( e . g . , BMP , PNG , JPG , GIF etc ) . <nl> / / / < / summary > <nl> - AlprResultsNet ^ recognize ( MemoryStream ^ memoryStream ) <nl> + AlprResultsNet ^ Recognize ( MemoryStream ^ memoryStream ) <nl> { <nl> - return recognize ( memoryStream , gcnew List < System : : Drawing : : Rectangle > ( ) ) ; <nl> + return Recognize ( memoryStream , gcnew List < System : : Drawing : : Rectangle > ( ) ) ; <nl> } <nl> <nl> / / / < summary > <nl> / / / Recognize from MemoryStream representing an encoded image ( e . g . 
, BMP , PNG , JPG , GIF etc ) . <nl> / / / < / summary > <nl> - AlprResultsNet ^ recognize ( MemoryStream ^ memoryStream , List < System : : Drawing : : Rectangle > ^ regionsOfInterest ) <nl> + AlprResultsNet ^ Recognize ( MemoryStream ^ memoryStream , List < System : : Drawing : : Rectangle > ^ regionsOfInterest ) <nl> { <nl> std : : vector < char > p = AlprHelper : : MemoryStreamToVector ( memoryStream ) ; <nl> AlprResults results = m_Impl - > recognize ( p ) ; <nl> namespace openalprnet { <nl> / / / Recognize from byte data representing an encoded image ( e . g . , BMP , PNG , JPG , GIF etc ) . <nl> / / / < / summary > <nl> / / / < param name = " imageBuffer " > Bytes representing image data < / param > <nl> - AlprResultsNet ^ recognize ( cli : : array < Byte > ^ imageBuffer ) { <nl> - return recognize ( imageBuffer , gcnew List < System : : Drawing : : Rectangle > ( ) ) ; <nl> + AlprResultsNet ^ Recognize ( cli : : array < Byte > ^ imageBuffer ) { <nl> + return Recognize ( imageBuffer , gcnew List < System : : Drawing : : Rectangle > ( ) ) ; <nl> } <nl> <nl> / / / < summary > <nl> / / / Recognize from byte data representing an encoded image ( e . g . , BMP , PNG , JPG , GIF etc ) . <nl> / / / < / summary > <nl> / / / < param name = " imageBuffer " > Bytes representing image data < / param > <nl> - AlprResultsNet ^ recognize ( cli : : array < Byte > ^ imageBuffer , List < System : : Drawing : : Rectangle > ^ regionsOfInterest ) { <nl> + AlprResultsNet ^ Recognize ( cli : : array < Byte > ^ imageBuffer , List < System : : Drawing : : Rectangle > ^ regionsOfInterest ) { <nl> std : : vector < char > p = AlprHelper : : ToVector ( imageBuffer ) ; <nl> AlprResults results = m_Impl - > recognize ( p ) ; <nl> return gcnew AlprResultsNet ( results ) ; <nl> namespace openalprnet { <nl> / / / < summary > <nl> / / / Recognize from raw pixel data <nl> / / / < / summary > <nl> - AlprResultsNet ^ recognize ( cli : : array < Byte > ^ imageBuffer , int bytesPerPixel , int imgWidth , int imgHeight ) { <nl> - return recognize ( imageBuffer , bytesPerPixel , imgWidth , imgHeight , gcnew List < System : : Drawing : : Rectangle > ( ) ) ; <nl> + AlprResultsNet ^ Recognize ( cli : : array < Byte > ^ imageBuffer , int bytesPerPixel , int imgWidth , int imgHeight ) { <nl> + return Recognize ( imageBuffer , bytesPerPixel , imgWidth , imgHeight , gcnew List < System : : Drawing : : Rectangle > ( ) ) ; <nl> } <nl> <nl> / / / < summary > <nl> / / / Recognize from raw pixel data <nl> / / / < / summary > <nl> - AlprResultsNet ^ recognize ( cli : : array < Byte > ^ imageBuffer , int bytesPerPixel , int imgWidth , int imgHeight , List < System : : Drawing : : Rectangle > ^ regionsOfInterest ) { <nl> + AlprResultsNet ^ Recognize ( cli : : array < Byte > ^ imageBuffer , int bytesPerPixel , int imgWidth , int imgHeight , List < System : : Drawing : : Rectangle > ^ regionsOfInterest ) { <nl> unsigned char * p = AlprHelper : : ToCharPtr ( imageBuffer ) ; <nl> std : : vector < AlprRegionOfInterest > rois = AlprHelper : : ToVector ( regionsOfInterest ) ; <nl> AlprResults results = m_Impl - > recognize ( p , bytesPerPixel , imgWidth , imgHeight , rois ) ; <nl> namespace openalprnet { <nl> return gcnew AlprResultsNet ( results ) ; <nl> } <nl> <nl> - bool isLoaded ( ) { <nl> + bool IsLoaded ( ) { <nl> return m_Impl - > isLoaded ( ) ; <nl> } <nl> <nl> - static System : : String ^ getVersion ( ) { <nl> + static System : : String ^ GetVersion ( ) { <nl> return AlprHelper : : ToManagedString ( Alpr : : getVersion ( ) ) ; <nl> } <nl> 
<nl> namespace openalprnet { <nl> int m_topN ; <nl> bool m_detectRegion ; <nl> System : : String ^ m_defaultRegion ; <nl> - bool m_Disposed ; <nl> + bool m_disposed ; <nl> } ; <nl> } <nl> mmm a / src / bindings / csharp / openalprnet - cli / Program . cs <nl> ppp b / src / bindings / csharp / openalprnet - cli / Program . cs <nl> private static void Main ( string [ ] args ) <nl> val = > { if ( val . Any ( ) ) filename = val . First ( ) . Trim ( ) ; } ) <nl> ) ; <nl> <nl> - Console . WriteLine ( " OpenAlpr Version : { 0 } " , AlprNet . getVersion ( ) ) ; <nl> + Console . WriteLine ( " OpenAlpr Version : { 0 } " , AlprNet . GetVersion ( ) ) ; <nl> var config = Path . Combine ( AssemblyDirectory , " openalpr . conf " ) ; <nl> var runtime_data = Path . Combine ( AssemblyDirectory , " runtime_data " ) ; <nl> var alpr = new AlprNet ( region , config , runtime_data ) ; <nl> - if ( ! alpr . isLoaded ( ) ) <nl> + if ( ! alpr . IsLoaded ( ) ) <nl> { <nl> Console . WriteLine ( " OpenAlpr failed to load ! " ) ; <nl> return ; <nl> private static void PerformAlpr ( AlprNet alpr , string filename , bool benchmark , b <nl> private static void PerformAlpr ( AlprNet alpr , byte [ ] buffer , bool benchmark , bool writeJson ) <nl> { <nl> var sw = Stopwatch . StartNew ( ) ; <nl> - var results = alpr . recognize ( buffer ) ; <nl> + var results = alpr . Recognize ( buffer ) ; <nl> sw . Stop ( ) ; <nl> if ( benchmark ) <nl> { <nl> private static void PerformAlpr ( AlprNet alpr , byte [ ] buffer , bool benchmark , boo <nl> else <nl> { <nl> var i = 0 ; <nl> - foreach ( var result in results . plates ) <nl> + foreach ( var result in results . Plates ) <nl> { <nl> - Console . WriteLine ( " Plate { 0 } : { 1 } result ( s ) " , i + + , result . topNPlates . Count ) ; <nl> - Console . WriteLine ( " Processing Time : { 0 } msec ( s ) " , result . processing_time_ms ) ; <nl> - foreach ( var plate in result . topNPlates ) <nl> + Console . WriteLine ( " Plate { 0 } : { 1 } result ( s ) " , i + + , result . TopNPlates . Count ) ; <nl> + Console . WriteLine ( " Processing Time : { 0 } msec ( s ) " , result . ProcessingTimeMs ) ; <nl> + foreach ( var plate in result . TopNPlates ) <nl> { <nl> - Console . WriteLine ( " - { 0 } \ t Confidence : { 1 } \ tMatches Template : { 2 } " , plate . characters , <nl> - plate . overall_confidence , plate . matches_template ) ; <nl> + Console . WriteLine ( " - { 0 } \ t Confidence : { 1 } \ tMatches Template : { 2 } " , plate . Characters , <nl> + plate . OverallConfidence , plate . MatchesTemplate ) ; <nl> } <nl> } <nl> } <nl> mmm a / src / bindings / csharp / openalprnet - windemo / Form1 . cs <nl> ppp b / src / bindings / csharp / openalprnet - windemo / Form1 . cs <nl> <nl> using System . IO ; <nl> using System . Linq ; <nl> using System . Reflection ; <nl> + using System . Threading . Tasks ; <nl> using System . Windows . Forms ; <nl> using openalprnet ; <nl> <nl> private void processImageFile ( string fileName ) <nl> String runtime_data_dir = Path . Combine ( AssemblyDirectory , " runtime_data " ) ; <nl> using ( var alpr = new AlprNet ( region , config_file , runtime_data_dir ) ) <nl> { <nl> - if ( ! alpr . isLoaded ( ) ) <nl> + if ( ! alpr . IsLoaded ( ) ) <nl> { <nl> lbxPlates . Items . Add ( " Error initializing OpenALPR " ) ; <nl> return ; <nl> private void processImageFile ( string fileName ) <nl> picOriginal . ImageLocation = fileName ; <nl> picOriginal . Load ( ) ; <nl> <nl> - var results = alpr . 
recognize ( fileName ) ; <nl> + var motionDetection = new AlprMotionDetectionNet ( ) ; <nl> + var b = File . ReadAllBytes ( fileName ) ; <nl> + Parallel . For ( 0 , int . MaxValue , ( x ) = > <nl> + { <nl> + motionDetection . MotionDetect ( b ) ; <nl> + } ) ; <nl> + <nl> + var results = alpr . Recognize ( fileName ) ; <nl> <nl> - var images = new List < Image > ( results . plates . Count ( ) ) ; <nl> + var images = new List < Image > ( results . Plates . Count ( ) ) ; <nl> var i = 1 ; <nl> - foreach ( var result in results . plates ) <nl> + foreach ( var result in results . Plates ) <nl> { <nl> - var rect = boundingRectangle ( result . plate_points ) ; <nl> + var rect = boundingRectangle ( result . PlatePoints ) ; <nl> var img = Image . FromFile ( fileName ) ; <nl> var cropped = cropImage ( img , rect ) ; <nl> images . Add ( cropped ) ; <nl> <nl> lbxPlates . Items . Add ( " \ t \ t - - Plate # " + i + + + " - - " ) ; <nl> - foreach ( var plate in result . topNPlates ) <nl> + foreach ( var plate in result . TopNPlates ) <nl> { <nl> lbxPlates . Items . Add ( string . Format ( @ " { 0 } { 1 } % { 2 } " , <nl> - plate . characters . PadRight ( 12 ) , <nl> - plate . overall_confidence . ToString ( " N1 " ) . PadLeft ( 8 ) , <nl> - plate . matches_template . ToString ( ) . PadLeft ( 8 ) ) ) ; <nl> + plate . Characters . PadRight ( 12 ) , <nl> + plate . OverallConfidence . ToString ( " N1 " ) . PadLeft ( 8 ) , <nl> + plate . MatchesTemplate . ToString ( ) . PadLeft ( 8 ) ) ) ; <nl> } <nl> } <nl> <nl>
Capitalize variables / functions in order to adhere to the .NET naming convention.
openalpr/openalpr
c3fb3d950306af702915acbf8383f0e226b07381
2015-07-09T17:40:19Z
mmm a / include / swift / SIL / SILType . h <nl> ppp b / include / swift / SIL / SILType . h <nl> class SILType { <nl> / / / * NOTE * Only call on SILTypes for metatype types . <nl> SILType getMetatypeInstanceType ( ) const ; <nl> <nl> + / / / Returns true if this type is a native object type . <nl> + bool isBuiltinNativeObjectType ( ) const { <nl> + return isa < BuiltinNativeObjectType > ( getSwiftRValueType ( ) ) ; <nl> + } <nl> + <nl> + / / / Returns true if this type is a raw pointer type . <nl> + bool isBuiltinRawPointerType ( ) const { <nl> + return isa < BuiltinRawPointerType > ( getSwiftRValueType ( ) ) ; <nl> + } <nl> + <nl> + / / / Returns true if this SILType is any builtin type . <nl> + bool isBuiltinType ( ) const { <nl> + return isa < BuiltinType > ( getSwiftRValueType ( ) ) ; <nl> + } <nl> + <nl> + / / / Returns true if this SILType is an unknown object type . An unknown object <nl> + / / / type is a builtin opaque Objective - C pointer type . <nl> + bool isBuiltinUnknownObjectType ( ) const { <nl> + return isa < BuiltinUnknownObjectType > ( getSwiftRValueType ( ) ) ; <nl> + } <nl> + <nl> + / / / Returns true if this SILType is a builtin integer type . <nl> + bool isBuiltinIntegerType ( ) const { <nl> + return isa < BuiltinIntegerType > ( getSwiftRValueType ( ) ) ; <nl> + } <nl> + <nl> + / / / Returns true if this SILType is a builtin float type . <nl> + bool isBuiltinFloatType ( ) const { <nl> + return isa < BuiltinFloatType > ( getSwiftRValueType ( ) ) ; <nl> + } <nl> + <nl> + / / / Returns true if this SILType has references to unbound generic types . <nl> + bool isGenericType ( ) const ; <nl> + <nl> + / / / Returns true if this SILType is an aggregate that contains \ p Ty <nl> + bool aggregateContainsRecord ( SILType Ty , SILModule & SILMod ) const ; <nl> + <nl> / / <nl> / / Accessors for types used in SIL instructions : <nl> / / <nl> mmm a / lib / SIL / SILType . cpp <nl> ppp b / lib / SIL / SILType . cpp <nl> SILType SILType : : getMetatypeInstanceType ( ) const { <nl> <nl> return SILType : : getPrimitiveObjectType ( instanceType - > getCanonicalType ( ) ) ; <nl> } <nl> + <nl> + bool SILType : : isGenericType ( ) const { <nl> + return getSwiftRValueType ( ) . findIf ( [ ] ( Type type ) - > bool { <nl> + return isa < ArchetypeType > ( type . getPointer ( ) ) ; <nl> + } ) ; <nl> + } <nl> + <nl> + bool SILType : : aggregateContainsRecord ( SILType Record , SILModule & Mod ) const { <nl> + assert ( ! isGenericType ( ) & & " Agg should be proven to not be generic " <nl> + " before passed to this function . " ) ; <nl> + assert ( ! Record . isGenericType ( ) & & " Record should be proven to not be generic " <nl> + " before passed to this function . " ) ; <nl> + <nl> + llvm : : SmallVector < SILType , 8 > Worklist ; <nl> + Worklist . push_back ( * this ) ; <nl> + <nl> + / / For each " subrecord " of agg in the worklist . . . <nl> + while ( ! Worklist . empty ( ) ) { <nl> + SILType Ty = Worklist . pop_back_val ( ) ; <nl> + <nl> + / / If it is record , we succeeded . Return true . <nl> + if ( Ty = = Record ) <nl> + return true ; <nl> + <nl> + / / Otherwise , we gather up sub - records that need to be checked for <nl> + / / checking . . . First handle the tuple case . <nl> + if ( CanTupleType TT = Ty . getAs < TupleType > ( ) ) { <nl> + for ( unsigned i = 0 , e = TT - > getNumElements ( ) ; i ! = e ; + + i ) <nl> + Worklist . push_back ( Ty . getTupleElementType ( i ) ) ; <nl> + continue ; <nl> + } <nl> + <nl> + / / Then if we have an enum . . . <nl> + if ( EnumDecl * E = Ty . 
getEnumOrBoundGenericEnum ( ) ) { <nl> + for ( auto Elt : E - > getAllElements ( ) ) <nl> + if ( Elt - > hasArgumentType ( ) ) <nl> + Worklist . push_back ( Ty . getEnumElementType ( Elt , Mod ) ) ; <nl> + continue ; <nl> + } <nl> + <nl> + / / Then if we have a struct address . . . <nl> + if ( StructDecl * S = Ty . getStructOrBoundGenericStruct ( ) ) <nl> + for ( VarDecl * Var : S - > getStoredProperties ( ) ) <nl> + Worklist . push_back ( Ty . getFieldType ( Var , Mod ) ) ; <nl> + <nl> + / / If we have a class address , it is a pointer so it can not contain other <nl> + / / types . <nl> + <nl> + / / If we reached this point , then this type has no subrecords . Since it does <nl> + / / not equal our record , we can skip it . <nl> + } <nl> + <nl> + / / Could not find the record in the aggregate . <nl> + return false ; <nl> + } <nl>
[sil-aa] Add predicates to SILType that will be used in TBAA so that TBAA does not need to touch the AST.
apple/swift
9f5d430e933b0aa16ee338d689dd97ad4ee7ae78
2014-05-16T00:21:41Z
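Editor's note on the SILType record above: aggregateContainsRecord() is a plain worklist walk over the element types of tuples, enums and structs. The standalone C++ sketch below restates that traversal over a toy type model; the Type struct and its members field are hypothetical stand-ins used only to illustrate the pattern, not the Swift compiler's API.

// Standalone sketch of the worklist containment check over a toy type model.
#include <vector>

struct Type
{
    int id = 0;                        // identity of the type
    std::vector<const Type*> members;  // element/field types if it is an aggregate
};

bool aggregateContainsRecord(const Type& agg, const Type& record)
{
    std::vector<const Type*> worklist{ &agg };
    while (!worklist.empty())
    {
        const Type* ty = worklist.back();
        worklist.pop_back();

        if (ty->id == record.id)                // found the record inside the aggregate
            return true;

        for (const Type* member : ty->members)  // otherwise queue the sub-records
            worklist.push_back(member);
    }
    return false;                               // the record never appeared
}

int main()
{
    Type leaf{1, {}};
    Type agg{2, { &leaf }};
    return aggregateContainsRecord(agg, leaf) ? 0 : 1;
}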
mmm a / Code / Sandbox / EditorQt / AssetSystem / LegacyAssetTypes . h <nl> ppp b / Code / Sandbox / EditorQt / AssetSystem / LegacyAssetTypes . h <nl> class CScriptType : public CAssetType <nl> public : <nl> DECLARE_ASSET_TYPE_DESC ( CScriptType ) ; <nl> <nl> - virtual const char * GetTypeName ( ) const override { return " Script " ; } <nl> - virtual const char * GetUiTypeName ( ) const override { return QT_TR_NOOP ( " Script " ) ; } <nl> - virtual const char * GetFileExtension ( ) const override { return " lua " ; } <nl> - virtual bool IsImported ( ) const override { return false ; } <nl> - virtual bool CanBeEdited ( ) const override { return false ; } <nl> - virtual QColor GetThumbnailColor ( ) const override { return QColor ( 201 , 96 , 191 ) ; } <nl> + virtual const char * GetTypeName ( ) const override { return " Script " ; } <nl> + virtual const char * GetUiTypeName ( ) const override { return QT_TR_NOOP ( " Script " ) ; } <nl> + virtual const char * GetFileExtension ( ) const override { return " lua " ; } <nl> + virtual bool IsImported ( ) const override { return false ; } <nl> + virtual bool CanBeEdited ( ) const override { return false ; } <nl> + virtual bool CanAutoRepairMetadata ( ) const override { return false ; } <nl> + virtual QColor GetThumbnailColor ( ) const override { return QColor ( 201 , 96 , 191 ) ; } <nl> <nl> private : <nl> virtual CryIcon GetIconInternal ( ) const override <nl> class CXmlType : public CAssetType <nl> public : <nl> DECLARE_ASSET_TYPE_DESC ( CXmlType ) ; <nl> <nl> - virtual const char * GetTypeName ( ) const override { return " Xml " ; } <nl> - virtual const char * GetUiTypeName ( ) const override { return QT_TR_NOOP ( " Xml " ) ; } <nl> - virtual const char * GetFileExtension ( ) const override { return " xml " ; } <nl> - virtual bool IsImported ( ) const override { return false ; } <nl> - virtual bool CanBeEdited ( ) const override { return true ; } <nl> - virtual QColor GetThumbnailColor ( ) const override { return QColor ( 201 , 96 , 191 ) ; } <nl> + virtual const char * GetTypeName ( ) const override { return " Xml " ; } <nl> + virtual const char * GetUiTypeName ( ) const override { return QT_TR_NOOP ( " Xml " ) ; } <nl> + virtual const char * GetFileExtension ( ) const override { return " xml " ; } <nl> + virtual bool IsImported ( ) const override { return false ; } <nl> + virtual bool CanBeEdited ( ) const override { return true ; } <nl> + virtual bool CanAutoRepairMetadata ( ) const override { return false ; } <nl> + virtual QColor GetThumbnailColor ( ) const override { return QColor ( 201 , 96 , 191 ) ; } <nl> <nl> private : <nl> virtual CryIcon GetIconInternal ( ) const override <nl> class CMeshAnimationType : public CAssetType <nl> return CryIcon ( " icons : common / assets_animation . ico " ) ; <nl> } <nl> } ; <nl> - <nl> mmm a / Code / Sandbox / EditorQt / LevelEditor / LevelAssetType . h <nl> ppp b / Code / Sandbox / EditorQt / LevelEditor / LevelAssetType . 
h <nl> class CLevelType : public CAssetType <nl> public : <nl> DECLARE_ASSET_TYPE_DESC ( CLevelType ) <nl> <nl> - virtual const char * GetTypeName ( ) const override { return " Level " ; } <nl> - virtual const char * GetUiTypeName ( ) const override { return QT_TR_NOOP ( " Level " ) ; } <nl> - virtual const char * GetFileExtension ( ) const override { return GetFileExtensionStatic ( ) ; } <nl> - virtual bool IsImported ( ) const override { return false ; } <nl> - virtual bool CanBeCreated ( ) const override { return true ; } <nl> - virtual bool CanBeEdited ( ) const override { return true ; } <nl> - virtual QColor GetThumbnailColor ( ) const override { return QColor ( 230 , 230 , 230 ) ; } <nl> + virtual const char * GetTypeName ( ) const override { return " Level " ; } <nl> + virtual const char * GetUiTypeName ( ) const override { return QT_TR_NOOP ( " Level " ) ; } <nl> + virtual const char * GetFileExtension ( ) const override { return GetFileExtensionStatic ( ) ; } <nl> + virtual bool IsImported ( ) const override { return false ; } <nl> + virtual bool CanBeCreated ( ) const override { return true ; } <nl> + virtual bool CanBeEdited ( ) const override { return true ; } <nl> + virtual bool CanAutoRepairMetadata ( ) const override { return false ; } / / ! Levels is a special case when the cryasset is next to the level folder . <nl> + virtual QColor GetThumbnailColor ( ) const override { return QColor ( 230 , 230 , 230 ) ; } <nl> <nl> virtual CAssetEditor * Edit ( CAsset * pAsset ) const override ; <nl> virtual bool DeleteAssetFiles ( const CAsset & asset , bool bDeleteSourceFile , size_t & numberOfFilesDeleted ) const override ; <nl> class CLevelType : public CAssetType <nl> protected : <nl> / / ! \ sa CLevelType : : SCreateParams <nl> virtual bool OnCreate ( INewAsset & asset , const void * pTypeSpecificParameter ) const override ; <nl> - static void UpdateDependencies ( IEditableAsset & editAsset ) ; <nl> + static void UpdateDependencies ( IEditableAsset & editAsset ) ; <nl> <nl> protected : <nl> mutable std : : future < bool > m_asyncAction ; <nl> mmm a / Code / Sandbox / Plugins / EditorCommon / AssetSystem / AssetGenerator . cpp <nl> ppp b / Code / Sandbox / Plugins / EditorCommon / AssetSystem / AssetGenerator . cpp <nl> CAssetGenerator : : CAssetGenerator ( ) <nl> m_rcSettings . Append ( " / overwriteextension = cryasset / assettypes = \ " " ) ; <nl> for ( CAssetType * pType : types ) <nl> { <nl> - / / Ignore fallback asset type . <nl> - if ( strcmp ( pType - > GetTypeName ( ) , " cryasset " ) = = 0 ) <nl> - { <nl> - continue ; <nl> - } <nl> - <nl> - / / Ignore deprecated asset types . <nl> - if ( strcmp ( pType - > GetTypeName ( ) , " Xml " ) = = 0 | | strcmp ( pType - > GetTypeName ( ) , " Script " ) = = 0 ) <nl> - { <nl> - continue ; <nl> - } <nl> - <nl> - / / Ignore levels , since this is a special case when the cryasset is next to the level folder . <nl> - if ( strcmp ( pType - > GetTypeName ( ) , " Level " ) = = 0 ) <nl> + if ( ! pType - > CanAutoRepairMetadata ( ) ) <nl> { <nl> continue ; <nl> } <nl> void CAssetGenerator : : GenerateCryasset ( const string & filePath ) <nl> <nl> if ( GetISystem ( ) - > GetIPak ( ) - > IsFileExist ( filePath ) ) <nl> { <nl> + / / TODO : Move the implementation to a virtual function of CAssetType . Thus , each asset would override the default implementation . <nl> + <nl> CResourceCompilerHelper : : CallResourceCompiler ( <nl> filePath . c_str ( ) , <nl> m_rcSettings . 
c_str ( ) , <nl> mmm a / Code / Sandbox / Plugins / EditorCommon / AssetSystem / AssetType . cpp <nl> ppp b / Code / Sandbox / Plugins / EditorCommon / AssetSystem / AssetType . cpp <nl> bool CAssetType : : CopyAsset ( CAsset * pAsset , const char * szNewPath ) const <nl> return true ; <nl> } <nl> <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> / / Fallback asset type if the actual type is not registered . <nl> class CCryAssetType : public CAssetType <nl> { <nl> public : <nl> DECLARE_ASSET_TYPE_DESC ( CCryAssetType ) ; <nl> <nl> - virtual const char * GetTypeName ( ) const override { return " cryasset " ; } <nl> - virtual const char * GetUiTypeName ( ) const override { return " cryasset " ; } <nl> - virtual const char * GetFileExtension ( ) const override { return " unregistered " ; } <nl> - virtual bool IsImported ( ) const override { return false ; } <nl> - virtual bool CanBeEdited ( ) const override { return false ; } <nl> + virtual const char * GetTypeName ( ) const override { return " cryasset " ; } <nl> + virtual const char * GetUiTypeName ( ) const override { return " cryasset " ; } <nl> + virtual const char * GetFileExtension ( ) const override { return " unregistered " ; } <nl> + virtual bool IsImported ( ) const override { return false ; } <nl> + virtual bool CanBeEdited ( ) const override { return false ; } <nl> + virtual bool CanAutoRepairMetadata ( ) const override { return false ; } <nl> <nl> / / Cannot be picked <nl> virtual bool IsUsingGenericPropertyTreePicker ( ) const override { return false ; } <nl> mmm a / Code / Sandbox / Plugins / EditorCommon / AssetSystem / AssetType . h <nl> ppp b / Code / Sandbox / Plugins / EditorCommon / AssetSystem / AssetType . h <nl> class EDITOR_COMMON_API CAssetType : public IClassDesc <nl> / / ! Assign the editor as the instant editor . <nl> void SetInstantEditor ( CAssetEditor * pEditor ) ; <nl> <nl> + / / ! Returns true if the asset type supports automatic generation and repairing missing or broken * . cryasset file . <nl> + / / ! The default implementation returns true . <nl> + / / ! \ sa CAssetGenerator : : GenerateCryasset <nl> + virtual bool CanAutoRepairMetadata ( ) const { return true ; } <nl> + <nl> protected : <nl> / / ! Helper function that parses a string and returns a variant of a type corresponding to \ p pAttrib - > GetType ( ) . <nl> / / ! If conversion fails , a default - constructed varient of that type is returned . ( see QVariant : : value ) . <nl> mmm a / Code / Sandbox / Plugins / EditorSubstance / AssetTypes / SubstanceArchive . h <nl> ppp b / Code / Sandbox / Plugins / EditorSubstance / AssetTypes / SubstanceArchive . 
h <nl> class CSubstanceArchiveType : public CAssetType <nl> <nl> } ; <nl> <nl> - virtual const char * GetTypeName ( ) const override { return " SubstanceDefinition " ; } <nl> - virtual const char * GetUiTypeName ( ) const override { return QT_TR_NOOP ( " Substance Archive " ) ; } <nl> - virtual bool IsImported ( ) const override { return true ; } <nl> - virtual bool CanBeEdited ( ) const override { return true ; } <nl> + virtual const char * GetTypeName ( ) const override { return " SubstanceDefinition " ; } <nl> + virtual const char * GetUiTypeName ( ) const override { return QT_TR_NOOP ( " Substance Archive " ) ; } <nl> + virtual bool IsImported ( ) const override { return true ; } <nl> + virtual bool CanBeEdited ( ) const override { return true ; } <nl> + virtual bool CanAutoRepairMetadata ( ) const override { return false ; } / / ! The metadata file has built - in import options that can not be restored . <nl> virtual CryIcon GetIcon ( ) const override ; <nl> - virtual bool HasThumbnail ( ) const override { return false ; } <nl> - virtual const char * GetFileExtension ( ) const override { return " sbsar " ; } <nl> - virtual QColor GetThumbnailColor ( ) const override { return QColor ( 79 , 187 , 185 ) ; } <nl> + virtual bool HasThumbnail ( ) const override { return false ; } <nl> + virtual const char * GetFileExtension ( ) const override { return " sbsar " ; } <nl> + virtual QColor GetThumbnailColor ( ) const override { return QColor ( 79 , 187 , 185 ) ; } <nl> <nl> virtual void AppendContextMenuActions ( const std : : vector < CAsset * > & assets , CAbstractMenu * menu ) const override ; <nl> virtual CAssetEditor * Edit ( CAsset * asset ) const override ; <nl> mmm a / Code / Sandbox / Plugins / EditorSubstance / AssetTypes / SubstanceInstance . h <nl> ppp b / Code / Sandbox / Plugins / EditorSubstance / AssetTypes / SubstanceInstance . h <nl> class CSubstanceInstanceType : public CAssetType <nl> public : <nl> DECLARE_ASSET_TYPE_DESC ( CSubstanceInstanceType ) ; <nl> <nl> - virtual const char * GetTypeName ( ) const override { return " SubstanceInstance " ; } <nl> - virtual const char * GetUiTypeName ( ) const override { return QT_TR_NOOP ( " Substance Instance " ) ; } <nl> - virtual bool IsImported ( ) const override { return false ; } <nl> - virtual bool CanBeEdited ( ) const override { return true ; } <nl> + virtual const char * GetTypeName ( ) const override { return " SubstanceInstance " ; } <nl> + virtual const char * GetUiTypeName ( ) const override { return QT_TR_NOOP ( " Substance Instance " ) ; } <nl> + virtual bool IsImported ( ) const override { return false ; } <nl> + virtual bool CanBeEdited ( ) const override { return true ; } <nl> + virtual bool CanAutoRepairMetadata ( ) const override { return false ; } / / ! The metadata file has built - in import options that can not be restored . 
<nl> virtual CryIcon GetIcon ( ) const override ; <nl> - virtual bool HasThumbnail ( ) const override { return false ; } <nl> - virtual QColor GetThumbnailColor ( ) const override { return QColor ( 79 , 187 , 185 ) ; } <nl> - virtual const char * GetFileExtension ( ) const override { return " crysub " ; } <nl> + virtual bool HasThumbnail ( ) const override { return false ; } <nl> + virtual QColor GetThumbnailColor ( ) const override { return QColor ( 79 , 187 , 185 ) ; } <nl> + virtual const char * GetFileExtension ( ) const override { return " crysub " ; } <nl> <nl> virtual CAssetEditor * Edit ( CAsset * asset ) const override ; <nl> virtual void AppendContextMenuActions ( const std : : vector < CAsset * > & assets , CAbstractMenu * menu ) const override ; <nl> new file mode 100644 <nl> index 0000000000 . . 3baa01c215 <nl> mmm / dev / null <nl> ppp b / Code / Sandbox / Plugins / PrefabAssetType / PrefabAssetType . h <nl> <nl> + / / Copyright 2001 - 2018 Crytek GmbH / Crytek Group . All rights reserved . <nl> + <nl> + # pragma once <nl> + <nl> + # include < AssetSystem / AssetType . h > <nl> + <nl> + / / ! Prefabs are groups of objects that can be placed in the level as instances . <nl> + class CPrefabAssetType : public CAssetType <nl> + { <nl> + public : <nl> + DECLARE_ASSET_TYPE_DESC ( CPrefabAssetType ) ; <nl> + <nl> + virtual const char * GetTypeName ( ) const { return " Prefab " ; } <nl> + virtual const char * GetUiTypeName ( ) const { return " Prefab " ; } <nl> + virtual const char * GetFileExtension ( ) const { return " prefab " ; } <nl> + virtual QColor GetThumbnailColor ( ) const override { return QColor ( 179 , 179 , 179 ) ; } <nl> + virtual bool CanBeCreated ( ) const { return true ; } <nl> + virtual bool CanBeEdited ( ) const { return false ; } <nl> + virtual bool CanAutoRepairMetadata ( ) const override { return false ; } / / ! Prefab assets use unique guids that can not be automatically restored . <nl> + virtual const char * GetObjectClassName ( ) const { return " Prefab " ; } <nl> + virtual string GetObjectFilePath ( const CAsset * pAsset ) const ; <nl> + <nl> + private : <nl> + <nl> + / / ! Initializes the asset . Creates all the necessary asset files . <nl> + / / ! \ param pCreateParams Points to an instance of ISelectionGroup or nullptr . <nl> + / / ! \ sa ISelectionGroup <nl> + virtual bool OnCreate ( INewAsset & asset , const void * pCreateParams ) const override ; <nl> + virtual CryIcon GetIconInternal ( ) const override ; <nl> + } ; <nl>
!B (CE-17460) (Sandbox) Unable to "create new instance" via substance asset context menu
CRYTEK/CRYENGINE
8bb80fc4f8c99317909f67d3333997515643f392
2018-07-13T12:27:09Z
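Editor's note on the Sandbox record above: the commit replaces the hard-coded GetTypeName() string comparisons in CAssetGenerator with a CanAutoRepairMetadata() virtual that individual asset types override. A simplified, self-contained C++ sketch of that shape follows; CAssetType, CLevelType and BuildRcAssetTypeList here are toy stand-ins, not the actual Sandbox classes.

// Simplified sketch: the generator asks each type whether its metadata can be
// rebuilt instead of comparing type names.
#include <string>
#include <vector>

struct CAssetType
{
    virtual ~CAssetType() = default;
    virtual const char* GetTypeName() const = 0;
    // Default: the *.cryasset metadata can be regenerated automatically.
    virtual bool CanAutoRepairMetadata() const { return true; }
};

struct CLevelType : CAssetType
{
    const char* GetTypeName() const override { return "Level"; }
    // Levels keep the cryasset next to the level folder, so they opt out.
    bool CanAutoRepairMetadata() const override { return false; }
};

// Builds the asset-type list handed to the resource compiler.
std::string BuildRcAssetTypeList(const std::vector<const CAssetType*>& types)
{
    std::string list;
    for (const CAssetType* pType : types)
    {
        if (!pType->CanAutoRepairMetadata()) // replaces the strcmp() special cases
            continue;
        if (!list.empty())
            list += ',';
        list += pType->GetTypeName();
    }
    return list;
}

int main()
{
    const CLevelType level;
    return BuildRcAssetTypeList({ &level }).empty() ? 0 : 1;
}

Pushing the decision into the type itself means a new asset type opts out by overriding one method instead of growing the generator's list of special-cased names.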
mmm a / utils / buildbot - release - notes . txt <nl> ppp b / utils / buildbot - release - notes . txt <nl> <nl> Latest <nl> mmmmmmmmm - <nl> <nl> + * NSArray * parameters and result types of Objective - C APIs are now <nl> + imported as AnyObject [ ] ! , i . e . , an implicitly unwrapped optional <nl> + array storing AnyObject values . For example , NSView ' s constraints <nl> + property <nl> + <nl> + @ property ( readonly ) NSArray * constraints ; <nl> + <nl> + is now imported as <nl> + <nl> + var constraints : AnyObject [ ] ! <nl> + <nl> + Note that one can implicitly convert between an AnyObject [ ] and an <nl> + NSArray ( in both directions ) , so ( for example ) one can still <nl> + explicitly use NSArray if desired : <nl> + <nl> + var array : NSArray = view . constraints <nl> + <nl> + Swift arrays bridge to NSArray similarly to the way Swift <nl> + strings bridge to NSString . <nl> + <nl> * UnsafePointer ( and ObjCMutablePointer ) " set ( ) " and " get ( ) " <nl> have been replaced with a property called " pointee " . <nl> <nl>
Release notes for the NSArray * -> AnyObject[]! import.
apple/swift
2488ba07468d3458c67090cb58065e9868945d53
2014-05-16T15:00:12Z
mmm a / tensorflow / contrib / data / python / ops / readers . py <nl> ppp b / tensorflow / contrib / data / python / ops / readers . py <nl> def read_batch_features ( file_pattern , <nl> dataset = dataset . shuffle ( capacity ) <nl> dataset = dataset . batch ( batch_size ) <nl> dataset = dataset . map ( lambda x : parsing_ops . parse_example ( x , features ) ) <nl> + dataset = dataset . prefetch ( 1 ) <nl> iterator = dataset . make_one_shot_iterator ( ) <nl> outputs = iterator . get_next ( ) <nl> return outputs <nl>
Add a single capacity prefetch to `tf.contrib.data.read_batch_features`.
tensorflow/tensorflow
0946c24012c9e26670d2e29679df13bf6f002fa8
2017-12-04T23:30:14Z
mmm a / bitcoin - qt . pro <nl> ppp b / bitcoin - qt . pro <nl> TEMPLATE = app <nl> TARGET = <nl> VERSION = 0 . 6 . 99 <nl> INCLUDEPATH + = src src / json src / qt <nl> - DEFINES + = QT_GUI BOOST_THREAD_USE_LIB <nl> + DEFINES + = QT_GUI BOOST_THREAD_USE_LIB USE_IPV6 <nl> CONFIG + = no_include_pwd <nl> <nl> # for boost 1 . 37 , add - mt to the boost libraries <nl> mmm a / doc / build - osx . txt <nl> ppp b / doc / build - osx . txt <nl> sudo port install qrencode <nl> 4 . Now you should be able to build bitcoind : <nl> <nl> cd bitcoin / src <nl> - make - f makefile . osx <nl> + make - f makefile . osx USE_IPV6 = 1 <nl> <nl> Run : <nl> . / bitcoind - - help # for a list of command - line options . <nl> mmm a / doc / build - unix . txt <nl> ppp b / doc / build - unix . txt <nl> your package manager . Set USE_QRCODE to control this : <nl> USE_QRCODE = 0 ( the default ) No QRCode support - libarcode not required <nl> USE_QRCODE = 1 QRCode support enabled <nl> <nl> + IPv6 support may be enabled by setting <nl> + USE_IPV6 = 1 Enable IPv6 support <nl> + <nl> Licenses of statically linked libraries : <nl> Berkeley DB New BSD license with additional requirement that linked <nl> software must be free open source <nl> emerge - av1 - - noreplace boost glib openssl sys - libs / db : 4 . 8 <nl> <nl> Take the following steps to build ( no UPnP support ) : <nl> cd $ { BITCOIN_DIR } / src <nl> - make - f makefile . unix USE_UPNP = BDB_INCLUDE_PATH = ' / usr / include / db4 . 8 ' <nl> + make - f makefile . unix USE_UPNP = USE_IPV6 = 1 BDB_INCLUDE_PATH = ' / usr / include / db4 . 8 ' <nl> strip bitcoind <nl> <nl> <nl> mmm a / src / init . cpp <nl> ppp b / src / init . cpp <nl> bool AppInit ( int argc , char * argv [ ] ) <nl> return fRet ; <nl> } <nl> <nl> + bool static Bind ( const CService & addr ) { <nl> + if ( IsLimited ( addr ) ) <nl> + return false ; <nl> + std : : string strError ; <nl> + if ( ! 
BindListenPort ( addr , strError ) ) <nl> + { <nl> + ThreadSafeMessageBox ( strError , _ ( " Bitcoin " ) , wxOK | wxMODAL ) ; <nl> + return false ; <nl> + } <nl> + return true ; <nl> + } <nl> + <nl> bool AppInit2 ( int argc , char * argv [ ] ) <nl> { <nl> # ifdef _MSC_VER <nl> bool AppInit2 ( int argc , char * argv [ ] ) <nl> " - timeout = < n > \ t " + _ ( " Specify connection timeout ( in milliseconds ) " ) + " \ n " + <nl> " - proxy = < ip : port > \ t " + _ ( " Connect through socks proxy " ) + " \ n " + <nl> " - socks = < n > \ t " + _ ( " Select the version of socks proxy to use ( 4 or 5 , 5 is default ) " ) + " \ n " + <nl> + " - noproxy = < net > \ t " + _ ( " Do not use proxy for connections to network net ( ipv4 or ipv6 ) " ) + " \ n " + <nl> " - dns \ t " + _ ( " Allow DNS lookups for - addnode , - seednode and - connect " ) + " \ n " + <nl> " - proxydns \ t " + _ ( " Pass DNS requests to ( SOCKS5 ) proxy " ) + " \ n " + <nl> " - port = < port > \ t \ t " + _ ( " Listen for connections on < port > ( default : 8333 or testnet : 18333 ) " ) + " \ n " + <nl> bool AppInit2 ( int argc , char * argv [ ] ) <nl> " - connect = < ip > \ t \ t " + _ ( " Connect only to the specified node " ) + " \ n " + <nl> " - seednode = < ip > \ t \ t " + _ ( " Connect to a node to retrieve peer addresses , and disconnect " ) + " \ n " + <nl> " - externalip = < ip > \ t " + _ ( " Specify your own public address " ) + " \ n " + <nl> + " - blocknet = < net > \ t " + _ ( " Do not connect to addresses in network net ( ipv4 , ipv6 ) " ) + " \ n " + <nl> " - discover \ t " + _ ( " Try to discover public IP address ( default : 1 ) " ) + " \ n " + <nl> " - irc \ t " + _ ( " Find peers using internet relay chat ( default : 0 ) " ) + " \ n " + <nl> " - listen \ t " + _ ( " Accept connections from outside ( default : 1 ) " ) + " \ n " + <nl> + " - bind = < addr > \ t " + _ ( " Bind to given address . Use [ host ] : port notation for IPv6 " ) + " \ n " + <nl> # ifdef QT_GUI <nl> " - lang = < lang > \ t \ t " + _ ( " Set language , for example \ " de_DE \ " ( default : system locale ) " ) + " \ n " + <nl> # endif <nl> bool AppInit2 ( int argc , char * argv [ ] ) <nl> } <nl> } <nl> <nl> + if ( mapArgs . count ( " - noproxy " ) ) <nl> + { <nl> + BOOST_FOREACH ( std : : string snet , mapMultiArgs [ " - noproxy " ] ) { <nl> + enum Network net = ParseNetwork ( snet ) ; <nl> + if ( net = = NET_UNROUTABLE ) { <nl> + ThreadSafeMessageBox ( _ ( " Unknown network specified in - noproxy " ) , _ ( " Bitcoin " ) , wxOK | wxMODAL ) ; <nl> + return false ; <nl> + } <nl> + SetNoProxy ( net ) ; <nl> + } <nl> + } <nl> + <nl> if ( mapArgs . count ( " - connect " ) ) <nl> SoftSetBoolArg ( " - dnsseed " , false ) ; <nl> - <nl> + <nl> + / / even in Tor mode , if - bind is specified , you really want - listen <nl> + if ( mapArgs . count ( " - bind " ) ) <nl> + SoftSetBoolArg ( " - listen " , true ) ; <nl> + <nl> bool fTor = ( fUseProxy & & addrProxy . GetPort ( ) = = 9050 ) ; <nl> if ( fTor ) <nl> { <nl> bool AppInit2 ( int argc , char * argv [ ] ) <nl> SoftSetBoolArg ( " - discover " , false ) ; <nl> } <nl> <nl> + if ( mapArgs . 
count ( " - blocknet " ) ) { <nl> + BOOST_FOREACH ( std : : string snet , mapMultiArgs [ " - blocknet " ] ) { <nl> + enum Network net = ParseNetwork ( snet ) ; <nl> + if ( net = = NET_UNROUTABLE ) { <nl> + ThreadSafeMessageBox ( _ ( " Unknown network specified in - blocknet " ) , _ ( " Bitcoin " ) , wxOK | wxMODAL ) ; <nl> + return false ; <nl> + } <nl> + SetLimited ( net ) ; <nl> + } <nl> + } <nl> + <nl> fNameLookup = GetBoolArg ( " - dns " ) ; <nl> fProxyNameLookup = GetBoolArg ( " - proxydns " ) ; <nl> if ( fProxyNameLookup ) <nl> bool AppInit2 ( int argc , char * argv [ ] ) <nl> const char * pszP2SH = " / P2SH / " ; <nl> COINBASE_FLAGS < < std : : vector < unsigned char > ( pszP2SH , pszP2SH + strlen ( pszP2SH ) ) ; <nl> <nl> + bool fBound = false ; <nl> if ( ! fNoListen ) <nl> { <nl> std : : string strError ; <nl> - if ( ! BindListenPort ( strError ) ) <nl> - { <nl> - ThreadSafeMessageBox ( strError , _ ( " Bitcoin " ) , wxOK | wxMODAL ) ; <nl> - return false ; <nl> + if ( mapArgs . count ( " - bind " ) ) { <nl> + BOOST_FOREACH ( std : : string strBind , mapMultiArgs [ " - bind " ] ) { <nl> + fBound | = Bind ( CService ( strBind , GetDefaultPort ( ) , false ) ) ; <nl> + } <nl> + } else { <nl> + struct in_addr inaddr_any = { s_addr : INADDR_ANY } ; <nl> + fBound | = Bind ( CService ( inaddr_any , GetDefaultPort ( ) ) ) ; <nl> + # ifdef USE_IPV6 <nl> + fBound | = Bind ( CService ( in6addr_any , GetDefaultPort ( ) ) ) ; <nl> + # endif <nl> } <nl> + if ( ! fBound ) <nl> + return false ; <nl> } <nl> <nl> if ( mapArgs . count ( " - externalip " ) ) <nl> mmm a / src / irc . cpp <nl> ppp b / src / irc . cpp <nl> void ThreadIRCSeed2 ( void * parg ) <nl> return ; <nl> } <nl> <nl> - CNetAddr addrLocal ; <nl> + CService addrLocal ; <nl> string strMyName ; <nl> if ( GetLocal ( addrLocal , & addrConnect ) ) <nl> strMyName = EncodeAddress ( GetLocalAddress ( & addrConnect ) ) ; <nl> mmm a / src / main . cpp <nl> ppp b / src / main . cpp <nl> bool static ProcessMessage ( CNode * pfrom , string strCommand , CDataStream & vRecv ) <nl> } <nl> <nl> / / Store the new addresses <nl> + vector < CAddress > vAddrOk ; <nl> int64 nNow = GetAdjustedTime ( ) ; <nl> int64 nSince = nNow - 10 * 60 ; <nl> BOOST_FOREACH ( CAddress & addr , vAddr ) <nl> { <nl> if ( fShutdown ) <nl> return true ; <nl> - / / ignore IPv6 for now , since it isn ' t implemented anyway <nl> - if ( ! addr . IsIPv4 ( ) ) <nl> - continue ; <nl> if ( addr . nTime < = 100000000 | | addr . nTime > nNow + 10 * 60 ) <nl> addr . nTime = nNow - 5 * 24 * 60 * 60 ; <nl> pfrom - > AddAddressKnown ( addr ) ; <nl> + bool fReachable = IsReachable ( addr ) ; <nl> if ( addr . nTime > nSince & & ! pfrom - > fGetAddr & & vAddr . size ( ) < = 10 & & addr . IsRoutable ( ) ) <nl> { <nl> / / Relay to a limited number of other nodes <nl> bool static ProcessMessage ( CNode * pfrom , string strCommand , CDataStream & vRecv ) <nl> hashKey = Hash ( BEGIN ( hashKey ) , END ( hashKey ) ) ; <nl> mapMix . insert ( make_pair ( hashKey , pnode ) ) ; <nl> } <nl> - int nRelayNodes = 2 ; <nl> + int nRelayNodes = fReachable ? 2 : 1 ; / / limited relaying of addresses outside our network ( s ) <nl> for ( multimap < uint256 , CNode * > : : iterator mi = mapMix . begin ( ) ; mi ! = mapMix . end ( ) & & nRelayNodes - - > 0 ; + + mi ) <nl> ( ( * mi ) . second ) - > PushAddress ( addr ) ; <nl> } <nl> } <nl> + / / Do not store addresses outside our network <nl> + if ( fReachable ) <nl> + vAddrOk . push_back ( addr ) ; <nl> } <nl> - addrman . 
Add ( vAddr , pfrom - > addr , 2 * 60 * 60 ) ; <nl> + addrman . Add ( vAddrOk , pfrom - > addr , 2 * 60 * 60 ) ; <nl> if ( vAddr . size ( ) < 1000 ) <nl> pfrom - > fGetAddr = false ; <nl> if ( pfrom - > fOneShot ) <nl> mmm a / src / makefile . linux - mingw <nl> ppp b / src / makefile . linux - mingw <nl> LIBS = \ <nl> - l ssl \ <nl> - l crypto <nl> <nl> - DEFS = - D_MT - DWIN32 - D_WINDOWS - DBOOST_THREAD_USE_LIB <nl> + DEFS = - D_MT - DWIN32 - D_WINDOWS - DBOOST_THREAD_USE_LIB - DUSE_IPV6 <nl> DEBUGFLAGS = - g <nl> CFLAGS = - O2 - w - Wall - Wextra - Wformat - Wformat - security - Wno - unused - parameter $ ( DEBUGFLAGS ) $ ( DEFS ) $ ( INCLUDEPATHS ) <nl> <nl> mmm a / src / makefile . mingw <nl> ppp b / src / makefile . mingw <nl> LIBS = \ <nl> - l ssl \ <nl> - l crypto <nl> <nl> - DEFS = - DWIN32 - D_WINDOWS - DBOOST_THREAD_USE_LIB <nl> + DEFS = - DWIN32 - D_WINDOWS - DBOOST_THREAD_USE_LIB - DUSE_IPV6 <nl> DEBUGFLAGS = - g <nl> CFLAGS = - mthreads - O2 - w - Wall - Wextra - Wformat - Wformat - security - Wno - unused - parameter $ ( DEBUGFLAGS ) $ ( DEFS ) $ ( INCLUDEPATHS ) <nl> <nl> mmm a / src / makefile . osx <nl> ppp b / src / makefile . osx <nl> LIBS + = \ <nl> TESTDEFS + = - DBOOST_TEST_DYN_LINK <nl> endif <nl> <nl> - DEFS = - DMAC_OSX - DMSG_NOSIGNAL = 0 <nl> + DEFS = - DMAC_OSX - DMSG_NOSIGNAL = 0 - DUSE_IPV6 <nl> <nl> ifdef RELEASE <nl> # Compile for maximum compatibility and smallest size . <nl> mmm a / src / makefile . unix <nl> ppp b / src / makefile . unix <nl> <nl> <nl> USE_UPNP : = 0 <nl> <nl> - DEFS = <nl> + DEFS = - DUSE_IPV6 <nl> <nl> DEFS + = $ ( addprefix - I , $ ( CURDIR ) $ ( CURDIR ) / obj $ ( BOOST_INCLUDE_PATH ) $ ( BDB_INCLUDE_PATH ) $ ( OPENSSL_INCLUDE_PATH ) ) <nl> LIBS = $ ( addprefix - L , $ ( BOOST_LIB_PATH ) $ ( BDB_LIB_PATH ) $ ( OPENSSL_LIB_PATH ) ) <nl> mmm a / src / net . cpp <nl> ppp b / src / net . cpp <nl> bool OpenNetworkConnection ( const CAddress & addrConnect , CSemaphoreGrant * grantOu <nl> bool fClient = false ; <nl> static bool fUseUPnP = false ; <nl> uint64 nLocalServices = ( fClient ? 0 : NODE_NETWORK ) ; <nl> - CCriticalSection cs_mapLocalHost ; <nl> - map < CNetAddr , int > mapLocalHost ; <nl> + static CCriticalSection cs_mapLocalHost ; <nl> + static map < CService , int > mapLocalHost ; <nl> + static bool vfReachable [ NET_MAX ] = { } ; <nl> + static bool vfLimited [ NET_MAX ] = { } ; <nl> static CNode * pnodeLocalHost = NULL ; <nl> uint64 nLocalHostNonce = 0 ; <nl> array < int , THREAD_MAX > vnThreadsRunning ; <nl> - static SOCKET hListenSocket = INVALID_SOCKET ; <nl> + static std : : vector < SOCKET > vhListenSocket ; <nl> CAddrMan addrman ; <nl> <nl> vector < CNode * > vNodes ; <nl> void CNode : : PushGetBlocks ( CBlockIndex * pindexBegin , uint256 hashEnd ) <nl> } <nl> <nl> / / find ' best ' local address for a particular peer <nl> - bool GetLocal ( CNetAddr & addr , const CNetAddr * paddrPeer ) <nl> + bool GetLocal ( CService & addr , const CNetAddr * paddrPeer ) <nl> { <nl> if ( fUseProxy | | mapArgs . count ( " - connect " ) | | fNoListen ) <nl> return false ; <nl> bool GetLocal ( CNetAddr & addr , const CNetAddr * paddrPeer ) <nl> int nBestReachability = - 1 ; <nl> { <nl> LOCK ( cs_mapLocalHost ) ; <nl> - for ( map < CNetAddr , int > : : iterator it = mapLocalHost . begin ( ) ; it ! = mapLocalHost . end ( ) ; it + + ) <nl> + for ( map < CService , int > : : iterator it = mapLocalHost . begin ( ) ; it ! = mapLocalHost . end ( ) ; it + + ) <nl> { <nl> int nCount = ( * it ) . second ; <nl> int nReachability = ( * it ) . first . 
GetReachabilityFrom ( paddrPeer ) ; <nl> bool GetLocal ( CNetAddr & addr , const CNetAddr * paddrPeer ) <nl> CAddress GetLocalAddress ( const CNetAddr * paddrPeer ) <nl> { <nl> CAddress ret ( CService ( " 0 . 0 . 0 . 0 " , 0 ) , 0 ) ; <nl> - CNetAddr addr ; <nl> + CService addr ; <nl> if ( GetLocal ( addr , paddrPeer ) ) <nl> { <nl> - ret . SetIP ( addr ) ; <nl> - ret . SetPort ( GetListenPort ( ) ) ; <nl> + ret = CAddress ( addr ) ; <nl> ret . nServices = nLocalServices ; <nl> ret . nTime = GetAdjustedTime ( ) ; <nl> } <nl> void static AdvertizeLocal ( ) <nl> if ( pnode - > fSuccessfullyConnected ) <nl> { <nl> CAddress addrLocal = GetLocalAddress ( & pnode - > addr ) ; <nl> - if ( addrLocal . IsRoutable ( ) & & ( CNetAddr ) addrLocal ! = ( CNetAddr ) pnode - > addrLocal ) <nl> + if ( addrLocal . IsRoutable ( ) & & ( CService ) addrLocal ! = ( CService ) pnode - > addrLocal ) <nl> { <nl> pnode - > PushAddress ( addrLocal ) ; <nl> pnode - > addrLocal = addrLocal ; <nl> void static AdvertizeLocal ( ) <nl> } <nl> <nl> / / learn a new local address <nl> - bool AddLocal ( const CNetAddr & addr , int nScore ) <nl> + bool AddLocal ( const CService & addr , int nScore ) <nl> { <nl> if ( ! addr . IsRoutable ( ) ) <nl> return false ; <nl> bool AddLocal ( const CNetAddr & addr , int nScore ) <nl> { <nl> LOCK ( cs_mapLocalHost ) ; <nl> mapLocalHost [ addr ] = std : : max ( nScore , mapLocalHost [ addr ] ) + ( mapLocalHost . count ( addr ) ? 1 : 0 ) ; <nl> + enum Network net = addr . GetNetwork ( ) ; <nl> + vfReachable [ net ] = true ; <nl> + if ( net = = NET_IPV6 ) vfReachable [ NET_IPV4 ] = true ; <nl> } <nl> <nl> AdvertizeLocal ( ) ; <nl> bool AddLocal ( const CNetAddr & addr , int nScore ) <nl> return true ; <nl> } <nl> <nl> - / / vote for a local address <nl> - bool SeenLocal ( const CNetAddr & addr ) <nl> + bool AddLocal ( const CNetAddr & addr , int nScore , int port ) <nl> + { <nl> + if ( port = = - 1 ) <nl> + port = GetListenPort ( ) ; <nl> + return AddLocal ( CService ( addr , port ) , nScore ) ; <nl> + } <nl> + <nl> + / * * Make a particular network entirely off - limits ( no automatic connects to it ) * / <nl> + void SetLimited ( enum Network net , bool fLimited ) <nl> + { <nl> + LOCK ( cs_mapLocalHost ) ; <nl> + vfLimited [ net ] = fLimited ; <nl> + } <nl> + <nl> + bool IsLimited ( const CNetAddr & addr ) <nl> + { <nl> + LOCK ( cs_mapLocalHost ) ; <nl> + return vfLimited [ addr . GetNetwork ( ) ] ; <nl> + } <nl> + <nl> + / * * vote for a local address * / <nl> + bool SeenLocal ( const CService & addr ) <nl> { <nl> { <nl> LOCK ( cs_mapLocalHost ) ; <nl> bool SeenLocal ( const CNetAddr & addr ) <nl> return true ; <nl> } <nl> <nl> - / / check whether a given address is potentially local <nl> - bool IsLocal ( const CNetAddr & addr ) <nl> + / * * check whether a given address is potentially local * / <nl> + bool IsLocal ( const CService & addr ) <nl> { <nl> LOCK ( cs_mapLocalHost ) ; <nl> return mapLocalHost . count ( addr ) > 0 ; <nl> } <nl> <nl> + / * * check whether a given address is in a network we can probably connect to * / <nl> + bool IsReachable ( const CNetAddr & addr ) <nl> + { <nl> + LOCK ( cs_mapLocalHost ) ; <nl> + enum Network net = addr . GetNetwork ( ) ; <nl> + return vfReachable [ net ] & & ! 
vfLimited [ net ] ; <nl> + } <nl> <nl> bool GetMyExternalIP2 ( const CService & addrConnect , const char * pszGet , const char * pszKeyword , CNetAddr & ipRet ) <nl> { <nl> void ThreadSocketHandler2 ( void * parg ) <nl> FD_ZERO ( & fdsetError ) ; <nl> SOCKET hSocketMax = 0 ; <nl> <nl> - if ( hListenSocket ! = INVALID_SOCKET ) <nl> + BOOST_FOREACH ( SOCKET hListenSocket , vhListenSocket ) { <nl> FD_SET ( hListenSocket , & fdsetRecv ) ; <nl> - hSocketMax = max ( hSocketMax , hListenSocket ) ; <nl> + hSocketMax = max ( hSocketMax , hListenSocket ) ; <nl> + } <nl> { <nl> LOCK ( cs_vNodes ) ; <nl> BOOST_FOREACH ( CNode * pnode , vNodes ) <nl> void ThreadSocketHandler2 ( void * parg ) <nl> / / <nl> / / Accept new connections <nl> / / <nl> + BOOST_FOREACH ( SOCKET hListenSocket , vhListenSocket ) <nl> if ( hListenSocket ! = INVALID_SOCKET & & FD_ISSET ( hListenSocket , & fdsetRecv ) ) <nl> { <nl> - struct sockaddr_in sockaddr ; <nl> + # ifdef USE_IPV6 <nl> + struct sockaddr_storage sockaddr ; <nl> + # else <nl> + struct sockaddr sockaddr ; <nl> + # endif <nl> socklen_t len = sizeof ( sockaddr ) ; <nl> SOCKET hSocket = accept ( hListenSocket , ( struct sockaddr * ) & sockaddr , & len ) ; <nl> CAddress addr ; <nl> int nInbound = 0 ; <nl> <nl> if ( hSocket ! = INVALID_SOCKET ) <nl> - addr = CAddress ( sockaddr ) ; <nl> + if ( ! addr . SetSockAddr ( ( const struct sockaddr * ) & sockaddr ) ) <nl> + printf ( " warning : unknown socket family \ n " ) ; <nl> <nl> { <nl> LOCK ( cs_vNodes ) ; <nl> void ThreadOpenConnections2 ( void * parg ) <nl> CAddress addr = addrman . Select ( 10 + min ( nOutbound , 8 ) * 10 ) ; <nl> <nl> / / if we selected an invalid address , restart <nl> - if ( ! addr . IsIPv4 ( ) | | ! addr . IsValid ( ) | | setConnected . count ( addr . GetGroup ( ) ) | | IsLocal ( addr ) ) <nl> + if ( ! addr . IsValid ( ) | | setConnected . count ( addr . GetGroup ( ) ) | | IsLocal ( addr ) ) <nl> break ; <nl> <nl> nTries + + ; <nl> <nl> + if ( IsLimited ( addr ) ) <nl> + continue ; <nl> + <nl> / / only consider very recently tried nodes after 30 failed attempts <nl> if ( nANow - addr . nLastTry < 600 & & nTries < 30 ) <nl> continue ; <nl> void ThreadMessageHandler2 ( void * parg ) <nl> <nl> <nl> <nl> - bool BindListenPort ( string & strError ) <nl> + bool BindListenPort ( const CService & addrBind , string & strError ) <nl> { <nl> strError = " " ; <nl> int nOne = 1 ; <nl> bool BindListenPort ( string & strError ) <nl> # endif <nl> <nl> / / Create socket for listening for incoming connections <nl> - hListenSocket = socket ( AF_INET , SOCK_STREAM , IPPROTO_TCP ) ; <nl> + # ifdef USE_IPV6 <nl> + struct sockaddr_storage sockaddr ; <nl> + # else <nl> + struct sockaddr sockaddr ; <nl> + # endif <nl> + socklen_t len = sizeof ( sockaddr ) ; <nl> + if ( ! addrBind . GetSockAddr ( ( struct sockaddr * ) & sockaddr , & len ) ) <nl> + { <nl> + strError = strprintf ( " Error : bind address family for % s not supported " , addrBind . ToString ( ) . c_str ( ) ) ; <nl> + printf ( " % s \ n " , strError . 
c_str ( ) ) ; <nl> + return false ; <nl> + } <nl> + <nl> + SOCKET hListenSocket = socket ( ( ( struct sockaddr * ) & sockaddr ) - > sa_family , SOCK_STREAM , IPPROTO_TCP ) ; <nl> if ( hListenSocket = = INVALID_SOCKET ) <nl> { <nl> strError = strprintf ( " Error : Couldn ' t open socket for incoming connections ( socket returned error % d ) " , WSAGetLastError ( ) ) ; <nl> bool BindListenPort ( string & strError ) <nl> setsockopt ( hListenSocket , SOL_SOCKET , SO_REUSEADDR , ( void * ) & nOne , sizeof ( int ) ) ; <nl> # endif <nl> <nl> + <nl> # ifdef WIN32 <nl> / / Set to nonblocking , incoming connections will also inherit this <nl> if ( ioctlsocket ( hListenSocket , FIONBIO , ( u_long * ) & nOne ) = = SOCKET_ERROR ) <nl> bool BindListenPort ( string & strError ) <nl> return false ; <nl> } <nl> <nl> - / / The sockaddr_in structure specifies the address family , <nl> - / / IP address , and port for the socket that is being bound <nl> - struct sockaddr_in sockaddr ; <nl> - memset ( & sockaddr , 0 , sizeof ( sockaddr ) ) ; <nl> - sockaddr . sin_family = AF_INET ; <nl> - sockaddr . sin_addr . s_addr = INADDR_ANY ; / / bind to all IPs on this computer <nl> - sockaddr . sin_port = htons ( GetListenPort ( ) ) ; <nl> - if ( : : bind ( hListenSocket , ( struct sockaddr * ) & sockaddr , sizeof ( sockaddr ) ) = = SOCKET_ERROR ) <nl> + # ifdef USE_IPV6 <nl> + / / some systems don ' t have IPV6_V6ONLY but are always v6only ; others do have the option <nl> + / / and enable it by default or not . Try to enable it , if possible . <nl> + if ( addrBind . IsIPv6 ( ) ) { <nl> + # ifdef IPV6_V6ONLY <nl> + setsockopt ( hListenSocket , IPPROTO_IPV6 , IPV6_V6ONLY , ( void * ) & nOne , sizeof ( int ) ) ; <nl> + # endif <nl> + # ifdef WIN32 <nl> + int nProtLevel = 10 / * PROTECTION_LEVEL_UNRESTRICTED * / ; <nl> + int nParameterId = 23 / * IPV6_PROTECTION_LEVEl * / ; <nl> + / / this call is allowed to fail <nl> + setsockopt ( hListenSocket , IPPROTO_IPV6 , nParameterId , ( const char * ) & nProtLevel , sizeof ( int ) ) ; <nl> + # endif <nl> + } <nl> + # endif <nl> + <nl> + if ( : : bind ( hListenSocket , ( struct sockaddr * ) & sockaddr , len ) = = SOCKET_ERROR ) <nl> { <nl> int nErr = WSAGetLastError ( ) ; <nl> if ( nErr = = WSAEADDRINUSE ) <nl> - strError = strprintf ( _ ( " Unable to bind to port % d on this computer . Bitcoin is probably already running . " ) , ntohs ( sockaddr . sin_port ) ) ; <nl> + strError = strprintf ( _ ( " Unable to bind to % s on this computer . Bitcoin is probably already running . " ) , addrBind . ToString ( ) . c_str ( ) ) ; <nl> else <nl> - strError = strprintf ( " Error : Unable to bind to port % d on this computer ( bind returned error % d ) " , ntohs ( sockaddr . sin_port ) , nErr ) ; <nl> + strError = strprintf ( _ ( " Unable to bind to % s on this computer ( bind returned error % d , % s ) " ) , addrBind . ToString ( ) . c_str ( ) , nErr , strerror ( nErr ) ) ; <nl> printf ( " % s \ n " , strError . c_str ( ) ) ; <nl> return false ; <nl> } <nl> - printf ( " Bound to port % d \ n " , ntohs ( sockaddr . sin_port ) ) ; <nl> + printf ( " Bound to % s \ n " , addrBind . ToString ( ) . c_str ( ) ) ; <nl> <nl> / / Listen for incoming connections <nl> if ( listen ( hListenSocket , SOMAXCONN ) = = SOCKET_ERROR ) <nl> bool BindListenPort ( string & strError ) <nl> return false ; <nl> } <nl> <nl> + vhListenSocket . push_back ( hListenSocket ) ; <nl> + <nl> + if ( addrBind . 
IsRoutable ( ) & & GetBoolArg ( " - discover " , true ) ) <nl> + AddLocal ( addrBind , LOCAL_BIND ) ; <nl> + <nl> return true ; <nl> } <nl> <nl> void static Discover ( ) <nl> if ( ( ifa - > ifa_flags & IFF_UP ) = = 0 ) continue ; <nl> if ( strcmp ( ifa - > ifa_name , " lo " ) = = 0 ) continue ; <nl> if ( strcmp ( ifa - > ifa_name , " lo0 " ) = = 0 ) continue ; <nl> - char pszIP [ 100 ] ; <nl> if ( ifa - > ifa_addr - > sa_family = = AF_INET ) <nl> { <nl> struct sockaddr_in * s4 = ( struct sockaddr_in * ) ( ifa - > ifa_addr ) ; <nl> - if ( inet_ntop ( ifa - > ifa_addr - > sa_family , ( void * ) & ( s4 - > sin_addr ) , pszIP , sizeof ( pszIP ) ) ! = NULL ) <nl> - printf ( " ipv4 % s : % s \ n " , ifa - > ifa_name , pszIP ) ; <nl> - <nl> - / / Take the first IP that isn ' t loopback 127 . x . x . x <nl> CNetAddr addr ( s4 - > sin_addr ) ; <nl> - AddLocal ( addr , LOCAL_IF ) ; <nl> + if ( AddLocal ( addr , LOCAL_IF ) ) <nl> + printf ( " ipv4 % s : % s \ n " , ifa - > ifa_name , addr . ToString ( ) . c_str ( ) ) ; <nl> } <nl> + # ifdef USE_IPV6 <nl> else if ( ifa - > ifa_addr - > sa_family = = AF_INET6 ) <nl> { <nl> struct sockaddr_in6 * s6 = ( struct sockaddr_in6 * ) ( ifa - > ifa_addr ) ; <nl> - if ( inet_ntop ( ifa - > ifa_addr - > sa_family , ( void * ) & ( s6 - > sin6_addr ) , pszIP , sizeof ( pszIP ) ) ! = NULL ) <nl> - printf ( " ipv6 % s : % s \ n " , ifa - > ifa_name , pszIP ) ; <nl> - <nl> - # ifdef USE_IPV6 <nl> CNetAddr addr ( s6 - > sin6_addr ) ; <nl> - AddLocal ( addr , LOCAL_IF ) ; <nl> - # endif <nl> + if ( AddLocal ( addr , LOCAL_IF ) ) <nl> + printf ( " ipv6 % s : % s \ n " , ifa - > ifa_name , addr . ToString ( ) . c_str ( ) ) ; <nl> } <nl> + # endif <nl> } <nl> freeifaddrs ( myaddrs ) ; <nl> } <nl> class CNetCleanup <nl> BOOST_FOREACH ( CNode * pnode , vNodes ) <nl> if ( pnode - > hSocket ! = INVALID_SOCKET ) <nl> closesocket ( pnode - > hSocket ) ; <nl> - if ( hListenSocket ! = INVALID_SOCKET ) <nl> - if ( closesocket ( hListenSocket ) = = SOCKET_ERROR ) <nl> - printf ( " closesocket ( hListenSocket ) failed with error % d \ n " , WSAGetLastError ( ) ) ; <nl> + BOOST_FOREACH ( SOCKET hListenSocket , vhListenSocket ) <nl> + if ( hListenSocket ! = INVALID_SOCKET ) <nl> + if ( closesocket ( hListenSocket ) = = SOCKET_ERROR ) <nl> + printf ( " closesocket ( hListenSocket ) failed with error % d \ n " , WSAGetLastError ( ) ) ; <nl> <nl> # ifdef WIN32 <nl> / / Shutdown Windows Sockets <nl> mmm a / src / net . h <nl> ppp b / src / net . h <nl> CNode * FindNode ( const CNetAddr & ip ) ; <nl> CNode * FindNode ( const CService & ip ) ; <nl> CNode * ConnectNode ( CAddress addrConnect , const char * strDest = NULL , int64 nTimeout = 0 ) ; <nl> void MapPort ( bool fMapPort ) ; <nl> - bool BindListenPort ( std : : string & strError = REF ( std : : string ( ) ) ) ; <nl> + bool BindListenPort ( const CService & bindAddr , std : : string & strError = REF ( std : : string ( ) ) ) ; <nl> void StartNode ( void * parg ) ; <nl> bool StopNode ( ) ; <nl> <nl> enum <nl> { <nl> - LOCAL_NONE , <nl> - LOCAL_IF , <nl> - LOCAL_UPNP , <nl> - LOCAL_IRC , <nl> - LOCAL_HTTP , <nl> - LOCAL_MANUAL , <nl> + LOCAL_NONE , / / unknown <nl> + LOCAL_IF , / / address a local interface listens on <nl> + LOCAL_BIND , / / address explicit bound to <nl> + LOCAL_UPNP , / / address reported by UPnP <nl> + LOCAL_IRC , / / address reported by IRC ( deprecated ) <nl> + LOCAL_HTTP , / / address reported by whatismyip . 
com and similars <nl> + LOCAL_MANUAL , / / address explicitly specified ( - externalip = ) <nl> <nl> LOCAL_MAX <nl> } ; <nl> <nl> - bool AddLocal ( const CNetAddr & addr , int nScore = LOCAL_NONE ) ; <nl> - bool SeenLocal ( const CNetAddr & addr ) ; <nl> - bool IsLocal ( const CNetAddr & addr ) ; <nl> - bool GetLocal ( CNetAddr & addr , const CNetAddr * paddrPeer = NULL ) ; <nl> + void SetLimited ( enum Network net , bool fLimited = true ) ; <nl> + bool IsLimited ( const CNetAddr & addr ) ; <nl> + bool AddLocal ( const CService & addr , int nScore = LOCAL_NONE ) ; <nl> + bool AddLocal ( const CNetAddr & addr , int nScore = LOCAL_NONE , int port = - 1 ) ; <nl> + bool SeenLocal ( const CService & addr ) ; <nl> + bool IsLocal ( const CService & addr ) ; <nl> + bool GetLocal ( CService & addr , const CNetAddr * paddrPeer = NULL ) ; <nl> + bool IsReachable ( const CNetAddr & addr ) ; <nl> CAddress GetLocalAddress ( const CNetAddr * paddrPeer = NULL ) ; <nl> <nl> + <nl> enum <nl> { <nl> MSG_TX = 1 , <nl> class CNode <nl> unsigned int nMessageStart ; <nl> CAddress addr ; <nl> std : : string addrName ; <nl> - CNetAddr addrLocal ; <nl> + CService addrLocal ; <nl> int nVersion ; <nl> std : : string strSubVer ; <nl> bool fOneShot ; <nl> mmm a / src / netbase . cpp <nl> ppp b / src / netbase . cpp <nl> bool fProxyNameLookup = false ; <nl> bool fNameLookup = false ; <nl> CService addrProxy ( " 127 . 0 . 0 . 1 " , 9050 ) ; <nl> int nConnectTimeout = 5000 ; <nl> + static bool vfNoProxy [ NET_MAX ] = { } ; <nl> <nl> <nl> static const unsigned char pchIPv4 [ 12 ] = { 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0xff , 0xff } ; <nl> <nl> + enum Network ParseNetwork ( std : : string net ) { <nl> + if ( net = = " ipv4 " ) return NET_IPV4 ; <nl> + if ( net = = " ipv6 " ) return NET_IPV6 ; <nl> + if ( net = = " tor " ) return NET_TOR ; <nl> + if ( net = = " i2p " ) return NET_I2P ; <nl> + return NET_UNROUTABLE ; <nl> + } <nl> + <nl> + void SetNoProxy ( enum Network net , bool fNoProxy ) { <nl> + assert ( net > = 0 & & net < NET_MAX ) ; <nl> + vfNoProxy [ net ] = fNoProxy ; <nl> + } <nl> + <nl> bool static LookupIntern ( const char * pszName , std : : vector < CNetAddr > & vIP , unsigned int nMaxSolutions , bool fAllowLookup ) <nl> { <nl> vIP . clear ( ) ; <nl> bool static Socks4 ( const CService & addrDest , SOCKET & hSocket ) <nl> } <nl> char pszSocks4IP [ ] = " \ 4 \ 1 \ 0 \ 0 \ 0 \ 0 \ 0 \ 0user " ; <nl> struct sockaddr_in addr ; <nl> - addrDest . GetSockAddr ( & addr ) ; <nl> + socklen_t len = sizeof ( addr ) ; <nl> + if ( ! addrDest . GetSockAddr ( ( struct sockaddr * ) & addr , & len ) | | addr . sin_family ! = AF_INET ) <nl> + { <nl> + closesocket ( hSocket ) ; <nl> + return error ( " Cannot get proxy destination address " ) ; <nl> + } <nl> memcpy ( pszSocks4IP + 2 , & addr . sin_port , 2 ) ; <nl> memcpy ( pszSocks4IP + 4 , & addr . sin_addr , 4 ) ; <nl> char * pszSocks4 = pszSocks4IP ; <nl> bool static ConnectSocketDirectly ( const CService & addrConnect , SOCKET & hSocketRe <nl> { <nl> hSocketRet = INVALID_SOCKET ; <nl> <nl> - SOCKET hSocket = socket ( AF_INET , SOCK_STREAM , IPPROTO_TCP ) ; <nl> + # ifdef USE_IPV6 <nl> + struct sockaddr_storage sockaddr ; <nl> + # else <nl> + struct sockaddr sockaddr ; <nl> + # endif <nl> + socklen_t len = sizeof ( sockaddr ) ; <nl> + if ( ! addrConnect . GetSockAddr ( ( struct sockaddr * ) & sockaddr , & len ) ) { <nl> + printf ( " Cannot connect to % s : unsupported network \ n " , addrConnect . ToString ( ) . 
c_str ( ) ) ; <nl> + return false ; <nl> + } <nl> + <nl> + SOCKET hSocket = socket ( ( ( struct sockaddr * ) & sockaddr ) - > sa_family , SOCK_STREAM , IPPROTO_TCP ) ; <nl> if ( hSocket = = INVALID_SOCKET ) <nl> return false ; <nl> # ifdef SO_NOSIGPIPE <nl> bool static ConnectSocketDirectly ( const CService & addrConnect , SOCKET & hSocketRe <nl> setsockopt ( hSocket , SOL_SOCKET , SO_NOSIGPIPE , ( void * ) & set , sizeof ( int ) ) ; <nl> # endif <nl> <nl> - struct sockaddr_in sockaddr ; <nl> - if ( ! addrConnect . GetSockAddr ( & sockaddr ) ) <nl> - { <nl> - closesocket ( hSocket ) ; <nl> - return false ; <nl> - } <nl> - <nl> # ifdef WIN32 <nl> u_long fNonblock = 1 ; <nl> if ( ioctlsocket ( hSocket , FIONBIO , & fNonblock ) = = SOCKET_ERROR ) <nl> bool static ConnectSocketDirectly ( const CService & addrConnect , SOCKET & hSocketRe <nl> return false ; <nl> } <nl> <nl> - if ( connect ( hSocket , ( struct sockaddr * ) & sockaddr , sizeof ( sockaddr ) ) = = SOCKET_ERROR ) <nl> + if ( connect ( hSocket , ( struct sockaddr * ) & sockaddr , len ) = = SOCKET_ERROR ) <nl> { <nl> / / WSAEINVAL is here because some legacy version of winsock uses it <nl> if ( WSAGetLastError ( ) = = WSAEINPROGRESS | | WSAGetLastError ( ) = = WSAEWOULDBLOCK | | WSAGetLastError ( ) = = WSAEINVAL ) <nl> bool static ConnectSocketDirectly ( const CService & addrConnect , SOCKET & hSocketRe <nl> bool ConnectSocket ( const CService & addrDest , SOCKET & hSocketRet , int nTimeout ) <nl> { <nl> SOCKET hSocket = INVALID_SOCKET ; <nl> - bool fProxy = ( fUseProxy & & addrDest . IsRoutable ( ) ) ; <nl> + bool fProxy = ( fUseProxy & & addrDest . IsRoutable ( ) & & ! vfNoProxy [ addrDest . GetNetwork ( ) ] ) ; <nl> <nl> if ( ! ConnectSocketDirectly ( fProxy ? addrProxy : addrDest , hSocket , nTimeout ) ) <nl> return false ; <nl> bool CNetAddr : : IsIPv4 ( ) const <nl> return ( memcmp ( ip , pchIPv4 , sizeof ( pchIPv4 ) ) = = 0 ) ; <nl> } <nl> <nl> + bool CNetAddr : : IsIPv6 ( ) const <nl> + { <nl> + return ( ! IsIPv4 ( ) ) ; <nl> + } <nl> + <nl> bool CNetAddr : : IsRFC1918 ( ) const <nl> { <nl> return IsIPv4 ( ) & & ( <nl> bool CNetAddr : : IsRFC4843 ( ) const <nl> return ( GetByte ( 15 ) = = 0x20 & & GetByte ( 14 ) = = 0x01 & & GetByte ( 13 ) = = 0x00 & & ( GetByte ( 12 ) & 0xF0 ) = = 0x10 ) ; <nl> } <nl> <nl> + bool CNetAddr : : IsOnionCat ( ) const <nl> + { <nl> + static const unsigned char pchOnionCat [ ] = { 0xFD , 0x87 , 0xD8 , 0x7E , 0xEB , 0x43 } ; <nl> + return ( memcmp ( ip , pchOnionCat , sizeof ( pchOnionCat ) ) = = 0 ) ; <nl> + } <nl> + <nl> + bool CNetAddr : : IsGarliCat ( ) const <nl> + { <nl> + static const unsigned char pchGarliCat [ ] = { 0xFD , 0x60 , 0xDB , 0x4D , 0xDD , 0xB5 } ; <nl> + return ( memcmp ( ip , pchGarliCat , sizeof ( pchGarliCat ) ) = = 0 ) ; <nl> + } <nl> + <nl> bool CNetAddr : : IsLocal ( ) const <nl> { <nl> / / IPv4 loopback <nl> bool CNetAddr : : IsValid ( ) const <nl> <nl> bool CNetAddr : : IsRoutable ( ) const <nl> { <nl> - return IsValid ( ) & & ! ( IsRFC1918 ( ) | | IsRFC3927 ( ) | | IsRFC4862 ( ) | | IsRFC4193 ( ) | | IsRFC4843 ( ) | | IsLocal ( ) ) ; <nl> + return IsValid ( ) & & ! ( IsRFC1918 ( ) | | IsRFC3927 ( ) | | IsRFC4862 ( ) | | ( IsRFC4193 ( ) & & ! IsOnionCat ( ) & & ! IsGarliCat ( ) ) | | IsRFC4843 ( ) | | IsLocal ( ) ) ; <nl> + } <nl> + <nl> + enum Network CNetAddr : : GetNetwork ( ) const <nl> + { <nl> + if ( ! 
IsRoutable ( ) ) <nl> + return NET_UNROUTABLE ; <nl> + <nl> + if ( IsIPv4 ( ) ) <nl> + return NET_IPV4 ; <nl> + <nl> + if ( IsOnionCat ( ) ) <nl> + return NET_TOR ; <nl> + <nl> + if ( IsGarliCat ( ) ) <nl> + return NET_I2P ; <nl> + <nl> + return NET_IPV6 ; <nl> } <nl> <nl> std : : string CNetAddr : : ToStringIP ( ) const <nl> bool CNetAddr : : GetIn6Addr ( struct in6_addr * pipv6Addr ) const <nl> std : : vector < unsigned char > CNetAddr : : GetGroup ( ) const <nl> { <nl> std : : vector < unsigned char > vchRet ; <nl> - int nClass = 0 ; / / 0 = IPv6 , 1 = IPv4 , 254 = local , 255 = unroutable <nl> + int nClass = NET_IPV6 ; <nl> int nStartByte = 0 ; <nl> int nBits = 16 ; <nl> <nl> / / all local addresses belong to the same group <nl> if ( IsLocal ( ) ) <nl> { <nl> - nClass = 254 ; <nl> + nClass = 255 ; <nl> nBits = 0 ; <nl> } <nl> <nl> / / all unroutable addresses belong to the same group <nl> if ( ! IsRoutable ( ) ) <nl> { <nl> - nClass = 255 ; <nl> + nClass = NET_UNROUTABLE ; <nl> nBits = 0 ; <nl> } <nl> / / for IPv4 addresses , ' 1 ' + the 16 higher - order bits of the IP <nl> / / includes mapped IPv4 , SIIT translated IPv4 , and the well - known prefix <nl> else if ( IsIPv4 ( ) | | IsRFC6145 ( ) | | IsRFC6052 ( ) ) <nl> { <nl> - nClass = 1 ; <nl> + nClass = NET_IPV4 ; <nl> nStartByte = 12 ; <nl> } <nl> / / for 6to4 tunneled addresses , use the encapsulated IPv4 address <nl> else if ( IsRFC3964 ( ) ) <nl> { <nl> - nClass = 1 ; <nl> + nClass = NET_IPV4 ; <nl> nStartByte = 2 ; <nl> } <nl> / / for Teredo - tunneled IPv6 addresses , use the encapsulated IPv4 address <nl> else if ( IsRFC4380 ( ) ) <nl> { <nl> - vchRet . push_back ( 1 ) ; <nl> + vchRet . push_back ( NET_IPV4 ) ; <nl> vchRet . push_back ( GetByte ( 3 ) ^ 0xFF ) ; <nl> vchRet . push_back ( GetByte ( 2 ) ^ 0xFF ) ; <nl> return vchRet ; <nl> CService : : CService ( const struct sockaddr_in6 & addr ) : CNetAddr ( addr . sin6_addr ) , <nl> } <nl> # endif <nl> <nl> + bool CService : : SetSockAddr ( const struct sockaddr * paddr ) <nl> + { <nl> + switch ( paddr - > sa_family ) { <nl> + case AF_INET : <nl> + * this = CService ( * ( const struct sockaddr_in * ) paddr ) ; <nl> + return true ; <nl> + # ifdef USE_IPV6 <nl> + case AF_INET6 : <nl> + * this = CService ( * ( const struct sockaddr_in6 * ) paddr ) ; <nl> + return true ; <nl> + # endif <nl> + default : <nl> + return false ; <nl> + } <nl> + } <nl> + <nl> CService : : CService ( const char * pszIpPort , bool fAllowLookup ) <nl> { <nl> Init ( ) ; <nl> bool operator < ( const CService & a , const CService & b ) <nl> return ( CNetAddr ) a < ( CNetAddr ) b | | ( ( CNetAddr ) a = = ( CNetAddr ) b & & a . port < b . port ) ; <nl> } <nl> <nl> - bool CService : : GetSockAddr ( struct sockaddr_in * paddr ) const <nl> + bool CService : : GetSockAddr ( struct sockaddr * paddr , socklen_t * addrlen ) const <nl> { <nl> - if ( ! IsIPv4 ( ) ) <nl> - return false ; <nl> - memset ( paddr , 0 , sizeof ( struct sockaddr_in ) ) ; <nl> - if ( ! GetInAddr ( & paddr - > sin_addr ) ) <nl> - return false ; <nl> - paddr - > sin_family = AF_INET ; <nl> - paddr - > sin_port = htons ( port ) ; <nl> - return true ; <nl> - } <nl> - <nl> + if ( IsIPv4 ( ) ) { <nl> + if ( * addrlen < sizeof ( struct sockaddr_in ) ) <nl> + return false ; <nl> + * addrlen = sizeof ( struct sockaddr_in ) ; <nl> + struct sockaddr_in * paddrin = ( struct sockaddr_in * ) paddr ; <nl> + memset ( paddrin , 0 , * addrlen ) ; <nl> + if ( ! 
GetInAddr ( & paddrin - > sin_addr ) ) <nl> + return false ; <nl> + paddrin - > sin_family = AF_INET ; <nl> + paddrin - > sin_port = htons ( port ) ; <nl> + return true ; <nl> + } <nl> # ifdef USE_IPV6 <nl> - bool CService : : GetSockAddr6 ( struct sockaddr_in6 * paddr ) const <nl> - { <nl> - memset ( paddr , 0 , sizeof ( struct sockaddr_in6 ) ) ; <nl> - if ( ! GetIn6Addr ( & paddr - > sin6_addr ) ) <nl> - return false ; <nl> - paddr - > sin6_family = AF_INET6 ; <nl> - paddr - > sin6_port = htons ( port ) ; <nl> - return true ; <nl> - } <nl> + if ( IsIPv6 ( ) ) { <nl> + if ( * addrlen < sizeof ( struct sockaddr_in6 ) ) <nl> + return false ; <nl> + * addrlen = sizeof ( struct sockaddr_in6 ) ; <nl> + struct sockaddr_in6 * paddrin6 = ( struct sockaddr_in6 * ) paddr ; <nl> + memset ( paddrin6 , 0 , * addrlen ) ; <nl> + if ( ! GetIn6Addr ( & paddrin6 - > sin6_addr ) ) <nl> + return false ; <nl> + paddrin6 - > sin6_family = AF_INET6 ; <nl> + paddrin6 - > sin6_port = htons ( port ) ; <nl> + return true ; <nl> + } <nl> # endif <nl> + return false ; <nl> + } <nl> <nl> std : : vector < unsigned char > CService : : GetKey ( ) const <nl> { <nl> std : : vector < unsigned char > CService : : GetKey ( ) const <nl> <nl> std : : string CService : : ToStringPort ( ) const <nl> { <nl> - return strprintf ( " : % i " , port ) ; <nl> + return strprintf ( " % i " , port ) ; <nl> } <nl> <nl> std : : string CService : : ToStringIPPort ( ) const <nl> { <nl> - return ToStringIP ( ) + ToStringPort ( ) ; <nl> + if ( IsIPv4 ( ) ) { <nl> + return ToStringIP ( ) + " : " + ToStringPort ( ) ; <nl> + } else { <nl> + return " [ " + ToStringIP ( ) + " ] : " + ToStringPort ( ) ; <nl> + } <nl> } <nl> <nl> std : : string CService : : ToString ( ) const <nl> mmm a / src / netbase . h <nl> ppp b / src / netbase . h <nl> extern int nConnectTimeout ; <nl> # undef SetPort <nl> # endif <nl> <nl> + enum Network <nl> + { <nl> + NET_UNROUTABLE , <nl> + NET_IPV4 , <nl> + NET_IPV6 , <nl> + NET_TOR , <nl> + NET_I2P , <nl> + <nl> + NET_MAX <nl> + } ; <nl> + <nl> + enum Network ParseNetwork ( std : : string net ) ; <nl> + void SetNoProxy ( enum Network net , bool fNoProxy = true ) ; <nl> + <nl> / * * IP address ( IPv6 , or IPv4 using mapped IPv6 range ( : : FFFF : 0 : 0 / 96 ) ) * / <nl> class CNetAddr <nl> { <nl> class CNetAddr <nl> void Init ( ) ; <nl> void SetIP ( const CNetAddr & ip ) ; <nl> bool IsIPv4 ( ) const ; / / IPv4 mapped address ( : : FFFF : 0 : 0 / 96 , 0 . 0 . 0 . 0 / 0 ) <nl> + bool IsIPv6 ( ) const ; / / IPv6 address ( not IPv4 ) <nl> bool IsRFC1918 ( ) const ; / / IPv4 private networks ( 10 . 0 . 0 . 0 / 8 , 192 . 168 . 0 . 0 / 16 , 172 . 16 . 0 . 0 / 12 ) <nl> bool IsRFC3849 ( ) const ; / / IPv6 documentation address ( 2001 : 0DB8 : : / 32 ) <nl> bool IsRFC3927 ( ) const ; / / IPv4 autoconfig ( 169 . 254 . 0 . 
0 / 16 ) <nl> class CNetAddr <nl> bool IsRFC4862 ( ) const ; / / IPv6 autoconfig ( FE80 : : / 64 ) <nl> bool IsRFC6052 ( ) const ; / / IPv6 well - known prefix ( 64 : FF9B : : / 96 ) <nl> bool IsRFC6145 ( ) const ; / / IPv6 IPv4 - translated address ( : : FFFF : 0 : 0 : 0 / 96 ) <nl> + bool IsOnionCat ( ) const ; <nl> + bool IsGarliCat ( ) const ; <nl> bool IsLocal ( ) const ; <nl> bool IsRoutable ( ) const ; <nl> bool IsValid ( ) const ; <nl> bool IsMulticast ( ) const ; <nl> + enum Network GetNetwork ( ) const ; <nl> std : : string ToString ( ) const ; <nl> std : : string ToStringIP ( ) const ; <nl> int GetByte ( int n ) const ; <nl> class CService : public CNetAddr <nl> void Init ( ) ; <nl> void SetPort ( unsigned short portIn ) ; <nl> unsigned short GetPort ( ) const ; <nl> - bool GetSockAddr ( struct sockaddr_in * paddr ) const ; <nl> + bool GetSockAddr ( struct sockaddr * paddr , socklen_t * addrlen ) const ; <nl> + bool SetSockAddr ( const struct sockaddr * paddr ) ; <nl> friend bool operator = = ( const CService & a , const CService & b ) ; <nl> friend bool operator ! = ( const CService & a , const CService & b ) ; <nl> friend bool operator < ( const CService & a , const CService & b ) ; <nl> class CService : public CNetAddr <nl> <nl> # ifdef USE_IPV6 <nl> CService ( const struct in6_addr & ipv6Addr , unsigned short port ) ; <nl> - bool GetSockAddr6 ( struct sockaddr_in6 * paddr ) const ; <nl> CService ( const struct sockaddr_in6 & addr ) ; <nl> # endif <nl> <nl>
Merge pull request from sipa/ipv6
bitcoin/bitcoin
a3878873f3317d3d3ee0eee4b030490ad39b3f81
2012-05-12T01:23:56Z
mmm a / tensorflow / python / ops / image_ops_impl . py <nl> ppp b / tensorflow / python / ops / image_ops_impl . py <nl> def transpose ( image , name = None ) : <nl> <nl> Raises : <nl> ValueError : if the shape of ` image ` not supported . <nl> + <nl> + Usage Example : <nl> + import tensorflow as tf <nl> + x = tf . random . normal ( shape = ( 256 , 256 , 3 ) ) <nl> + tf . image . transpose ( x ) <nl> " " " <nl> with ops . name_scope ( name , ' transpose ' , [ image ] ) : <nl> image = ops . convert_to_tensor ( image , name = ' image ' ) <nl>
Update image_ops_impl.py
tensorflow/tensorflow
151fdbf69f4de90928489ddf88b61a3657516b09
2019-12-15T03:21:36Z
mmm a / include / simdjson / dom / array - inl . h <nl> ppp b / include / simdjson / dom / array - inl . h <nl> inline size_t array : : size ( ) const noexcept { <nl> } <nl> inline simdjson_result < element > array : : at_pointer ( std : : string_view json_pointer ) const noexcept { <nl> if ( json_pointer [ 0 ] ! = ' / ' ) { <nl> - if ( json_pointer . size ( ) = = 0 ) { / / an empty string means that we return the current node <nl> + if ( json_pointer . empty ( ) ) { / / an empty string means that we return the current node <nl> return element ( this - > tape ) ; / / copy the current node <nl> } else { / / otherwise there is an error <nl> return INVALID_JSON_POINTER ; <nl> mmm a / include / simdjson / dom / element - inl . h <nl> ppp b / include / simdjson / dom / element - inl . h <nl> inline simdjson_result < element > element : : at_pointer ( std : : string_view json_pointe <nl> case internal : : tape_type : : START_ARRAY : <nl> return array ( tape ) . at_pointer ( json_pointer ) ; <nl> default : { <nl> - if ( json_pointer . empty ( ) ) { / / an empty string means that we return the current node <nl> + if ( ! json_pointer . empty ( ) ) { / / a non - empty string is invalid on an atom <nl> return INVALID_JSON_POINTER ; <nl> } <nl> + / / an empty string means that we return the current node <nl> dom : : element copy ( * this ) ; <nl> return simdjson_result < element > ( std : : move ( copy ) ) ; <nl> } <nl> mmm a / include / simdjson / dom / object - inl . h <nl> ppp b / include / simdjson / dom / object - inl . h <nl> inline simdjson_result < element > object : : operator [ ] ( const char * key ) const noexce <nl> } <nl> inline simdjson_result < element > object : : at_pointer ( std : : string_view json_pointer ) const noexcept { <nl> if ( json_pointer [ 0 ] ! = ' / ' ) { <nl> - if ( json_pointer . size ( ) = = 0 ) { / / an empty string means that we return the current node <nl> + if ( json_pointer . empty ( ) ) { / / an empty string means that we return the current node <nl> return element ( this - > tape ) ; / / copy the current node <nl> } else { / / otherwise there is an error <nl> return INVALID_JSON_POINTER ; <nl> mmm a / tests / pointercheck . cpp <nl> ppp b / tests / pointercheck . cpp <nl> bool modern_support ( ) { <nl> # endif <nl> return true ; <nl> } <nl> + bool issue1142 ( ) { <nl> + # if SIMDJSON_EXCEPTIONS <nl> + std : : cout < < " issue 1142 " < < std : : endl ; <nl> + auto example_json = R " ( [ 1 , 2 , { " 1 " : " bla " } ] ) " _padded ; <nl> + dom : : parser parser ; <nl> + dom : : element example = parser . parse ( example_json ) ; <nl> + auto e0 = dom : : array ( example ) . at ( 0 ) . at_pointer ( " " ) ; <nl> + ASSERT_EQUAL ( std : : string ( " 1 " ) , simdjson : : minify ( e0 ) ) <nl> + auto o = dom : : array ( example ) . at ( 2 ) . at_pointer ( " " ) ; <nl> + ASSERT_EQUAL ( std : : string ( R " ( { " 1 " : " bla " } ) " ) , simdjson : : minify ( o ) ) <nl> + std : : string_view s0 = dom : : array ( example ) . at ( 2 ) . at_pointer ( " / 1 " ) . at_pointer ( " " ) ; <nl> + if ( s0 ! = " bla " ) { <nl> + std : : cerr < < s0 < < std : : endl ; <nl> + return false ; <nl> + } <nl> + auto example_json2 = R " ( " just a string " ) " _padded ; <nl> + dom : : element example2 = parser . parse ( example_json2 ) . at_pointer ( " " ) ; <nl> + if ( std : : string_view ( example2 ) ! 
= " just a string " ) { <nl> + std : : cerr < < std : : string_view ( example2 ) < < std : : endl ; <nl> + return false ; <nl> + } <nl> + <nl> + <nl> + # endif <nl> + return true ; <nl> + } <nl> + <nl> <nl> int main ( ) { <nl> if ( true <nl> & & demo ( ) <nl> + & & issue1142 ( ) <nl> & & legacy_support ( ) <nl> & & modern_support ( ) <nl> & & json_pointer_success_test ( TEST_RFC_JSON , " " , R " ( { " foo " : [ " bar " , " baz " ] , " " : 0 , " a / b " : 1 , " c % d " : 2 , " e ^ f " : 3 , " g | h " : 4 , " i \ \ j " : 5 , " k \ " l " : 6 , " " : 7 , " m ~ n " : 8 } ) " ) <nl>
Adding tests and a fix for empty strings in at_pointer()
simdjson/simdjson
7aea774b215946af931166e47d1713edf7220aeb
2020-09-02T21:04:56Z
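The commit above changes what an empty JSON Pointer means on a scalar node. Below is a minimal sketch of the resulting behavior, built only from the DOM calls that appear in the commit's own test (parser.parse, at_pointer, minify); the header name and namespace usage are assumed from simdjson's usual layout, and SIMDJSON_EXCEPTIONS is assumed enabled so simdjson_result values convert (or throw) implicitly.

    #include <iostream>
    #include "simdjson.h"
    using namespace simdjson;

    int main() {
      dom::parser parser;
      dom::element example = parser.parse(R"([1,2,{"1":"bla"}])"_padded);
      // An empty pointer ("") now returns the current node, even for a scalar.
      dom::element e0 = dom::array(example).at(0).at_pointer("");
      std::cout << minify(e0) << "\n";                       // prints: 1
      // It composes with a non-empty pointer that lands on a string.
      std::string_view s0 = dom::array(example).at(2).at_pointer("/1").at_pointer("");
      std::cout << s0 << "\n";                               // prints: bla
      // A document that is just a scalar can be addressed the same way.
      dom::element str = parser.parse(R"("just a string")"_padded).at_pointer("");
      std::cout << std::string_view(str) << "\n";            // prints: just a string
    }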
mmm a / src / assembler . cc <nl> ppp b / src / assembler . cc <nl> ExternalReference ExternalReference : : date_cache_stamp ( Isolate * isolate ) { <nl> } <nl> <nl> <nl> + ExternalReference ExternalReference : : stress_deopt_count ( Isolate * isolate ) { <nl> + return ExternalReference ( isolate - > stress_deopt_count_address ( ) ) ; <nl> + } <nl> + <nl> + <nl> ExternalReference ExternalReference : : transcendental_cache_array_address ( <nl> Isolate * isolate ) { <nl> return ExternalReference ( <nl> mmm a / src / assembler . h <nl> ppp b / src / assembler . h <nl> class ExternalReference BASE_EMBEDDED { <nl> reinterpret_cast < ExternalReferenceRedirectorPointer * > ( redirector ) ) ; <nl> } <nl> <nl> + static ExternalReference stress_deopt_count ( Isolate * isolate ) ; <nl> + <nl> private : <nl> explicit ExternalReference ( void * address ) <nl> : address_ ( address ) { } <nl> mmm a / src / flag - definitions . h <nl> ppp b / src / flag - definitions . h <nl> DEFINE_int ( deopt_every_n_times , <nl> DEFINE_int ( deopt_every_n_garbage_collections , <nl> 0 , <nl> " deoptimize every n garbage collections " ) <nl> + DEFINE_bool ( print_deopt_stress , false , " print number of possible deopt points " ) <nl> DEFINE_bool ( trap_on_deopt , false , " put a break point before deoptimizing " ) <nl> DEFINE_bool ( deoptimize_uncommon_cases , true , " deoptimize uncommon cases " ) <nl> DEFINE_bool ( polymorphic_inlining , true , " polymorphic inlining " ) <nl> mmm a / src / heap . cc <nl> ppp b / src / heap . cc <nl> MaybeObject * Heap : : AllocateSharedFunctionInfo ( Object * name ) { <nl> share - > set_inferred_name ( empty_string ( ) , SKIP_WRITE_BARRIER ) ; <nl> share - > set_initial_map ( undefined_value ( ) , SKIP_WRITE_BARRIER ) ; <nl> share - > set_ast_node_count ( 0 ) ; <nl> - share - > set_stress_deopt_counter ( FLAG_deopt_every_n_times ) ; <nl> share - > set_counters ( 0 ) ; <nl> <nl> / / Set integer fields ( smi or int , depending on the architecture ) . <nl> mmm a / src / ia32 / lithium - codegen - ia32 . cc <nl> ppp b / src / ia32 / lithium - codegen - ia32 . cc <nl> void LCodeGen : : DeoptimizeIf ( Condition cc , <nl> } <nl> <nl> if ( FLAG_deopt_every_n_times ! = 0 & & ! 
info ( ) - > IsStub ( ) ) { <nl> - Handle < SharedFunctionInfo > shared ( info ( ) - > shared_info ( ) ) ; <nl> + ExternalReference count = ExternalReference : : stress_deopt_count ( isolate ( ) ) ; <nl> Label no_deopt ; <nl> __ pushfd ( ) ; <nl> __ push ( eax ) ; <nl> - __ push ( ebx ) ; <nl> - __ mov ( ebx , shared ) ; <nl> - __ mov ( eax , <nl> - FieldOperand ( ebx , SharedFunctionInfo : : kStressDeoptCounterOffset ) ) ; <nl> - __ sub ( Operand ( eax ) , Immediate ( Smi : : FromInt ( 1 ) ) ) ; <nl> + __ mov ( eax , Operand : : StaticVariable ( count ) ) ; <nl> + __ sub ( eax , Immediate ( 1 ) ) ; <nl> __ j ( not_zero , & no_deopt , Label : : kNear ) ; <nl> if ( FLAG_trap_on_deopt ) __ int3 ( ) ; <nl> - __ mov ( eax , Immediate ( Smi : : FromInt ( FLAG_deopt_every_n_times ) ) ) ; <nl> - __ mov ( FieldOperand ( ebx , SharedFunctionInfo : : kStressDeoptCounterOffset ) , <nl> - eax ) ; <nl> - __ pop ( ebx ) ; <nl> + __ mov ( eax , Immediate ( FLAG_deopt_every_n_times ) ) ; <nl> + __ mov ( Operand : : StaticVariable ( count ) , eax ) ; <nl> __ pop ( eax ) ; <nl> __ popfd ( ) ; <nl> ASSERT ( frame_is_built_ ) ; <nl> __ call ( entry , RelocInfo : : RUNTIME_ENTRY ) ; <nl> - <nl> __ bind ( & no_deopt ) ; <nl> - __ mov ( FieldOperand ( ebx , SharedFunctionInfo : : kStressDeoptCounterOffset ) , <nl> - eax ) ; <nl> - __ pop ( ebx ) ; <nl> + __ mov ( Operand : : StaticVariable ( count ) , eax ) ; <nl> __ pop ( eax ) ; <nl> __ popfd ( ) ; <nl> } <nl> mmm a / src / isolate . cc <nl> ppp b / src / isolate . cc <nl> Isolate : : Isolate ( ) <nl> optimizing_compiler_thread_ ( this ) , <nl> marking_thread_ ( NULL ) , <nl> sweeper_thread_ ( NULL ) , <nl> - callback_table_ ( NULL ) { <nl> + callback_table_ ( NULL ) , <nl> + stress_deopt_count_ ( 0 ) { <nl> id_ = NoBarrier_AtomicIncrement ( & isolate_counter_ , 1 ) ; <nl> TRACE_ISOLATE ( constructor ) ; <nl> <nl> void Isolate : : Deinit ( ) { <nl> <nl> if ( FLAG_hydrogen_stats ) GetHStatistics ( ) - > Print ( ) ; <nl> <nl> + if ( FLAG_print_deopt_stress ) { <nl> + PrintF ( stdout , " = = = Stress deopt counter : % u \ n " , stress_deopt_count_ ) ; <nl> + } <nl> + <nl> / / We must stop the logger before we tear down other components . <nl> Sampler * sampler = logger_ - > sampler ( ) ; <nl> if ( sampler & & sampler - > IsActive ( ) ) sampler - > Stop ( ) ; <nl> bool Isolate : : Init ( Deserializer * des ) { <nl> ASSERT ( Isolate : : Current ( ) = = this ) ; <nl> TRACE_ISOLATE ( init ) ; <nl> <nl> + stress_deopt_count_ = FLAG_deopt_every_n_times ; <nl> + <nl> if ( function_entry_hook ( ) ! = NULL ) { <nl> / / When function entry hooking is in effect , we have to create the code <nl> / / stubs from scratch to get entry hooks , rather than loading the previously <nl> mmm a / src / isolate . h <nl> ppp b / src / isolate . h <nl> class Isolate { <nl> function_entry_hook_ = function_entry_hook ; <nl> } <nl> <nl> + void * stress_deopt_count_address ( ) { return & stress_deopt_count_ ; } <nl> + <nl> private : <nl> Isolate ( ) ; <nl> <nl> class Isolate { <nl> SweeperThread * * sweeper_thread_ ; <nl> CallbackTable * callback_table_ ; <nl> <nl> + / / Counts deopt points if deopt_every_n_times is enabled . <nl> + unsigned int stress_deopt_count_ ; <nl> + <nl> friend class ExecutionAccess ; <nl> friend class HandleScopeImplementer ; <nl> friend class IsolateInitializer ; <nl> mmm a / src / objects - inl . h <nl> ppp b / src / objects - inl . 
h <nl> SMI_ACCESSORS ( SharedFunctionInfo , compiler_hints , <nl> kCompilerHintsOffset ) <nl> SMI_ACCESSORS ( SharedFunctionInfo , opt_count , kOptCountOffset ) <nl> SMI_ACCESSORS ( SharedFunctionInfo , counters , kCountersOffset ) <nl> - SMI_ACCESSORS ( SharedFunctionInfo , <nl> - stress_deopt_counter , <nl> - kStressDeoptCounterOffset ) <nl> + <nl> # else <nl> <nl> # define PSEUDO_SMI_ACCESSORS_LO ( holder , name , offset ) \ <nl> PSEUDO_SMI_ACCESSORS_HI ( SharedFunctionInfo , <nl> PSEUDO_SMI_ACCESSORS_LO ( SharedFunctionInfo , opt_count , kOptCountOffset ) <nl> <nl> PSEUDO_SMI_ACCESSORS_HI ( SharedFunctionInfo , counters , kCountersOffset ) <nl> - PSEUDO_SMI_ACCESSORS_LO ( SharedFunctionInfo , <nl> - stress_deopt_counter , <nl> - kStressDeoptCounterOffset ) <nl> + <nl> # endif <nl> <nl> <nl> mmm a / src / objects . h <nl> ppp b / src / objects . h <nl> class SharedFunctionInfo : public HeapObject { <nl> inline int ast_node_count ( ) ; <nl> inline void set_ast_node_count ( int count ) ; <nl> <nl> - / / A counter used to determine when to stress the deoptimizer with a <nl> - / / deopt . <nl> - inline int stress_deopt_counter ( ) ; <nl> - inline void set_stress_deopt_counter ( int counter ) ; <nl> - <nl> inline int profiler_ticks ( ) ; <nl> <nl> / / Inline cache age is used to infer whether the function survived a context <nl> class SharedFunctionInfo : public HeapObject { <nl> kFunctionTokenPositionOffset + kPointerSize ; <nl> static const int kOptCountOffset = kCompilerHintsOffset + kPointerSize ; <nl> static const int kCountersOffset = kOptCountOffset + kPointerSize ; <nl> - static const int kStressDeoptCounterOffset = kCountersOffset + kPointerSize ; <nl> <nl> / / Total size . <nl> - static const int kSize = kStressDeoptCounterOffset + kPointerSize ; <nl> + static const int kSize = kCountersOffset + kPointerSize ; <nl> # else <nl> / / The only reason to use smi fields instead of int fields <nl> / / is to allow iteration without maps decoding during <nl> class SharedFunctionInfo : public HeapObject { <nl> static const int kOptCountOffset = kCompilerHintsOffset + kIntSize ; <nl> <nl> static const int kCountersOffset = kOptCountOffset + kIntSize ; <nl> - static const int kStressDeoptCounterOffset = kCountersOffset + kIntSize ; <nl> <nl> / / Total size . <nl> - static const int kSize = kStressDeoptCounterOffset + kIntSize ; <nl> + static const int kSize = kCountersOffset + kIntSize ; <nl> <nl> # endif <nl> <nl>
Make deoptimization stress count global.
v8/v8
f24997c1c79238a02003b5fa3bf27918149011e4
2013-07-18T08:12:01Z
mmm a / src / idl_gen_python . cpp <nl> ppp b / src / idl_gen_python . cpp <nl> static void GetScalarFieldOfTable ( const StructDef & struct_def , <nl> getter = " bool ( " + getter + " ) " ; <nl> } <nl> code + = Indent + Indent + Indent + " return " + getter + " \ n " ; <nl> - auto defaultValue = ( is_bool ? " False " : field . value . constant ) ; <nl> - code + = Indent + Indent + " return " + defaultValue + " \ n \ n " ; <nl> + std : : string default_value ; <nl> + if ( is_bool ) { <nl> + default_value = field . value . constant = = " 0 " ? " False " : " True " ; <nl> + } else { <nl> + default_value = field . value . constant ; <nl> + } <nl> + code + = Indent + Indent + " return " + default_value + " \ n \ n " ; <nl> } <nl> <nl> / / Get a struct by initializing an existing struct . <nl>
Python: fix default bool value. ()
google/flatbuffers
ab3b721a540f2279267235ee21767b7f705699a0
2018-06-07T19:02:35Z
mmm a / src / python / grpcio / grpc / _cython / _cygrpc / aio / server . pxd . pxi <nl> ppp b / src / python / grpcio / grpc / _cython / _cygrpc / aio / server . pxd . pxi <nl> cdef class AioServer : <nl> cdef object _shutdown_lock # asyncio . Lock <nl> cdef object _shutdown_completed # asyncio . Future <nl> cdef CallbackWrapper _shutdown_callback_wrapper <nl> + cdef object _crash_exception # Exception <nl> mmm a / src / python / grpcio / grpc / _cython / _cygrpc / aio / server . pyx . pxi <nl> ppp b / src / python / grpcio / grpc / _cython / _cygrpc / aio / server . pyx . pxi <nl> cdef class AioServer : <nl> self . _shutdown_callback_wrapper = CallbackWrapper ( <nl> self . _shutdown_completed , <nl> SERVER_SHUTDOWN_FAILURE_HANDLER ) <nl> + self . _crash_exception = None <nl> <nl> if interceptors : <nl> raise NotImplementedError ( ) <nl> cdef class AioServer : <nl> rpc_state , <nl> self . _loop ) ) <nl> <nl> + def _serving_task_crash_handler ( self , object task ) : <nl> + " " " Shutdown the server immediately if unexpectedly exited . " " " <nl> + if task . exception ( ) is None : <nl> + return <nl> + if self . _status ! = AIO_SERVER_STATUS_STOPPING : <nl> + self . _crash_exception = task . exception ( ) <nl> + _LOGGER . exception ( self . _crash_exception ) <nl> + self . _loop . create_task ( self . shutdown ( None ) ) <nl> + <nl> async def start ( self ) : <nl> if self . _status = = AIO_SERVER_STATUS_RUNNING : <nl> return <nl> cdef class AioServer : <nl> self . _status = AIO_SERVER_STATUS_RUNNING <nl> cdef object server_started = self . _loop . create_future ( ) <nl> self . _serving_task = self . _loop . create_task ( self . _server_main_loop ( server_started ) ) <nl> + self . _serving_task . add_done_callback ( self . _serving_task_crash_handler ) <nl> # Needs to explicitly wait for the server to start up . <nl> # Otherwise , the actual start time of the server is un - controllable . <nl> await server_started <nl> cdef class AioServer : <nl> <nl> async with self . _shutdown_lock : <nl> if self . _status = = AIO_SERVER_STATUS_RUNNING : <nl> - await self . _start_shutting_down ( ) <nl> self . _server . is_shutting_down = True <nl> self . _status = AIO_SERVER_STATUS_STOPPING <nl> + await self . _start_shutting_down ( ) <nl> <nl> if grace is None : <nl> # Directly cancels all calls <nl> cdef class AioServer : <nl> try : <nl> await asyncio . wait_for ( self . _shutdown_completed , timeout ) <nl> except asyncio . TimeoutError : <nl> + if self . _crash_exception is not None : <nl> + raise self . _crash_exception <nl> return False <nl> + if self . _crash_exception is not None : <nl> + raise self . _crash_exception <nl> return True <nl> <nl> def __dealloc__ ( self ) : <nl>
Propagate unexpected error to application
grpc/grpc
9289d34df0fdcfed239899e0ebc840be18383015
2019-11-05T22:03:29Z
mmm a / format . cc <nl> ppp b / format . cc <nl> struct IntChecker < true > { <nl> } ; <nl> <nl> # ifdef _WIN32 <nl> - uint8_t win32_colors [ ] = <nl> - { <nl> - 0 , <nl> - FOREGROUND_RED | FOREGROUND_INTENSITY , <nl> - FOREGROUND_GREEN | FOREGROUND_INTENSITY , <nl> - FOREGROUND_GREEN | FOREGROUND_RED | FOREGROUND_INTENSITY , <nl> - FOREGROUND_BLUE | FOREGROUND_INTENSITY , <nl> - FOREGROUND_BLUE | FOREGROUND_RED | FOREGROUND_INTENSITY , <nl> - FOREGROUND_INTENSITY | FOREGROUND_GREEN | FOREGROUND_BLUE , <nl> - FOREGROUND_GREEN | FOREGROUND_RED | FOREGROUND_BLUE | FOREGROUND_INTENSITY <nl> - } ; <nl> + const uint8_t WIN32_COLORS [ ] = { <nl> + 0 , <nl> + FOREGROUND_RED | FOREGROUND_INTENSITY , <nl> + FOREGROUND_GREEN | FOREGROUND_INTENSITY , <nl> + FOREGROUND_GREEN | FOREGROUND_RED | FOREGROUND_INTENSITY , <nl> + FOREGROUND_BLUE | FOREGROUND_INTENSITY , <nl> + FOREGROUND_BLUE | FOREGROUND_RED | FOREGROUND_INTENSITY , <nl> + FOREGROUND_GREEN | FOREGROUND_BLUE | FOREGROUND_INTENSITY , <nl> + FOREGROUND_GREEN | FOREGROUND_RED | FOREGROUND_BLUE | FOREGROUND_INTENSITY <nl> + } ; <nl> # else <nl> - const char RESET_COLOR [ ] = " \ x1b [ 0m " ; <nl> + const char RESET_COLOR [ ] = " \ x1b [ 0m " ; <nl> # endif <nl> <nl> typedef void ( * FormatFunc ) ( fmt : : Writer & , int , fmt : : StringRef ) ; <nl> FMT_FUNC void fmt : : print ( std : : ostream & os , StringRef format_str , ArgList args ) { <nl> FMT_FUNC void fmt : : print_colored ( Color c , StringRef format , ArgList args ) { <nl> # ifdef _WIN32 <nl> HANDLE handle = GetStdHandle ( STD_OUTPUT_HANDLE ) ; <nl> - if ( handle = = INVALID_HANDLE_VALUE ) <nl> + if ( handle = = INVALID_HANDLE_VALUE ) <nl> FMT_THROW ( GetLastError ( ) , " cannot get output handle " ) ; <nl> - CONSOLE_SCREEN_BUFFER_INFO infoCon ; <nl> - if ( ! GetConsoleScreenBufferInfo ( handle , & infoCon ) ) <nl> - FMT_THROW ( GetLastError ( ) , " cannot get console informations " ) ; <nl> - WORD reset_color = infoCon . wAttributes ; <nl> - WORD color = static_cast < int > ( c ) > = ARRAYSIZE ( win32_colors ) ? reset_color : win32_colors [ c ] ; <nl> - if ( ! SetConsoleTextAttribute ( handle , color ) ) <nl> + CONSOLE_SCREEN_BUFFER_INFO info_con ; <nl> + if ( ! GetConsoleScreenBufferInfo ( handle , & info_con ) ) <nl> + FMT_THROW ( GetLastError ( ) , " cannot get console information " ) ; <nl> + WORD reset_color = info_con . wAttributes ; <nl> + WORD color = static_cast < int > ( c ) > = ARRAYSIZE ( WIN32_COLORS ) ? reset_color : WIN32_COLORS [ c ] ; <nl> + if ( ! SetConsoleTextAttribute ( handle , color ) ) <nl> + FMT_THROW ( GetLastError ( ) , " cannot set console color " ) ; <nl> + print ( format , args ) ; <nl> + if ( ! SetConsoleTextAttribute ( handle , reset_color ) ) <nl> FMT_THROW ( GetLastError ( ) , " cannot set console color " ) ; <nl> # else <nl> char escape [ ] = " \ x1b [ 30m " ; <nl> escape [ 3 ] = ' 0 ' + static_cast < char > ( c ) ; <nl> std : : fputs ( escape , stdout ) ; <nl> - # endif <nl> print ( format , args ) ; <nl> - # ifdef _WIN32 <nl> - if ( ! SetConsoleTextAttribute ( handle , reset_color ) ) <nl> - FMT_THROW ( GetLastError ( ) , " cannot set console color " ) ; <nl> - # else <nl> std : : fputs ( RESET_COLOR , stdout ) ; <nl> # endif <nl> } <nl>
Update the contributed code to the coding conventions in use
fmtlib/fmt
7004d1edf6021bcbe09814e973c68fb3c0287a26
2015-02-09T03:54:39Z
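The diff above reworks the Windows path of print_colored (saving the console attributes, writing, then restoring them). A brief usage sketch follows; the Color enumerators such as fmt::RED are assumed from the library's public header of that era and do not appear in the diff itself, as is the root-level "format.h" header name.

    #include "format.h"  // cppformat / early fmt

    int main() {
      // print_colored writes the formatted text in color: ANSI escapes on
      // POSIX, SetConsoleTextAttribute on Windows (the path fixed above),
      // and resets the color afterwards.
      fmt::print_colored(fmt::RED, "error: {}\n", "something went wrong");
      fmt::print_colored(fmt::GREEN, "processed {} items\n", 42);
      return 0;
    }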
mmm a / include / swift / Basic / LangOptions . h <nl> ppp b / include / swift / Basic / LangOptions . h <nl> namespace swift { <nl> Target . getOSVersion ( major , minor , revision ) ; <nl> } else if ( Target . isOSLinux ( ) | | Target . isOSFreeBSD ( ) | | <nl> Target . isAndroid ( ) | | Target . isOSWindows ( ) | | <nl> - Target . getTriple ( ) . empty ( ) ) <nl> - { <nl> + Target . isPS4 ( ) | | Target . getTriple ( ) . empty ( ) ) { <nl> major = minor = revision = 0 ; <nl> } else { <nl> llvm_unreachable ( " Unsupported target OS " ) ; <nl> mmm a / lib / Basic / LangOptions . cpp <nl> ppp b / lib / Basic / LangOptions . cpp <nl> static const StringRef SupportedConditionalCompilationOSs [ ] = { <nl> " Linux " , <nl> " FreeBSD " , <nl> " Windows " , <nl> - " Android " <nl> + " Android " , <nl> + " PS4 " , <nl> } ; <nl> <nl> static const StringRef SupportedConditionalCompilationArches [ ] = { <nl> std : : pair < bool , bool > LangOptions : : setTarget ( llvm : : Triple triple ) { <nl> addPlatformConditionValue ( " os " , " FreeBSD " ) ; <nl> else if ( triple . isOSWindows ( ) ) <nl> addPlatformConditionValue ( " os " , " Windows " ) ; <nl> - else { <nl> + else if ( triple . isPS4 ( ) ) <nl> + addPlatformConditionValue ( " os " , " PS4 " ) ; <nl> + else <nl> UnsupportedOS = true ; <nl> - } <nl> <nl> bool UnsupportedArch = false ; <nl> <nl> mmm a / lib / Basic / Platform . cpp <nl> ppp b / lib / Basic / Platform . cpp <nl> StringRef swift : : getPlatformNameForTriple ( const llvm : : Triple & triple ) { <nl> case llvm : : Triple : : CUDA : <nl> case llvm : : Triple : : NVCL : <nl> case llvm : : Triple : : AMDHSA : <nl> - case llvm : : Triple : : PS4 : <nl> case llvm : : Triple : : ELFIAMCU : <nl> return " " ; <nl> case llvm : : Triple : : Darwin : <nl> StringRef swift : : getPlatformNameForTriple ( const llvm : : Triple & triple ) { <nl> return " freebsd " ; <nl> case llvm : : Triple : : Win32 : <nl> return " windows " ; <nl> + case llvm : : Triple : : PS4 : <nl> + return " ps4 " ; <nl> } <nl> llvm_unreachable ( " unsupported OS " ) ; <nl> } <nl> mmm a / lib / IRGen / IRGenModule . cpp <nl> ppp b / lib / IRGen / IRGenModule . cpp <nl> llvm : : SmallString < 32 > getTargetDependentLibraryOption ( const llvm : : Triple & T , <nl> buffer + = " . lib " ; <nl> if ( quote ) <nl> buffer + = ' " ' ; <nl> + } else if ( T . isPS4 ( ) ) { <nl> + bool quote = library . find ( ' ' ) ! = StringRef : : npos ; <nl> + <nl> + buffer + = " \ 01 " ; <nl> + if ( quote ) <nl> + buffer + = ' " ' ; <nl> + buffer + = library ; <nl> + if ( quote ) <nl> + buffer + = ' " ' ; <nl> } else { <nl> buffer + = " - l " ; <nl> buffer + = library ; <nl> void IRGenModule : : emitAutolinkInfo ( ) { <nl> } ) , <nl> AutolinkEntries . end ( ) ) ; <nl> <nl> - switch ( TargetInfo . OutputObjectFormat ) { <nl> - case llvm : : Triple : : UnknownObjectFormat : <nl> - llvm_unreachable ( " unknown object format " ) ; <nl> - case llvm : : Triple : : COFF : <nl> - case llvm : : Triple : : MachO : { <nl> + if ( TargetInfo . OutputObjectFormat = = llvm : : Triple : : COFF | | <nl> + TargetInfo . OutputObjectFormat = = llvm : : Triple : : MachO | | <nl> + Triple . isPS4 ( ) ) { <nl> llvm : : LLVMContext & ctx = Module . getContext ( ) ; <nl> <nl> if ( ! LinkerOptions ) { <nl> void IRGenModule : : emitAutolinkInfo ( ) { <nl> ( void ) FoundOldEntry ; <nl> assert ( FoundOldEntry & & " Could not replace old linker options entry ? 
" ) ; <nl> } <nl> - break ; <nl> - } <nl> - case llvm : : Triple : : ELF : { <nl> + } else { <nl> + assert ( TargetInfo . OutputObjectFormat = = llvm : : Triple : : ELF & & <nl> + " expected ELF output format " ) ; <nl> + <nl> / / Merge the entries into null - separated string . <nl> llvm : : SmallString < 64 > EntriesString ; <nl> for ( auto & EntryNode : AutolinkEntries ) { <nl> void IRGenModule : : emitAutolinkInfo ( ) { <nl> } <nl> } <nl> auto EntriesConstant = llvm : : ConstantDataArray : : getString ( <nl> - LLVMContext , EntriesString , / * AddNull = * / false ) ; <nl> + LLVMContext , EntriesString , / * AddNull = * / false ) ; <nl> <nl> - auto var = new llvm : : GlobalVariable ( * getModule ( ) , <nl> - EntriesConstant - > getType ( ) , true , <nl> - llvm : : GlobalValue : : PrivateLinkage , <nl> - EntriesConstant , <nl> - " _swift1_autolink_entries " ) ; <nl> + auto var = <nl> + new llvm : : GlobalVariable ( * getModule ( ) , EntriesConstant - > getType ( ) , true , <nl> + llvm : : GlobalValue : : PrivateLinkage , <nl> + EntriesConstant , " _swift1_autolink_entries " ) ; <nl> var - > setSection ( " . swift1_autolink_entries " ) ; <nl> var - > setAlignment ( getPointerAlignment ( ) . getValue ( ) ) ; <nl> <nl> addUsedGlobal ( var ) ; <nl> - break ; <nl> - } <nl> } <nl> <nl> if ( ! IRGen . Opts . ForceLoadSymbolName . empty ( ) ) { <nl> mmm a / stdlib / private / StdlibUnittest / RaceTest . swift <nl> ppp b / stdlib / private / StdlibUnittest / RaceTest . swift <nl> import SwiftPrivate <nl> import SwiftPrivatePthreadExtras <nl> # if os ( OSX ) | | os ( iOS ) <nl> import Darwin <nl> - # elseif os ( Linux ) | | os ( FreeBSD ) | | os ( Android ) <nl> + # elseif os ( Linux ) | | os ( FreeBSD ) | | os ( PS4 ) | | os ( Android ) <nl> import Glibc <nl> # endif <nl> <nl> mmm a / stdlib / private / StdlibUnittest / StdlibCoreExtras . swift <nl> ppp b / stdlib / private / StdlibUnittest / StdlibCoreExtras . swift <nl> import SwiftPrivate <nl> import SwiftPrivateLibcExtras <nl> # if os ( OSX ) | | os ( iOS ) <nl> import Darwin <nl> - # elseif os ( Linux ) | | os ( FreeBSD ) | | os ( Android ) <nl> + # elseif os ( Linux ) | | os ( FreeBSD ) | | os ( PS4 ) | | os ( Android ) <nl> import Glibc <nl> # endif <nl> <nl> mmm a / stdlib / private / StdlibUnittest / StdlibUnittest . swift . gyb <nl> ppp b / stdlib / private / StdlibUnittest / StdlibUnittest . swift . gyb <nl> import SwiftPrivateLibcExtras <nl> <nl> # if os ( OSX ) | | os ( iOS ) | | os ( watchOS ) | | os ( tvOS ) <nl> import Darwin <nl> - # elseif os ( Linux ) | | os ( FreeBSD ) | | os ( Android ) <nl> + # elseif os ( Linux ) | | os ( FreeBSD ) | | os ( PS4 ) | | os ( Android ) <nl> import Glibc <nl> # endif <nl> <nl> public enum OSVersion : CustomStringConvertible { <nl> case linux <nl> case freeBSD <nl> case android <nl> + case ps4 <nl> <nl> public var description : String { <nl> switch self { <nl> public enum OSVersion : CustomStringConvertible { <nl> return " Linux " <nl> case . freeBSD : <nl> return " FreeBSD " <nl> + case . ps4 : <nl> + return " PS4 " <nl> case . android : <nl> return " Android " <nl> } <nl> func _getOSVersion ( ) - > OSVersion { <nl> return . linux <nl> # elseif os ( FreeBSD ) <nl> return . freeBSD <nl> + # elseif os ( PS4 ) <nl> + return . ps4 <nl> # elseif os ( Android ) <nl> return . 
android <nl> # else <nl> public enum TestRunPredicate : CustomStringConvertible { <nl> <nl> case freeBSDAny ( reason : String ) <nl> <nl> + case ps4Any ( reason : String ) <nl> + <nl> case androidAny ( reason : String ) <nl> <nl> case objCRuntime ( / * reason : * / String ) <nl> public enum TestRunPredicate : CustomStringConvertible { <nl> case . freeBSDAny ( reason : let reason ) : <nl> return " freeBSDAny ( * , reason : \ ( reason ) ) " <nl> <nl> + case . ps4Any ( reason : let reason ) : <nl> + return " ps4Any ( * , reason : \ ( reason ) ) " <nl> + <nl> case . objCRuntime ( let reason ) : <nl> return " Objective - C runtime , reason : \ ( reason ) ) " <nl> case . nativeRuntime ( let reason ) : <nl> public enum TestRunPredicate : CustomStringConvertible { <nl> return false <nl> } <nl> <nl> + case . ps4Any : <nl> + switch _getRunningOSVersion ( ) { <nl> + case . ps4 : <nl> + return true <nl> + default : <nl> + return false <nl> + } <nl> + <nl> case . objCRuntime : <nl> # if _runtime ( _ObjC ) <nl> return true <nl> mmm a / stdlib / private / SwiftPrivateLibcExtras / Subprocess . swift <nl> ppp b / stdlib / private / SwiftPrivateLibcExtras / Subprocess . swift <nl> <nl> import SwiftPrivate <nl> # if os ( OSX ) | | os ( iOS ) | | os ( watchOS ) | | os ( tvOS ) <nl> import Darwin <nl> - # elseif os ( Linux ) | | os ( FreeBSD ) | | os ( Android ) <nl> + # elseif os ( Linux ) | | os ( FreeBSD ) | | os ( PS4 ) | | os ( Android ) <nl> import Glibc <nl> # endif <nl> <nl> internal func _getEnviron ( ) - > UnsafeMutablePointer < UnsafeMutablePointer < CChar > ? <nl> # if os ( OSX ) | | os ( iOS ) | | os ( watchOS ) | | os ( tvOS ) <nl> return _NSGetEnviron ( ) . pointee <nl> # elseif os ( FreeBSD ) <nl> - return environ ; <nl> + return environ <nl> + # elseif os ( PS4 ) <nl> + return environ <nl> # elseif os ( Android ) <nl> return environ <nl> # else <nl> mmm a / stdlib / private / SwiftPrivateLibcExtras / SwiftPrivateLibcExtras . swift <nl> ppp b / stdlib / private / SwiftPrivateLibcExtras / SwiftPrivateLibcExtras . swift <nl> <nl> import SwiftPrivate <nl> # if os ( OSX ) | | os ( iOS ) | | os ( watchOS ) | | os ( tvOS ) <nl> import Darwin <nl> - # elseif os ( Linux ) | | os ( FreeBSD ) | | os ( Android ) <nl> + # elseif os ( Linux ) | | os ( FreeBSD ) | | os ( PS4 ) | | os ( Android ) <nl> import Glibc <nl> # endif <nl> <nl> mmm a / stdlib / private / SwiftPrivatePthreadExtras / PthreadBarriers . swift <nl> ppp b / stdlib / private / SwiftPrivatePthreadExtras / PthreadBarriers . swift <nl> <nl> <nl> # if os ( OSX ) | | os ( iOS ) | | os ( watchOS ) | | os ( tvOS ) <nl> import Darwin <nl> - # elseif os ( Linux ) | | os ( FreeBSD ) | | os ( Android ) <nl> + # elseif os ( Linux ) | | os ( FreeBSD ) | | os ( PS4 ) | | os ( Android ) <nl> import Glibc <nl> # endif <nl> <nl> mmm a / stdlib / private / SwiftPrivatePthreadExtras / SwiftPrivatePthreadExtras . swift <nl> ppp b / stdlib / private / SwiftPrivatePthreadExtras / SwiftPrivatePthreadExtras . swift <nl> <nl> <nl> # if os ( OSX ) | | os ( iOS ) | | os ( watchOS ) | | os ( tvOS ) <nl> import Darwin <nl> - # elseif os ( Linux ) | | os ( FreeBSD ) | | os ( Android ) <nl> + # elseif os ( Linux ) | | os ( FreeBSD ) | | os ( PS4 ) | | os ( Android ) <nl> import Glibc <nl> # endif <nl> <nl> mmm a / stdlib / public / Platform / Platform . swift <nl> ppp b / stdlib / public / Platform / Platform . 
swift <nl> public func | | < T : Boolean > ( <nl> <nl> public var errno : Int32 { <nl> get { <nl> - # if os ( OSX ) | | os ( iOS ) | | os ( watchOS ) | | os ( tvOS ) | | os ( FreeBSD ) <nl> + # if os ( OSX ) | | os ( iOS ) | | os ( watchOS ) | | os ( tvOS ) | | os ( FreeBSD ) | | os ( PS4 ) <nl> return __error ( ) . pointee <nl> / / FIXME : os ( Windows ) should be replaced , such as triple ( Cygwin ) <nl> # elseif os ( Android ) | | os ( Windows ) <nl> public var errno : Int32 { <nl> # endif <nl> } <nl> set ( val ) { <nl> - # if os ( OSX ) | | os ( iOS ) | | os ( watchOS ) | | os ( tvOS ) | | os ( FreeBSD ) <nl> + # if os ( OSX ) | | os ( iOS ) | | os ( watchOS ) | | os ( tvOS ) | | os ( FreeBSD ) | | os ( PS4 ) <nl> return __error ( ) . pointee = val <nl> # elseif os ( Android ) | | os ( Windows ) <nl> return __errno ( ) . pointee = val <nl> public var errno : Int32 { <nl> / / stdio . h <nl> / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> <nl> - # if os ( OSX ) | | os ( iOS ) | | os ( watchOS ) | | os ( tvOS ) | | os ( FreeBSD ) <nl> + # if os ( OSX ) | | os ( iOS ) | | os ( watchOS ) | | os ( tvOS ) | | os ( FreeBSD ) | | os ( PS4 ) <nl> public var stdin : UnsafeMutablePointer < FILE > { <nl> get { <nl> return __stdinp <nl> public var SIG_DFL : sig_t ? { return nil } <nl> public var SIG_IGN : sig_t { return unsafeBitCast ( 1 , to : sig_t . self ) } <nl> public var SIG_ERR : sig_t { return unsafeBitCast ( - 1 , to : sig_t . self ) } <nl> public var SIG_HOLD : sig_t { return unsafeBitCast ( 5 , to : sig_t . self ) } <nl> - # elseif os ( Linux ) | | os ( FreeBSD ) | | os ( Android ) | | os ( Windows ) <nl> + # elseif os ( Linux ) | | os ( FreeBSD ) | | os ( PS4 ) | | os ( Android ) | | os ( Windows ) <nl> # if os ( Windows ) <nl> / / In Cygwin , the below SIG_ * have the same value with Linux . <nl> / / Verified with libstdc + + 6 v5 . 3 . 0 in Cygwin v2 . 4 . 1 64bit . <nl> public var SEM_FAILED : UnsafeMutablePointer < sem_t > ? { <nl> # if os ( OSX ) | | os ( iOS ) | | os ( watchOS ) | | os ( tvOS ) <nl> / / The value is ABI . Value verified to be correct for OS X , iOS , watchOS , tvOS . <nl> return UnsafeMutablePointer < sem_t > ( bitPattern : - 1 ) <nl> - # elseif os ( Linux ) | | os ( FreeBSD ) | | os ( Android ) | | os ( Windows ) <nl> + # elseif os ( Linux ) | | os ( FreeBSD ) | | os ( PS4 ) | | os ( Android ) | | os ( Windows ) <nl> / / The value is ABI . Value verified to be correct on Glibc . <nl> return UnsafeMutablePointer < sem_t > ( bitPattern : 0 ) <nl> # else <nl> public func sem_open ( <nl> / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> <nl> / / FreeBSD defines extern char * * environ differently than Linux . <nl> - # if os ( FreeBSD ) <nl> + # if os ( FreeBSD ) | | os ( PS4 ) <nl> @ _silgen_name ( " _swift_FreeBSD_getEnv " ) <nl> func _swift_FreeBSD_getEnv ( <nl> ) - > UnsafeMutablePointer < UnsafeMutablePointer < UnsafeMutablePointer < CChar > ? > > <nl> mmm a / stdlib / public / Platform / tgmath . swift . gyb <nl> ppp b / stdlib / public / Platform / tgmath . swift . gyb <nl> public func scalbn ( _ x : $ { T } , _ n : Int ) - > $ { T } { <nl> <nl> % # This is AllFloatTypes not OverlayFloatTypes because of the tuple return . 
<nl> % for T , CT , f in AllFloatTypes ( ) : <nl> - # if os ( Linux ) | | os ( FreeBSD ) | | os ( Android ) | | os ( Windows ) <nl> + # if os ( Linux ) | | os ( FreeBSD ) | | os ( PS4 ) | | os ( Android ) | | os ( Windows ) <nl> @ _transparent <nl> public func lgamma ( _ x : $ { T } ) - > ( $ { T } , Int ) { <nl> var sign = CInt ( 0 ) <nl> mmm a / test / 1_stdlib / PrintFloat . swift <nl> ppp b / test / 1_stdlib / PrintFloat . swift <nl> <nl> / / REQUIRES : executable_test <nl> <nl> import StdlibUnittest <nl> - # if os ( Linux ) | | os ( FreeBSD ) | | os ( Android ) <nl> + # if os ( Linux ) | | os ( FreeBSD ) | | os ( PS4 ) | | os ( Android ) <nl> import Glibc <nl> # else <nl> import Darwin <nl> mmm a / test / 1_stdlib / tgmath . swift <nl> ppp b / test / 1_stdlib / tgmath . swift <nl> <nl> / / RUN : % target - run - simple - swift <nl> / / REQUIRES : executable_test <nl> <nl> - # if os ( Linux ) | | os ( FreeBSD ) | | os ( Android ) <nl> + # if os ( Linux ) | | os ( FreeBSD ) | | os ( PS4 ) | | os ( Android ) <nl> import Glibc <nl> / / FIXME : this is a quick hack for non Darwin platforms <nl> / / where they doesn ' t have CoreGraphics module . <nl> mmm a / test / 1_stdlib / tgmath_optimized . swift <nl> ppp b / test / 1_stdlib / tgmath_optimized . swift <nl> <nl> / / RUN : % target - run % t / a . out <nl> / / REQUIRES : executable_test <nl> <nl> - # if os ( Linux ) | | os ( FreeBSD ) | | os ( Android ) <nl> + # if os ( Linux ) | | os ( FreeBSD ) | | os ( PS4 ) | | os ( Android ) <nl> import Glibc <nl> # else <nl> import Darwin <nl> new file mode 100644 <nl> index 000000000000 . . 2fb66998f46b <nl> mmm / dev / null <nl> ppp b / test / IRGen / autolink - psei . swift <nl> <nl> + / / RUN : rm - rf % t <nl> + / / RUN : mkdir - p % t <nl> + / / RUN : % swift - target x86_64 - scei - ps4 - parse - as - library - parse - stdlib - emit - module - path % t / module . swiftmodule - module - name module - module - link - name module % s <nl> + / / RUN : % swift - target x86_64 - scei - ps4 - parse - as - library - parse - stdlib - module - name autolink - I % t - D MAIN_MODULE - emit - ir - o - % s | FileCheck % s - check - prefix CHECK - IR <nl> + <nl> + # if MAIN_MODULE <nl> + import module <nl> + # endif <nl> + <nl> + / / CHECK - IR : ! { { [ 0 - 9 ] + } } = ! { i32 { { [ 0 - 9 ] + } } , ! " Linker Options " , [ [ NODE : ! [ 0 - 9 ] + ] ] } <nl> + / / CHECK - IR : [ [ NODE ] ] = ! { [ [ LIST : ! [ 0 - 9 ] + ] ] } <nl> + / / CHECK - IR : [ [ LIST ] ] = ! { ! " \ 01module " } <nl> + <nl> mmm a / test / Interpreter / SDK / libc . swift <nl> ppp b / test / Interpreter / SDK / libc . swift <nl> <nl> <nl> # if os ( OSX ) | | os ( iOS ) | | os ( watchOS ) | | os ( tvOS ) <nl> import Darwin <nl> - # elseif os ( Linux ) | | os ( FreeBSD ) | | os ( Android ) <nl> + # elseif os ( Linux ) | | os ( FreeBSD ) | | os ( PS4 ) | | os ( Android ) <nl> import Glibc <nl> # endif <nl> <nl> new file mode 100644 <nl> index 000000000000 . . 9d080e4e7cf8 <nl> mmm / dev / null <nl> ppp b / test / Parse / ConditionalCompilation / x86_64PS4Target . swift <nl> <nl> + / / RUN : % swift - parse % s - verify - target x86_64 - scei - ps4 - disable - objc - interop - parse - stdlib <nl> + / / RUN : % swift - ide - test - test - input - complete - source - filename % s - target x86_64 - scei - ps4 <nl> + <nl> + # if os ( FreeBSD ) <nl> + / / This block should not parse . 
<nl> + / / os ( FreeBSD ) does not imply os ( PS4 ) <nl> + let i : Int = " Hello " <nl> + # endif <nl> + <nl> + # if arch ( x86_64 ) & & os ( PS4 ) & & _runtime ( _Native ) & & _endian ( little ) <nl> + class C { } <nl> + var x = C ( ) <nl> + # endif <nl> + <nl> + var y = x <nl> + <nl> mmm a / validation - test / StdlibUnittest / ChildProcessShutdown / FailIfChildCrashesDuringShutdown . swift <nl> ppp b / validation - test / StdlibUnittest / ChildProcessShutdown / FailIfChildCrashesDuringShutdown . swift <nl> <nl> / / REQUIRES : executable_test <nl> <nl> import StdlibUnittest <nl> - # if os ( Linux ) | | os ( FreeBSD ) | | os ( Android ) <nl> + # if os ( Linux ) | | os ( FreeBSD ) | | os ( PS4 ) | | os ( Android ) <nl> import Glibc <nl> # else <nl> import Darwin <nl> mmm a / validation - test / StdlibUnittest / ChildProcessShutdown / FailIfChildExitsDuringShutdown . swift <nl> ppp b / validation - test / StdlibUnittest / ChildProcessShutdown / FailIfChildExitsDuringShutdown . swift <nl> <nl> / / REQUIRES : executable_test <nl> <nl> import StdlibUnittest <nl> - # if os ( Linux ) | | os ( FreeBSD ) | | os ( Android ) <nl> + # if os ( Linux ) | | os ( FreeBSD ) | | os ( PS4 ) | | os ( Android ) <nl> import Glibc <nl> # else <nl> import Darwin <nl> mmm a / validation - test / StdlibUnittest / ChildProcessShutdown / PassIfChildCrashedDuringTestExecution . swift <nl> ppp b / validation - test / StdlibUnittest / ChildProcessShutdown / PassIfChildCrashedDuringTestExecution . swift <nl> <nl> / / REQUIRES : executable_test <nl> <nl> import StdlibUnittest <nl> - # if os ( Linux ) | | os ( FreeBSD ) | | os ( Android ) <nl> + # if os ( Linux ) | | os ( FreeBSD ) | | os ( PS4 ) | | os ( Android ) <nl> import Glibc <nl> # else <nl> import Darwin <nl> new file mode 100644 <nl> index 000000000000 . . a3ce7e8732f0 <nl> mmm / dev / null <nl> ppp b / validation - test / StdlibUnittest / PS4 . swift <nl> <nl> + / / RUN : % target - run - stdlib - swift | FileCheck % s <nl> + / / REQUIRES : executable_test <nl> + <nl> + import Swift <nl> + import StdlibUnittest <nl> + <nl> + _setOverrideOSVersion ( . ps4 ) <nl> + _setTestSuiteFailedCallback ( ) { print ( " abort ( ) " ) } <nl> + <nl> + var XFailsPS4 = TestSuite ( " XFailsPS4 " ) <nl> + <nl> + / / CHECK : [ UXPASS ] XFailsPS4 . xfail PS4 passes { { $ } } <nl> + XFailsPS4 . test ( " xfail PS4 passes " ) . xfail ( . ps4Any ( reason : " " ) ) . code { <nl> + expectEqual ( 1 , 1 ) <nl> + } <nl> + <nl> + / / CHECK : [ XFAIL ] XFailsPS4 . xfail PS4 fails { { $ } } <nl> + XFailsPS4 . test ( " xfail PS4 fails " ) . xfail ( . ps4Any ( reason : " " ) ) . code { <nl> + expectEqual ( 1 , 2 ) <nl> + } <nl> + <nl> + / / CHECK : XFailsPS4 : Some tests failed , aborting <nl> + / / CHECK : abort ( ) <nl> + <nl> + runAllTests ( ) <nl> + <nl> mmm a / validation - test / StdlibUnittest / Stdin . swift <nl> ppp b / validation - test / StdlibUnittest / Stdin . swift <nl> import StdlibUnittest <nl> <nl> # if os ( OSX ) | | os ( iOS ) | | os ( tvOS ) | | os ( watchOS ) <nl> import Darwin <nl> - # elseif os ( Linux ) | | os ( FreeBSD ) | | os ( Android ) <nl> + # elseif os ( Linux ) | | os ( FreeBSD ) | | os ( PS4 ) | | os ( Android ) <nl> import Glibc <nl> # endif <nl> <nl> mmm a / validation - test / stdlib / String . swift <nl> ppp b / validation - test / stdlib / String . swift <nl> StringTests . 
test ( <nl> # endif <nl> } <nl> <nl> - # if os ( Linux ) | | os ( FreeBSD ) | | os ( Android ) <nl> + # if os ( Linux ) | | os ( FreeBSD ) | | os ( PS4 ) | | os ( Android ) <nl> import Glibc <nl> # endif <nl> <nl>
Merge pull request from compnerd / ps4
apple/swift
f235a626fd64867c8c95c14cfeedb9cc17e3faac
2016-07-01T19:34:24Z
new file mode 100644 <nl> index 00000000000 . . a4a5c4ddce4 <nl> mmm / dev / null <nl> ppp b / ports / flann / CONTROL <nl> <nl> + Source : flann <nl> + Version : 1 . 9 . 1 <nl> + Description : performing fast approximate nearest neighbor searches in high dimensional spaces <nl> new file mode 100644 <nl> index 00000000000 . . f65094e8f8b <nl> mmm / dev / null <nl> ppp b / ports / flann / fix - install - flann . patch <nl> <nl> pppmmm " a / src / cpp / CMakeLists . txt " <nl> ppp + " b / src / cpp / CMakeLists . txt " <nl> + if ( BUILD_C_BINDINGS ) <nl> + set_target_properties ( flann PROPERTIES LINKER_LANGUAGE CXX ) <nl> + target_link_libraries ( flann - Wl , - whole - archive flann_s - Wl , - no - whole - archive ) <nl> + else ( ) <nl> + - add_library ( flann SHARED $ { C_SOURCES } ) <nl> + + add_library ( flann $ { C_SOURCES } ) <nl> + <nl> + if ( MINGW AND OPENMP_FOUND ) <nl> + target_link_libraries ( flann gomp ) <nl> + endif ( ) <nl> + endif ( WIN32 ) <nl> + <nl> + <nl> + - install ( <nl> + - TARGETS flann_cpp flann_cpp_s <nl> + - RUNTIME DESTINATION bin <nl> + - LIBRARY DESTINATION $ { FLANN_LIB_INSTALL_DIR } <nl> + - ARCHIVE DESTINATION $ { FLANN_LIB_INSTALL_DIR } <nl> + - ) <nl> + + # install ( <nl> + + # TARGETS flann_cpp flann_cpp_s <nl> + + # RUNTIME DESTINATION bin <nl> + + # LIBRARY DESTINATION $ { FLANN_LIB_INSTALL_DIR } <nl> + + # ARCHIVE DESTINATION $ { FLANN_LIB_INSTALL_DIR } <nl> + + # ) <nl> + <nl> + if ( BUILD_CUDA_LIB ) <nl> + install ( <nl> + endif ( ) <nl> + <nl> + if ( BUILD_C_BINDINGS ) <nl> + install ( <nl> + - TARGETS flann flann_s <nl> + + TARGETS flann <nl> + RUNTIME DESTINATION bin <nl> + LIBRARY DESTINATION $ { FLANN_LIB_INSTALL_DIR } <nl> + ARCHIVE DESTINATION $ { FLANN_LIB_INSTALL_DIR } <nl> new file mode 100644 <nl> index 00000000000 . . 4a87365c015 <nl> mmm / dev / null <nl> ppp b / ports / flann / portfile . cmake <nl> <nl> + # Common Ambient Variables : <nl> + # CURRENT_BUILDTREES_DIR = $ { VCPKG_ROOT_DIR } \ buildtrees \ $ { PORT } <nl> + # CURRENT_PACKAGES_DIR = $ { VCPKG_ROOT_DIR } \ packages \ $ { PORT } _ $ { TARGET_TRIPLET } <nl> + # CURRENT_PORT DIR = $ { VCPKG_ROOT_DIR } \ ports \ $ { PORT } <nl> + # PORT = current port name ( zlib , etc ) <nl> + # TARGET_TRIPLET = current triplet ( x86 - windows , x64 - windows - static , etc ) <nl> + # VCPKG_CRT_LINKAGE = C runtime linkage type ( static , dynamic ) <nl> + # VCPKG_LIBRARY_LINKAGE = target library linkage type ( static , dynamic ) <nl> + # VCPKG_ROOT_DIR = < C : \ path \ to \ current \ vcpkg > <nl> + # VCPKG_TARGET_ARCHITECTURE = target architecture ( x64 , x86 , arm ) <nl> + # <nl> + <nl> + include ( vcpkg_common_functions ) <nl> + <nl> + vcpkg_from_github ( <nl> + OUT_SOURCE_PATH SOURCE_PATH <nl> + REPO mariusmuja / flann <nl> + REF 1 . 9 . 1 <nl> + SHA512 0da78bb14111013318160dd3dee1f93eb6ed077b18439fd6496017b62a8a6070cc859cfb3e08dad4c614e48d9dc1da5f7c4a21726ee45896d360506da074a6f7 <nl> + ) <nl> + <nl> + vcpkg_apply_patches ( <nl> + SOURCE_PATH $ { SOURCE_PATH } <nl> + PATCHES " $ { CMAKE_CURRENT_LIST_DIR } / fix - install - flann . 
patch " <nl> + ) <nl> + <nl> + vcpkg_configure_cmake ( <nl> + SOURCE_PATH $ { SOURCE_PATH } <nl> + # PREFER_NINJA # Disable this option if project cannot be built with Ninja <nl> + OPTIONS <nl> + - DBUILD_EXAMPLES = OFF <nl> + - DBUILD_PYTHON_BINDINGS = OFF <nl> + - DBUILD_MATLAB_BINDINGS = OFF <nl> + - DBUILD_DOC = OFF <nl> + <nl> + OPTIONS_RELEASE <nl> + - DFLANN_LIB_INSTALL_DIR = $ { CURRENT_PACKAGES_DIR } / lib <nl> + <nl> + OPTIONS_DEBUG <nl> + - DFLANN_LIB_INSTALL_DIR = $ { CURRENT_PACKAGES_DIR } / debug / lib <nl> + <nl> + ) <nl> + <nl> + vcpkg_install_cmake ( ) <nl> + <nl> + # clean <nl> + file ( REMOVE_RECURSE $ { CURRENT_PACKAGES_DIR } / debug / include ) <nl> + <nl> + # Handle copyright <nl> + file ( COPY $ { SOURCE_PATH } / README . md DESTINATION $ { CURRENT_PACKAGES_DIR } / share / flann ) <nl> + file ( RENAME $ { CURRENT_PACKAGES_DIR } / share / flann / README . md $ { CURRENT_PACKAGES_DIR } / share / flann / copyright ) <nl> \ No newline at end of file <nl>
check in flann
microsoft/vcpkg
ec625bc160748ea4b35c3c0c4fb7daa06b42de32
2017-06-11T21:26:22Z
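The flann port above only packages the library for vcpkg; it does not show the library in use. As a quick sanity check of an installed port, a minimal nearest-neighbour query against flann's C++ API might look like the sketch below. The sample points, the number of KD-trees, and the search parameters are illustrative assumptions, not anything mandated by the port.

```cpp
#include <flann/flann.hpp>
#include <cstdio>
#include <vector>

int main() {
    // Four 2-D points stored row-major; flann::Matrix only wraps the buffer.
    std::vector<float> data = {0.f, 0.f,  1.f, 0.f,  0.f, 1.f,  5.f, 5.f};
    flann::Matrix<float> dataset(data.data(), 4, 2);

    // Build a randomized KD-tree index (4 trees is an arbitrary choice here).
    flann::Index<flann::L2<float>> index(dataset, flann::KDTreeIndexParams(4));
    index.buildIndex();

    // Ask for the 2 nearest neighbours of the single query point (0.9, 0.1).
    std::vector<float> q = {0.9f, 0.1f};
    flann::Matrix<float> query(q.data(), 1, 2);

    std::vector<int> idx(2);
    std::vector<float> dist(2);
    flann::Matrix<int> indices(idx.data(), 1, 2);
    flann::Matrix<float> dists(dist.data(), 1, 2);

    index.knnSearch(query, indices, dists, 2, flann::SearchParams(128));
    std::printf("nearest row: %d (squared L2 distance %f)\n", idx[0], dist[0]);
    return 0;
}
```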
mmm a / include / rapidjson / filereadstream . h <nl> ppp b / include / rapidjson / filereadstream . h <nl> <nl> <nl> # include " stream . h " <nl> # include < cstdio > <nl> - # include < cstring > <nl> <nl> # ifdef __clang__ <nl> RAPIDJSON_DIAG_PUSH <nl> class FileReadStream { <nl> public : <nl> typedef char Ch ; / / ! < Character type ( byte ) . <nl> <nl> - / / ! Constructor . <nl> - / * ! <nl> - \ param fp File pointer opened for read . <nl> - * / <nl> - FileReadStream ( std : : FILE * fp ) : fp_ ( fp ) , buffer_ ( peekBuffer_ ) , size_ ( sizeof ( peekBuffer_ ) / sizeof ( Ch ) ) , pos_ ( ) , len_ ( ) , count_ ( ) <nl> - { <nl> - RAPIDJSON_ASSERT ( fp_ ! = 0 ) ; <nl> - } <nl> - <nl> / / ! Constructor . <nl> / * ! <nl> \ param fp File pointer opened for read . <nl> \ param buffer user - supplied buffer . <nl> \ param bufferSize size of buffer in bytes . Must > = 4 bytes . <nl> * / <nl> - FileReadStream ( std : : FILE * fp , Ch * buffer , size_t size ) : fp_ ( fp ) , buffer_ ( buffer ) , size_ ( size ) , pos_ ( ) , len_ ( ) , count_ ( ) { <nl> - RAPIDJSON_ASSERT ( fp_ ! = 0 & & buffer_ ! = 0 & & size_ > 0 ) ; <nl> - if ( RAPIDJSON_UNLIKELY ( size_ < sizeof ( peekBuffer_ ) / sizeof ( Ch ) ) ) { <nl> - size_ = sizeof ( peekBuffer_ ) / sizeof ( Ch ) ; <nl> - buffer_ = peekBuffer_ ; <nl> - } <nl> - } <nl> - <nl> - Ch Peek ( ) const { <nl> - if ( RAPIDJSON_UNLIKELY ( pos_ = = len_ ) & & ! Read ( ) ) <nl> - return static_cast < Ch > ( ' \ 0 ' ) ; <nl> - return buffer_ [ pos_ ] ; <nl> - } <nl> - <nl> - Ch Take ( ) { <nl> - if ( RAPIDJSON_UNLIKELY ( pos_ = = len_ ) & & ! Read ( ) ) <nl> - return static_cast < Ch > ( ' \ 0 ' ) ; <nl> - return buffer_ [ pos_ + + ] ; <nl> + FileReadStream ( std : : FILE * fp , char * buffer , size_t bufferSize ) : fp_ ( fp ) , buffer_ ( buffer ) , bufferSize_ ( bufferSize ) , bufferLast_ ( 0 ) , current_ ( buffer_ ) , readCount_ ( 0 ) , count_ ( 0 ) , eof_ ( false ) { <nl> + RAPIDJSON_ASSERT ( fp_ ! = 0 ) ; <nl> + RAPIDJSON_ASSERT ( bufferSize > = 4 ) ; <nl> + Read ( ) ; <nl> } <nl> <nl> - size_t Tell ( ) const { return count_ + pos_ ; } <nl> + Ch Peek ( ) const { return * current_ ; } <nl> + Ch Take ( ) { Ch c = * current_ ; Read ( ) ; return c ; } <nl> + size_t Tell ( ) const { return count_ + static_cast < size_t > ( current_ - buffer_ ) ; } <nl> <nl> / / Not implemented <nl> void Put ( Ch ) { RAPIDJSON_ASSERT ( false ) ; } <nl> class FileReadStream { <nl> <nl> / / For encoding detection only . <nl> const Ch * Peek4 ( ) const { <nl> - if ( len_ - pos_ < 4 ) { <nl> - if ( pos_ ) { <nl> - len_ - = pos_ ; <nl> - std : : memmove ( buffer_ , buffer_ + pos_ , len_ ) ; <nl> - count_ + = pos_ ; <nl> - pos_ = 0 ; <nl> - } <nl> - len_ + = std : : fread ( buffer_ + len_ , sizeof ( Ch ) , size_ - len_ , fp_ ) ; <nl> - if ( len_ < 4 ) <nl> - return 0 ; <nl> - } <nl> - return & buffer_ [ pos_ ] ; <nl> + return ( current_ + 4 < = bufferLast_ ) ? current_ : 0 ; <nl> } <nl> <nl> private : <nl> - FileReadStream ( ) ; <nl> - FileReadStream ( const FileReadStream & ) ; <nl> - FileReadStream & operator = ( const FileReadStream & ) ; <nl> - <nl> - size_t Read ( ) const { <nl> - count_ + = pos_ ; <nl> - pos_ = 0 ; <nl> - len_ = std : : fread ( buffer_ , sizeof ( Ch ) , size_ , fp_ ) ; <nl> - return len_ ; <nl> + void Read ( ) { <nl> + if ( current_ < bufferLast_ ) <nl> + + + current_ ; <nl> + else if ( ! 
eof_ ) { <nl> + count_ + = readCount_ ; <nl> + readCount_ = std : : fread ( buffer_ , 1 , bufferSize_ , fp_ ) ; <nl> + bufferLast_ = buffer_ + readCount_ - 1 ; <nl> + current_ = buffer_ ; <nl> + <nl> + if ( readCount_ < bufferSize_ ) { <nl> + buffer_ [ readCount_ ] = ' \ 0 ' ; <nl> + + + bufferLast_ ; <nl> + eof_ = true ; <nl> + } <nl> + } <nl> } <nl> <nl> std : : FILE * fp_ ; <nl> - Ch peekBuffer_ [ 4 ] , * buffer_ ; <nl> - size_t size_ ; <nl> - mutable size_t pos_ , len_ , count_ ; <nl> + Ch * buffer_ ; <nl> + size_t bufferSize_ ; <nl> + Ch * bufferLast_ ; <nl> + Ch * current_ ; <nl> + size_t readCount_ ; <nl> + size_t count_ ; / / ! < Number of characters read <nl> + bool eof_ ; <nl> } ; <nl> <nl> RAPIDJSON_NAMESPACE_END <nl> mmm a / include / rapidjson / istreamwrapper . h <nl> ppp b / include / rapidjson / istreamwrapper . h <nl> <nl> <nl> # include " stream . h " <nl> # include < iosfwd > <nl> - # include < cstring > <nl> <nl> # ifdef __clang__ <nl> RAPIDJSON_DIAG_PUSH <nl> class BasicIStreamWrapper { <nl> public : <nl> typedef typename StreamType : : char_type Ch ; <nl> <nl> - BasicIStreamWrapper ( StreamType & stream ) : stream_ ( stream ) , buffer_ ( peekBuffer_ ) , size_ ( sizeof ( peekBuffer_ ) / sizeof ( Ch ) ) , pos_ ( ) , len_ ( ) , count_ ( ) { } <nl> - <nl> - BasicIStreamWrapper ( StreamType & stream , Ch * buffer , size_t size ) : stream_ ( stream ) , buffer_ ( buffer ) , size_ ( size ) , pos_ ( ) , len_ ( ) , count_ ( ) { <nl> - RAPIDJSON_ASSERT ( buffer_ ! = 0 & & static_cast < std : : streamsize > ( size_ ) > 0 ) ; <nl> - if ( RAPIDJSON_UNLIKELY ( size_ < sizeof ( peekBuffer_ ) / sizeof ( Ch ) ) ) { <nl> - size_ = sizeof ( peekBuffer_ ) / sizeof ( Ch ) ; <nl> - buffer_ = peekBuffer_ ; <nl> - } <nl> - } <nl> - <nl> - Ch Peek ( ) const { <nl> - if ( RAPIDJSON_UNLIKELY ( pos_ = = len_ ) & & ! Read ( ) ) <nl> - return static_cast < Ch > ( ' \ 0 ' ) ; <nl> - return buffer_ [ pos_ ] ; <nl> + / / ! Constructor . <nl> + / * ! <nl> + \ param stream stream opened for read . <nl> + * / <nl> + BasicIStreamWrapper ( StreamType & stream ) : stream_ ( stream ) , buffer_ ( peekBuffer_ ) , bufferSize_ ( 4 ) , bufferLast_ ( 0 ) , current_ ( buffer_ ) , readCount_ ( 0 ) , count_ ( 0 ) , eof_ ( false ) { <nl> + Read ( ) ; <nl> } <nl> <nl> - Ch Take ( ) { <nl> - if ( RAPIDJSON_UNLIKELY ( pos_ = = len_ ) & & ! Read ( ) ) <nl> - return static_cast < Ch > ( ' \ 0 ' ) ; <nl> - return buffer_ [ pos_ + + ] ; <nl> + / / ! Constructor . <nl> + / * ! <nl> + \ param stream stream opened for read . <nl> + \ param buffer user - supplied buffer . <nl> + \ param bufferSize size of buffer in bytes . Must > = 4 bytes . <nl> + * / <nl> + BasicIStreamWrapper ( StreamType & stream , char * buffer , size_t bufferSize ) : stream_ ( stream ) , buffer_ ( buffer ) , bufferSize_ ( bufferSize ) , bufferLast_ ( 0 ) , current_ ( buffer_ ) , readCount_ ( 0 ) , count_ ( 0 ) , eof_ ( false ) { <nl> + RAPIDJSON_ASSERT ( bufferSize > = 4 ) ; <nl> + Read ( ) ; <nl> } <nl> <nl> - / / tellg ( ) may return - 1 when failed . So we count by ourself . 
<nl> - size_t Tell ( ) const { return count_ + pos_ ; } <nl> + Ch Peek ( ) const { return * current_ ; } <nl> + Ch Take ( ) { Ch c = * current_ ; Read ( ) ; return c ; } <nl> + size_t Tell ( ) const { return count_ + static_cast < size_t > ( current_ - buffer_ ) ; } <nl> <nl> / / Not implemented <nl> - Ch * PutBegin ( ) { RAPIDJSON_ASSERT ( false ) ; return 0 ; } <nl> void Put ( Ch ) { RAPIDJSON_ASSERT ( false ) ; } <nl> - void Flush ( ) { RAPIDJSON_ASSERT ( false ) ; } <nl> + void Flush ( ) { RAPIDJSON_ASSERT ( false ) ; } <nl> + Ch * PutBegin ( ) { RAPIDJSON_ASSERT ( false ) ; return 0 ; } <nl> size_t PutEnd ( Ch * ) { RAPIDJSON_ASSERT ( false ) ; return 0 ; } <nl> <nl> / / For encoding detection only . <nl> const Ch * Peek4 ( ) const { <nl> - RAPIDJSON_ASSERT ( sizeof ( Ch ) = = 1 ) ; / / Only usable for byte stream . <nl> - if ( len_ - pos_ < 4 ) { <nl> - if ( pos_ ) { <nl> - len_ - = pos_ ; <nl> - std : : memmove ( buffer_ , buffer_ + pos_ , len_ ) ; <nl> - count_ + = pos_ ; <nl> - pos_ = 0 ; <nl> - } <nl> - if ( ! stream_ . read ( buffer_ + len_ , static_cast < std : : streamsize > ( size_ - len_ ) ) ) { <nl> - len_ + = static_cast < size_t > ( stream_ . gcount ( ) ) ; <nl> - if ( len_ < 4 ) <nl> - return 0 ; <nl> - } <nl> - else <nl> - len_ = size_ ; <nl> - } <nl> - return & buffer_ [ pos_ ] ; <nl> + return ( current_ + 4 - ! eof_ < = bufferLast_ ) ? current_ : 0 ; <nl> } <nl> <nl> private : <nl> + BasicIStreamWrapper ( ) ; <nl> BasicIStreamWrapper ( const BasicIStreamWrapper & ) ; <nl> BasicIStreamWrapper & operator = ( const BasicIStreamWrapper & ) ; <nl> <nl> - size_t Read ( ) const { <nl> - count_ + = pos_ ; <nl> - pos_ = 0 ; <nl> - if ( ! stream_ . read ( buffer_ , static_cast < std : : streamsize > ( size_ ) ) ) <nl> - len_ = static_cast < size_t > ( stream_ . gcount ( ) ) ; <nl> - else <nl> - len_ = size_ ; <nl> - return len_ ; <nl> + void Read ( ) { <nl> + if ( current_ < bufferLast_ ) <nl> + + + current_ ; <nl> + else if ( ! eof_ ) { <nl> + count_ + = readCount_ ; <nl> + readCount_ = bufferSize_ ; <nl> + bufferLast_ = buffer_ + readCount_ - 1 ; <nl> + current_ = buffer_ ; <nl> + <nl> + if ( ! stream_ . read ( buffer_ , static_cast < std : : streamsize > ( bufferSize_ ) ) ) { <nl> + readCount_ = static_cast < size_t > ( stream_ . gcount ( ) ) ; <nl> + * ( bufferLast_ = buffer_ + readCount_ ) = ' \ 0 ' ; <nl> + eof_ = true ; <nl> + } <nl> + } <nl> } <nl> <nl> - StreamType & stream_ ; <nl> + StreamType & stream_ ; <nl> Ch peekBuffer_ [ 4 ] , * buffer_ ; <nl> - size_t size_ ; <nl> - mutable size_t pos_ , len_ , count_ ; <nl> + size_t bufferSize_ ; <nl> + Ch * bufferLast_ ; <nl> + Ch * current_ ; <nl> + size_t readCount_ ; <nl> + size_t count_ ; / / ! < Number of characters read <nl> + bool eof_ ; <nl> } ; <nl> <nl> typedef BasicIStreamWrapper < std : : istream > IStreamWrapper ; <nl> mmm a / test / perftest / rapidjsontest . cpp <nl> ppp b / test / perftest / rapidjsontest . cpp <nl> TEST_F ( RapidJson , FileReadStream ) { <nl> } <nl> } <nl> <nl> - TEST_F ( RapidJson , FileReadStream_Unbuffered ) { <nl> - for ( size_t i = 0 ; i < kTrialCount ; i + + ) { <nl> - FILE * fp = fopen ( filename_ , " rb " ) ; <nl> - FileReadStream s ( fp ) ; <nl> - while ( s . Take ( ) ! 
= ' \ 0 ' ) <nl> - ; <nl> - fclose ( fp ) ; <nl> - } <nl> - } <nl> - <nl> TEST_F ( RapidJson , SIMD_SUFFIX ( ReaderParse_DummyHandler_FileReadStream ) ) { <nl> for ( size_t i = 0 ; i < kTrialCount ; i + + ) { <nl> FILE * fp = fopen ( filename_ , " rb " ) ; <nl> TEST_F ( RapidJson , SIMD_SUFFIX ( ReaderParse_DummyHandler_FileReadStream ) ) { <nl> } <nl> } <nl> <nl> - TEST_F ( RapidJson , SIMD_SUFFIX ( ReaderParse_DummyHandler_FileReadStream_Unbuffered ) ) { <nl> - for ( size_t i = 0 ; i < kTrialCount ; i + + ) { <nl> - FILE * fp = fopen ( filename_ , " rb " ) ; <nl> - FileReadStream s ( fp ) ; <nl> - BaseReaderHandler < > h ; <nl> - Reader reader ; <nl> - reader . Parse ( s , h ) ; <nl> - fclose ( fp ) ; <nl> - } <nl> - } <nl> - <nl> TEST_F ( RapidJson , IStreamWrapper ) { <nl> for ( size_t i = 0 ; i < kTrialCount ; i + + ) { <nl> std : : ifstream is ( filename_ , std : : ios : : in | std : : ios : : binary ) ; <nl>
Base buffered BasicIStreamWrapper on the original ( better performing ) FileReadStream algorithm .
Tencent/rapidjson
8aab3db129585d81d74ed5108450c874622d46fd
2018-12-05T23:21:05Z
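The rapidjson change above rebuilds FileReadStream (and the buffered BasicIStreamWrapper) around a caller-supplied buffer that Read() refills one chunk at a time. A minimal sketch of how such a stream is normally driven when parsing a document follows; the file name and the 64 KiB buffer size are arbitrary assumptions, not part of the commit.

```cpp
#include "rapidjson/document.h"
#include "rapidjson/filereadstream.h"
#include <cstdio>

int main() {
    // "data.json" is a placeholder path; the stream expects a file opened in binary mode.
    std::FILE* fp = std::fopen("data.json", "rb");
    if (!fp) return 1;

    // Caller-owned read buffer (the wrapper requires at least 4 bytes).
    char buffer[65536];
    rapidjson::FileReadStream is(fp, buffer, sizeof(buffer));

    // Parse directly from the stream instead of loading the whole file into memory.
    rapidjson::Document doc;
    doc.ParseStream(is);
    std::fclose(fp);

    return doc.HasParseError() ? 1 : 0;
}
```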
mmm a / cocos / base / CCValue . cpp <nl> ppp b / cocos / base / CCValue . cpp <nl> Value : : Value ( int v ) <nl> _field . intVal = v ; <nl> } <nl> <nl> + Value : : Value ( unsigned int v ) <nl> + : _type ( Type : : UNSIGNED ) <nl> + { <nl> + _field . unsignedVal = v ; <nl> + } <nl> + <nl> Value : : Value ( float v ) <nl> : _type ( Type : : FLOAT ) <nl> { <nl> Value & Value : : operator = ( const Value & other ) <nl> case Type : : INTEGER : <nl> _field . intVal = other . _field . intVal ; <nl> break ; <nl> + case Type : : UNSIGNED : <nl> + _field . unsignedVal = other . _field . unsignedVal ; <nl> + break ; <nl> case Type : : FLOAT : <nl> _field . floatVal = other . _field . floatVal ; <nl> break ; <nl> Value & Value : : operator = ( Value & & other ) <nl> case Type : : INTEGER : <nl> _field . intVal = other . _field . intVal ; <nl> break ; <nl> + case Type : : UNSIGNED : <nl> + _field . unsignedVal = other . _field . unsignedVal ; <nl> + break ; <nl> case Type : : FLOAT : <nl> _field . floatVal = other . _field . floatVal ; <nl> break ; <nl> Value & Value : : operator = ( int v ) <nl> return * this ; <nl> } <nl> <nl> + Value & Value : : operator = ( unsigned int v ) <nl> + { <nl> + reset ( Type : : UNSIGNED ) ; <nl> + _field . unsignedVal = v ; <nl> + return * this ; <nl> + } <nl> + <nl> Value & Value : : operator = ( float v ) <nl> { <nl> reset ( Type : : FLOAT ) ; <nl> bool Value : : operator = = ( const Value & v ) const <nl> if ( this - > isNull ( ) ) return true ; <nl> switch ( _type ) <nl> { <nl> - case Type : : BYTE : return v . _field . byteVal = = this - > _field . byteVal ; <nl> - case Type : : INTEGER : return v . _field . intVal = = this - > _field . intVal ; <nl> - case Type : : BOOLEAN : return v . _field . boolVal = = this - > _field . boolVal ; <nl> - case Type : : STRING : return * v . _field . strVal = = * this - > _field . strVal ; <nl> - case Type : : FLOAT : return fabs ( v . _field . floatVal - this - > _field . floatVal ) < = FLT_EPSILON ; <nl> - case Type : : DOUBLE : return fabs ( v . _field . doubleVal - this - > _field . doubleVal ) < = FLT_EPSILON ; <nl> - case Type : : VECTOR : <nl> - { <nl> - const auto & v1 = * ( this - > _field . vectorVal ) ; <nl> - const auto & v2 = * ( v . _field . vectorVal ) ; <nl> - const auto size = v1 . size ( ) ; <nl> - if ( size = = v2 . size ( ) ) <nl> + case Type : : BYTE : return v . _field . byteVal = = this - > _field . byteVal ; <nl> + case Type : : INTEGER : return v . _field . intVal = = this - > _field . intVal ; <nl> + case Type : : UNSIGNED : return v . _field . unsignedVal = = this - > _field . unsignedVal ; <nl> + case Type : : BOOLEAN : return v . _field . boolVal = = this - > _field . boolVal ; <nl> + case Type : : STRING : return * v . _field . strVal = = * this - > _field . strVal ; <nl> + case Type : : FLOAT : return fabs ( v . _field . floatVal - this - > _field . floatVal ) < = FLT_EPSILON ; <nl> + case Type : : DOUBLE : return fabs ( v . _field . doubleVal - this - > _field . doubleVal ) < = FLT_EPSILON ; <nl> + case Type : : VECTOR : <nl> { <nl> - for ( size_t i = 0 ; i < size ; i + + ) <nl> + const auto & v1 = * ( this - > _field . vectorVal ) ; <nl> + const auto & v2 = * ( v . _field . vectorVal ) ; <nl> + const auto size = v1 . size ( ) ; <nl> + if ( size = = v2 . size ( ) ) <nl> { <nl> - if ( v1 [ i ] ! = v2 [ i ] ) return false ; <nl> + for ( size_t i = 0 ; i < size ; i + + ) <nl> + { <nl> + if ( v1 [ i ] ! 
= v2 [ i ] ) return false ; <nl> + } <nl> + return true ; <nl> } <nl> - return true ; <nl> + return false ; <nl> } <nl> - return false ; <nl> - } <nl> - case Type : : MAP : <nl> - { <nl> - const auto & map1 = * ( this - > _field . mapVal ) ; <nl> - const auto & map2 = * ( v . _field . mapVal ) ; <nl> - for ( const auto & kvp : map1 ) <nl> + case Type : : MAP : <nl> { <nl> - auto it = map2 . find ( kvp . first ) ; <nl> - if ( it = = map2 . end ( ) | | it - > second ! = kvp . second ) <nl> + const auto & map1 = * ( this - > _field . mapVal ) ; <nl> + const auto & map2 = * ( v . _field . mapVal ) ; <nl> + for ( const auto & kvp : map1 ) <nl> { <nl> - return false ; <nl> + auto it = map2 . find ( kvp . first ) ; <nl> + if ( it = = map2 . end ( ) | | it - > second ! = kvp . second ) <nl> + { <nl> + return false ; <nl> + } <nl> } <nl> + return true ; <nl> } <nl> - return true ; <nl> - } <nl> - case Type : : INT_KEY_MAP : <nl> - { <nl> - const auto & map1 = * ( this - > _field . intKeyMapVal ) ; <nl> - const auto & map2 = * ( v . _field . intKeyMapVal ) ; <nl> - for ( const auto & kvp : map1 ) <nl> + case Type : : INT_KEY_MAP : <nl> { <nl> - auto it = map2 . find ( kvp . first ) ; <nl> - if ( it = = map2 . end ( ) | | it - > second ! = kvp . second ) <nl> + const auto & map1 = * ( this - > _field . intKeyMapVal ) ; <nl> + const auto & map2 = * ( v . _field . intKeyMapVal ) ; <nl> + for ( const auto & kvp : map1 ) <nl> { <nl> - return false ; <nl> + auto it = map2 . find ( kvp . first ) ; <nl> + if ( it = = map2 . end ( ) | | it - > second ! = kvp . second ) <nl> + { <nl> + return false ; <nl> + } <nl> } <nl> + return true ; <nl> } <nl> - return true ; <nl> - } <nl> - default : <nl> - break ; <nl> + default : <nl> + break ; <nl> } ; <nl> <nl> return false ; <nl> unsigned char Value : : asByte ( ) const <nl> return static_cast < unsigned char > ( _field . intVal ) ; <nl> } <nl> <nl> + if ( _type = = Type : : UNSIGNED ) <nl> + { <nl> + return static_cast < unsigned char > ( _field . unsignedVal ) ; <nl> + } <nl> + <nl> if ( _type = = Type : : STRING ) <nl> { <nl> return static_cast < unsigned char > ( atoi ( _field . strVal - > c_str ( ) ) ) ; <nl> int Value : : asInt ( ) const <nl> return _field . intVal ; <nl> } <nl> <nl> + if ( _type = = Type : : UNSIGNED ) <nl> + { <nl> + CCASSERT ( _field . unsignedVal < INT_MAX , " Can only convert values < INT_MAX " ) ; <nl> + return ( int ) _field . unsignedVal ; <nl> + } <nl> + <nl> if ( _type = = Type : : BYTE ) <nl> { <nl> return _field . byteVal ; <nl> int Value : : asInt ( ) const <nl> return 0 ; <nl> } <nl> <nl> + <nl> + unsigned int Value : : asUnsignedInt ( ) const <nl> + { <nl> + CCASSERT ( _type ! = Type : : VECTOR & & _type ! = Type : : MAP & & _type ! = Type : : INT_KEY_MAP , " Only base type ( bool , string , float , double , int ) could be converted " ) ; <nl> + if ( _type = = Type : : UNSIGNED ) <nl> + { <nl> + return _field . unsignedVal ; <nl> + } <nl> + <nl> + if ( _type = = Type : : INTEGER ) <nl> + { <nl> + CCASSERT ( _field . intVal > = 0 , " Only values > = 0 can be converted to unsigned " ) ; <nl> + return static_cast < unsigned int > ( _field . intVal ) ; <nl> + } <nl> + <nl> + if ( _type = = Type : : BYTE ) <nl> + { <nl> + return static_cast < unsigned int > ( _field . byteVal ) ; <nl> + } <nl> + <nl> + if ( _type = = Type : : STRING ) <nl> + { <nl> + / / NOTE : strtoul is required ( need to augment on unsupported platforms ) <nl> + return static_cast < unsigned int > ( strtoul ( _field . 
strVal - > c_str ( ) , nullptr , 10 ) ) ; <nl> + } <nl> + <nl> + if ( _type = = Type : : FLOAT ) <nl> + { <nl> + return static_cast < unsigned int > ( _field . floatVal ) ; <nl> + } <nl> + <nl> + if ( _type = = Type : : DOUBLE ) <nl> + { <nl> + return static_cast < unsigned int > ( _field . doubleVal ) ; <nl> + } <nl> + <nl> + if ( _type = = Type : : BOOLEAN ) <nl> + { <nl> + return _field . boolVal ? 1u : 0u ; <nl> + } <nl> + <nl> + return 0u ; <nl> + } <nl> + <nl> float Value : : asFloat ( ) const <nl> { <nl> CCASSERT ( _type ! = Type : : VECTOR & & _type ! = Type : : MAP & & _type ! = Type : : INT_KEY_MAP , " Only base type ( bool , string , float , double , int ) could be converted " ) ; <nl> float Value : : asFloat ( ) const <nl> return static_cast < float > ( _field . intVal ) ; <nl> } <nl> <nl> + if ( _type = = Type : : UNSIGNED ) <nl> + { <nl> + return static_cast < float > ( _field . unsignedVal ) ; <nl> + } <nl> + <nl> if ( _type = = Type : : DOUBLE ) <nl> { <nl> return static_cast < float > ( _field . doubleVal ) ; <nl> double Value : : asDouble ( ) const <nl> return static_cast < double > ( _field . intVal ) ; <nl> } <nl> <nl> + if ( _type = = Type : : UNSIGNED ) <nl> + { <nl> + return static_cast < double > ( _field . unsignedVal ) ; <nl> + } <nl> + <nl> if ( _type = = Type : : FLOAT ) <nl> { <nl> return static_cast < double > ( _field . floatVal ) ; <nl> bool Value : : asBool ( ) const <nl> return _field . intVal = = 0 ? false : true ; <nl> } <nl> <nl> + if ( _type = = Type : : UNSIGNED ) <nl> + { <nl> + return _field . unsignedVal = = 0 ? false : true ; <nl> + } <nl> + <nl> if ( _type = = Type : : FLOAT ) <nl> { <nl> return _field . floatVal = = 0 . 0f ? false : true ; <nl> std : : string Value : : asString ( ) const <nl> case Type : : INTEGER : <nl> ret < < _field . intVal ; <nl> break ; <nl> + case Type : : UNSIGNED : <nl> + ret < < _field . unsignedVal ; <nl> + break ; <nl> case Type : : FLOAT : <nl> ret < < std : : fixed < < std : : setprecision ( 7 ) < < _field . floatVal ; <nl> break ; <nl> static std : : string visit ( const Value & v , int depth ) <nl> case Value : : Type : : NONE : <nl> case Value : : Type : : BYTE : <nl> case Value : : Type : : INTEGER : <nl> + case Value : : Type : : UNSIGNED : <nl> case Value : : Type : : FLOAT : <nl> case Value : : Type : : DOUBLE : <nl> case Value : : Type : : BOOLEAN : <nl> void Value : : clear ( ) <nl> case Type : : INTEGER : <nl> _field . intVal = 0 ; <nl> break ; <nl> + case Type : : UNSIGNED : <nl> + _field . unsignedVal = 0u ; <nl> + break ; <nl> case Type : : FLOAT : <nl> _field . floatVal = 0 . 0f ; <nl> break ; <nl> void Value : : clear ( ) <nl> default : <nl> break ; <nl> } <nl> - <nl> + <nl> _type = Type : : NONE ; <nl> } <nl> <nl> mmm a / cocos / base / CCValue . h <nl> ppp b / cocos / base / CCValue . h <nl> class CC_DLL Value <nl> <nl> / * * Create a Value by an integer value . * / <nl> explicit Value ( int v ) ; <nl> - <nl> + <nl> + / * * Create a Value by an unsigned value . * / <nl> + explicit Value ( unsigned int v ) ; <nl> + <nl> / * * Create a Value by a float value . * / <nl> explicit Value ( float v ) ; <nl> <nl> class CC_DLL Value <nl> Value & operator = ( unsigned char v ) ; <nl> / * * Assignment operator , assign from integer to Value . * / <nl> Value & operator = ( int v ) ; <nl> + / * * Assignment operator , assign from integer to Value . * / <nl> + Value & operator = ( unsigned int v ) ; <nl> / * * Assignment operator , assign from float to Value . 
* / <nl> Value & operator = ( float v ) ; <nl> / * * Assignment operator , assign from double to Value . * / <nl> class CC_DLL Value <nl> unsigned char asByte ( ) const ; <nl> / * * Gets as an integer value . Will convert to integer if possible , or will trigger assert error . * / <nl> int asInt ( ) const ; <nl> + / * * Gets as an unsigned value . Will convert to unsigned if possible , or will trigger assert error . * / <nl> + unsigned int asUnsignedInt ( ) const ; <nl> / * * Gets as a float value . Will convert to float if possible , or will trigger assert error . * / <nl> float asFloat ( ) const ; <nl> / * * Gets as a double value . Will convert to double if possible , or will trigger assert error . * / <nl> class CC_DLL Value <nl> BYTE , <nl> / / / wrap integer <nl> INTEGER , <nl> + / / / wrap unsigned <nl> + UNSIGNED , <nl> / / / wrap float <nl> FLOAT , <nl> / / / wrap double <nl> class CC_DLL Value <nl> { <nl> unsigned char byteVal ; <nl> int intVal ; <nl> + unsigned int unsignedVal ; <nl> float floatVal ; <nl> double doubleVal ; <nl> bool boolVal ; <nl>
Add support for Unsigned with CCValue ( )
cocos2d/cocos2d-x
0f0c9b627b12bccf0dc7bf15bd80de60f6d9a066
2016-04-22T05:49:20Z
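The cocos2d-x commit above teaches cocos2d::Value about an UNSIGNED payload. A small usage sketch of the new explicit constructor, assignment operator, and asUnsignedInt() accessor follows; the concrete numbers and the demo function name are illustrative assumptions.

```cpp
#include "base/CCValue.h"   // the header extended by the diff above
#include <cassert>

using cocos2d::Value;

// Hypothetical demo exercising the unsigned support added above.
void unsignedValueDemo() {
    Value v(42u);                                   // explicit Value(unsigned int)
    assert(v.getType() == Value::Type::UNSIGNED);
    assert(v.asUnsignedInt() == 42u);
    assert(v.asInt() == 42);                        // cross-type conversion still works
    assert(v.asString() == "42");

    v = 7u;                                         // operator=(unsigned int)
    assert(v.asUnsignedInt() == 7u);
    assert(v.asBool());                             // non-zero unsigned reads as true
}
```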