--- a/xbmc/addons/kodi-dev-kit/include/kodi/addon-instance/Peripheral.h
+++ b/xbmc/addons/kodi-dev-kit/include/kodi/addon-instance/Peripheral.h
 
 #include "../AddonBase.h"
 
-namespace kodi { namespace addon { class CInstancePeripheral; } }
+namespace kodi
+{
+namespace addon
+{
+class CInstancePeripheral;
+}
+} // namespace kodi
 
 /* indicates a joystick has no preference for port number */
-#define NO_PORT_REQUESTED     (-1)
+#define NO_PORT_REQUESTED (-1)
 
 /* joystick's driver button/hat/axis index is unknown */
-#define DRIVER_INDEX_UNKNOWN  (-1)
+#define DRIVER_INDEX_UNKNOWN (-1)
 
 extern "C"
 {
@@ ... @@ extern "C"
    */
  typedef enum PERIPHERAL_ERROR
  {
-    PERIPHERAL_NO_ERROR                 = 0,  // no error occurred
-    PERIPHERAL_ERROR_UNKNOWN            = -1, // an unknown error occurred
-    PERIPHERAL_ERROR_FAILED             = -2, // the command failed
-    PERIPHERAL_ERROR_INVALID_PARAMETERS = -3, // the parameters of the method are invalid for this operation
-    PERIPHERAL_ERROR_NOT_IMPLEMENTED    = -4, // the method that the frontend called is not implemented
-    PERIPHERAL_ERROR_NOT_CONNECTED      = -5, // no peripherals are connected
-    PERIPHERAL_ERROR_CONNECTION_FAILED  = -6, // peripherals are connected, but command was interrupted
+    PERIPHERAL_NO_ERROR = 0, // no error occurred
+    PERIPHERAL_ERROR_UNKNOWN = -1, // an unknown error occurred
+    PERIPHERAL_ERROR_FAILED = -2, // the command failed
+    PERIPHERAL_ERROR_INVALID_PARAMETERS =
+        -3, // the parameters of the method are invalid for this operation
+    PERIPHERAL_ERROR_NOT_IMPLEMENTED = -4, // the method that the frontend called is not implemented
+    PERIPHERAL_ERROR_NOT_CONNECTED = -5, // no peripherals are connected
+    PERIPHERAL_ERROR_CONNECTION_FAILED =
+        -6, // peripherals are connected, but command was interrupted
  } PERIPHERAL_ERROR;
 
  /*!
@@ ... @@ extern "C"
    */
  typedef struct PERIPHERAL_INFO
  {
-    PERIPHERAL_TYPE type;  /*!< @brief type of peripheral */
-    char* name;            /*!< @brief name of peripheral */
-    uint16_t vendor_id;    /*!< @brief vendor ID of peripheral, 0x0000 if unknown */
-    uint16_t product_id;   /*!< @brief product ID of peripheral, 0x0000 if unknown */
-    unsigned int index;    /*!< @brief the order in which the add-on identified this peripheral */
+    PERIPHERAL_TYPE type; /*!< @brief type of peripheral */
+    char* name; /*!< @brief name of peripheral */
+    uint16_t vendor_id; /*!< @brief vendor ID of peripheral, 0x0000 if unknown */
+    uint16_t product_id; /*!< @brief product ID of peripheral, 0x0000 if unknown */
+    unsigned int index; /*!< @brief the order in which the add-on identified this peripheral */
  } ATTRIBUTE_PACKED PERIPHERAL_INFO;
 
  /*!
@@ ... @@ extern "C"
    */
  typedef struct PERIPHERAL_CAPABILITIES
  {
-    bool provides_joysticks;       /*!< @brief true if the add-on provides joysticks */
     bool provides_joystick_rumble;
     bool provides_joystick_power_off;
-    bool provides_buttonmaps;      /*!< @brief true if the add-on provides button maps */
+    bool provides_buttonmaps; /*!< @brief true if the add-on provides button maps */
  } ATTRIBUTE_PACKED PERIPHERAL_CAPABILITIES;
  ///}
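For orientation, a minimal sketch of how an add-on built against this header might fill PERIPHERAL_CAPABILITIES in its GetCapabilities() override (the virtual appears further down in this diff). The class name MyPeripheralAddon is hypothetical:

#include <kodi/addon-instance/Peripheral.h>

// Hypothetical add-on instance; only the capability handshake is shown.
class MyPeripheralAddon : public kodi::addon::CInstancePeripheral
{
public:
  explicit MyPeripheralAddon(KODI_HANDLE instance) : CInstancePeripheral(instance) {}

  void GetCapabilities(PERIPHERAL_CAPABILITIES& capabilities) override
  {
    // Advertise only what the add-on actually implements.
    capabilities.provides_joysticks = true;
    capabilities.provides_joystick_rumble = false;
    capabilities.provides_joystick_power_off = false;
    capabilities.provides_buttonmaps = true;
  }
};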
 
@@ ... @@ extern "C"
    */
  typedef enum PERIPHERAL_EVENT_TYPE
  {
-    PERIPHERAL_EVENT_TYPE_NONE,           /*!< @brief unknown event */
-    PERIPHERAL_EVENT_TYPE_DRIVER_BUTTON,  /*!< @brief state changed for joystick driver button */
-    PERIPHERAL_EVENT_TYPE_DRIVER_HAT,     /*!< @brief state changed for joystick driver hat */
-    PERIPHERAL_EVENT_TYPE_DRIVER_AXIS,    /*!< @brief state changed for joystick driver axis */
-    PERIPHERAL_EVENT_TYPE_SET_MOTOR,      /*!< @brief set the state for joystick rumble motor */
+    PERIPHERAL_EVENT_TYPE_NONE, /*!< @brief unknown event */
+    PERIPHERAL_EVENT_TYPE_DRIVER_BUTTON, /*!< @brief state changed for joystick driver button */
+    PERIPHERAL_EVENT_TYPE_DRIVER_HAT, /*!< @brief state changed for joystick driver hat */
+    PERIPHERAL_EVENT_TYPE_DRIVER_AXIS, /*!< @brief state changed for joystick driver axis */
+    PERIPHERAL_EVENT_TYPE_SET_MOTOR, /*!< @brief set the state for joystick rumble motor */
  } PERIPHERAL_EVENT_TYPE;
 
  /*!
@@ ... @@ extern "C"
    */
  typedef enum JOYSTICK_STATE_BUTTON
  {
-    JOYSTICK_STATE_BUTTON_UNPRESSED = 0x0,  /*!< @brief button is released */
-    JOYSTICK_STATE_BUTTON_PRESSED   = 0x1,  /*!< @brief button is pressed */
+    JOYSTICK_STATE_BUTTON_UNPRESSED = 0x0, /*!< @brief button is released */
+    JOYSTICK_STATE_BUTTON_PRESSED = 0x1, /*!< @brief button is pressed */
  } JOYSTICK_STATE_BUTTON;
 
  /*!
@@ ... @@ extern "C"
    */
  typedef enum JOYSTICK_STATE_HAT
  {
-    JOYSTICK_STATE_HAT_UNPRESSED  = 0x0,  /*!< @brief no directions are pressed */
-    JOYSTICK_STATE_HAT_LEFT       = 0x1,  /*!< @brief only left is pressed */
-    JOYSTICK_STATE_HAT_RIGHT      = 0x2,  /*!< @brief only right is pressed */
-    JOYSTICK_STATE_HAT_UP         = 0x4,  /*!< @brief only up is pressed */
-    JOYSTICK_STATE_HAT_DOWN       = 0x8,  /*!< @brief only down is pressed */
-    JOYSTICK_STATE_HAT_LEFT_UP    = JOYSTICK_STATE_HAT_LEFT  | JOYSTICK_STATE_HAT_UP,
-    JOYSTICK_STATE_HAT_LEFT_DOWN  = JOYSTICK_STATE_HAT_LEFT  | JOYSTICK_STATE_HAT_DOWN,
-    JOYSTICK_STATE_HAT_RIGHT_UP   = JOYSTICK_STATE_HAT_RIGHT | JOYSTICK_STATE_HAT_UP,
+    JOYSTICK_STATE_HAT_UNPRESSED = 0x0, /*!< @brief no directions are pressed */
+    JOYSTICK_STATE_HAT_LEFT = 0x1, /*!< @brief only left is pressed */
+    JOYSTICK_STATE_HAT_RIGHT = 0x2, /*!< @brief only right is pressed */
+    JOYSTICK_STATE_HAT_UP = 0x4, /*!< @brief only up is pressed */
+    JOYSTICK_STATE_HAT_DOWN = 0x8, /*!< @brief only down is pressed */
+    JOYSTICK_STATE_HAT_LEFT_UP = JOYSTICK_STATE_HAT_LEFT | JOYSTICK_STATE_HAT_UP,
+    JOYSTICK_STATE_HAT_LEFT_DOWN = JOYSTICK_STATE_HAT_LEFT | JOYSTICK_STATE_HAT_DOWN,
+    JOYSTICK_STATE_HAT_RIGHT_UP = JOYSTICK_STATE_HAT_RIGHT | JOYSTICK_STATE_HAT_UP,
     JOYSTICK_STATE_HAT_RIGHT_DOWN = JOYSTICK_STATE_HAT_RIGHT | JOYSTICK_STATE_HAT_DOWN,
  } JOYSTICK_STATE_HAT;
 
@@ ... @@ extern "C"
     /*! @brief The index of the event source */
     unsigned int driver_index;
 
-    JOYSTICK_STATE_BUTTON  driver_button_state;
-    JOYSTICK_STATE_HAT     driver_hat_state;
-    JOYSTICK_STATE_AXIS    driver_axis_state;
-    JOYSTICK_STATE_MOTOR   motor_state;
+    JOYSTICK_STATE_BUTTON driver_button_state;
+    JOYSTICK_STATE_HAT driver_hat_state;
+    JOYSTICK_STATE_AXIS driver_axis_state;
+    JOYSTICK_STATE_MOTOR motor_state;
  } ATTRIBUTE_PACKED PERIPHERAL_EVENT;
  ///}
 
@@ ... @@ extern "C"
    */
  typedef struct JOYSTICK_INFO
  {
-    PERIPHERAL_INFO peripheral;  /*!< @brief peripheral info for this joystick */
-    char* provider;              /*!< @brief name of the driver or interface providing the joystick */
-    int requested_port;          /*!< @brief requested port number (such as for 360 controllers), or NO_PORT_REQUESTED */
-    unsigned int button_count;   /*!< @brief number of buttons reported by the driver */
-    unsigned int hat_count;      /*!< @brief number of hats reported by the driver */
-    unsigned int axis_count;     /*!< @brief number of axes reported by the driver */
-    unsigned int motor_count;    /*!< @brief number of motors reported by the driver */
-    bool supports_poweroff;      /*!< @brief whether the joystick supports being powered off */
+    PERIPHERAL_INFO peripheral; /*!< @brief peripheral info for this joystick */
+    char* provider; /*!< @brief name of the driver or interface providing the joystick */
+    int requested_port; /*!< @brief requested port number (such as for 360 controllers), or NO_PORT_REQUESTED */
+    unsigned int button_count; /*!< @brief number of buttons reported by the driver */
+    unsigned int hat_count; /*!< @brief number of hats reported by the driver */
+    unsigned int axis_count; /*!< @brief number of axes reported by the driver */
+    unsigned int motor_count; /*!< @brief number of motors reported by the driver */
+    bool supports_poweroff; /*!< @brief whether the joystick supports being powered off */
  } ATTRIBUTE_PACKED JOYSTICK_INFO;
 
  /*!
@@ ... @@ extern "C"
    */
  typedef struct JOYSTICK_DRIVER_BUTTON
  {
-    int              index;
+    int index;
  } ATTRIBUTE_PACKED JOYSTICK_DRIVER_BUTTON;
 
  /*!
@@ ... @@ extern "C"
    */
  typedef struct JOYSTICK_DRIVER_HAT
  {
-    int                           index;
+    int index;
     JOYSTICK_DRIVER_HAT_DIRECTION direction;
  } ATTRIBUTE_PACKED JOYSTICK_DRIVER_HAT;
 
@@ ... @@ extern "C"
  typedef enum JOYSTICK_DRIVER_SEMIAXIS_DIRECTION
  {
     JOYSTICK_DRIVER_SEMIAXIS_NEGATIVE = -1, /*!< @brief negative half of the axis */
-    JOYSTICK_DRIVER_SEMIAXIS_UNKNOWN  = 0,  /*!< @brief unknown direction */
-    JOYSTICK_DRIVER_SEMIAXIS_POSITIVE = 1,  /*!< @brief positive half of the axis */
+    JOYSTICK_DRIVER_SEMIAXIS_UNKNOWN = 0, /*!< @brief unknown direction */
+    JOYSTICK_DRIVER_SEMIAXIS_POSITIVE = 1, /*!< @brief positive half of the axis */
  } JOYSTICK_DRIVER_SEMIAXIS_DIRECTION;
 
  /*!
@@ ... @@ extern "C"
    */
  typedef struct JOYSTICK_DRIVER_SEMIAXIS
  {
-    int                                index;
-    int                                center;
+    int index;
+    int center;
     JOYSTICK_DRIVER_SEMIAXIS_DIRECTION direction;
-    unsigned int                       range;
+    unsigned int range;
  } ATTRIBUTE_PACKED JOYSTICK_DRIVER_SEMIAXIS;
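As a quick illustration of the state enums above, a hedged sketch of populating one PERIPHERAL_EVENT for a button press; only the members visible in this hunk are set, and the struct's remaining members (elided by the hunk) would also need filling:

// Zero-initialize so the members not shown in this hunk are cleared too.
PERIPHERAL_EVENT event = {};
event.driver_index = 3; // made-up driver button index
event.driver_button_state = JOYSTICK_STATE_BUTTON_PRESSED;
// Hat directions combine as flags, e.g. JOYSTICK_STATE_HAT_LEFT_UP == LEFT | UP.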
<nl> extern " C " <nl> * / <nl> typedef struct JOYSTICK_DRIVER_MOTOR <nl> { <nl> - int index ; <nl> + int index ; <nl> } ATTRIBUTE_PACKED JOYSTICK_DRIVER_MOTOR ; <nl> <nl> / * ! <nl> extern " C " <nl> * / <nl> typedef struct JOYSTICK_DRIVER_PRIMITIVE <nl> { <nl> - JOYSTICK_DRIVER_PRIMITIVE_TYPE type ; <nl> + JOYSTICK_DRIVER_PRIMITIVE_TYPE type ; <nl> union <nl> { <nl> - struct JOYSTICK_DRIVER_BUTTON button ; <nl> - struct JOYSTICK_DRIVER_HAT hat ; <nl> + struct JOYSTICK_DRIVER_BUTTON button ; <nl> + struct JOYSTICK_DRIVER_HAT hat ; <nl> struct JOYSTICK_DRIVER_SEMIAXIS semiaxis ; <nl> - struct JOYSTICK_DRIVER_MOTOR motor ; <nl> - struct JOYSTICK_DRIVER_KEY key ; <nl> + struct JOYSTICK_DRIVER_MOTOR motor ; <nl> + struct JOYSTICK_DRIVER_KEY key ; <nl> struct JOYSTICK_DRIVER_MOUSE_BUTTON mouse ; <nl> struct JOYSTICK_DRIVER_RELPOINTER relpointer ; <nl> } ; <nl> extern " C " <nl> * / <nl> typedef struct JOYSTICK_FEATURE <nl> { <nl> - char * name ; <nl> - JOYSTICK_FEATURE_TYPE type ; <nl> - struct JOYSTICK_DRIVER_PRIMITIVE primitives [ JOYSTICK_PRIMITIVE_MAX ] ; <nl> + char * name ; <nl> + JOYSTICK_FEATURE_TYPE type ; <nl> + struct JOYSTICK_DRIVER_PRIMITIVE primitives [ JOYSTICK_PRIMITIVE_MAX ] ; <nl> } ATTRIBUTE_PACKED JOYSTICK_FEATURE ; <nl> / / / } <nl> <nl> typedef struct AddonProps_Peripheral <nl> { <nl> - const char * user_path ; / * ! < @ brief path to the user profile * / <nl> - const char * addon_path ; / * ! < @ brief path to this add - on * / <nl> + const char * user_path ; / * ! < @ brief path to the user profile * / <nl> + const char * addon_path ; / * ! < @ brief path to this add - on * / <nl> } ATTRIBUTE_PACKED AddonProps_Peripheral ; <nl> <nl> struct AddonInstance_Peripheral ; <nl> extern " C " <nl> { <nl> KODI_HANDLE kodiInstance ; <nl> void ( * trigger_scan ) ( void * kodiInstance ) ; <nl> - void ( * refresh_button_maps ) ( void * kodiInstance , const char * device_name , const char * controller_id ) ; <nl> - unsigned int ( * feature_count ) ( void * kodiInstance , const char * controller_id , JOYSTICK_FEATURE_TYPE type ) ; <nl> - JOYSTICK_FEATURE_TYPE ( * feature_type ) ( void * kodiInstance , const char * controller_id , const char * feature_name ) ; <nl> + void ( * refresh_button_maps ) ( void * kodiInstance , <nl> + const char * device_name , <nl> + const char * controller_id ) ; <nl> + unsigned int ( * feature_count ) ( void * kodiInstance , <nl> + const char * controller_id , <nl> + JOYSTICK_FEATURE_TYPE type ) ; <nl> + JOYSTICK_FEATURE_TYPE ( * feature_type ) <nl> + ( void * kodiInstance , const char * controller_id , const char * feature_name ) ; <nl> } AddonToKodiFuncTable_Peripheral ; <nl> <nl> / / ! 
 
@@ ... @@ typedef struct AddonProps_Peripheral
  {
-    const char* user_path;   /*!< @brief path to the user profile */
-    const char* addon_path;  /*!< @brief path to this add-on */
+    const char* user_path; /*!< @brief path to the user profile */
+    const char* addon_path; /*!< @brief path to this add-on */
  } ATTRIBUTE_PACKED AddonProps_Peripheral;
 
  struct AddonInstance_Peripheral;
@@ ... @@ extern "C"
  {
    KODI_HANDLE kodiInstance;
    void (*trigger_scan)(void* kodiInstance);
-    void (*refresh_button_maps)(void* kodiInstance, const char* device_name, const char* controller_id);
-    unsigned int (*feature_count)(void* kodiInstance, const char* controller_id, JOYSTICK_FEATURE_TYPE type);
-    JOYSTICK_FEATURE_TYPE (*feature_type)(void* kodiInstance, const char* controller_id, const char* feature_name);
+    void (*refresh_button_maps)(void* kodiInstance,
+                                const char* device_name,
+                                const char* controller_id);
+    unsigned int (*feature_count)(void* kodiInstance,
+                                  const char* controller_id,
+                                  JOYSTICK_FEATURE_TYPE type);
+    JOYSTICK_FEATURE_TYPE(*feature_type)
+    (void* kodiInstance, const char* controller_id, const char* feature_name);
  } AddonToKodiFuncTable_Peripheral;
 
  //! @todo Mouse, light gun, multitouch
@@ ... @@ extern "C"
  {
    kodi::addon::CInstancePeripheral* addonInstance;
 
-    void (__cdecl* get_capabilities)(const AddonInstance_Peripheral* addonInstance, PERIPHERAL_CAPABILITIES* capabilities);
-    PERIPHERAL_ERROR (__cdecl* perform_device_scan)(const AddonInstance_Peripheral* addonInstance, unsigned int* peripheral_count, PERIPHERAL_INFO** scan_results);
-    void (__cdecl* free_scan_results)(const AddonInstance_Peripheral* addonInstance, unsigned int peripheral_count, PERIPHERAL_INFO* scan_results);
-    PERIPHERAL_ERROR (__cdecl* get_events)(const AddonInstance_Peripheral* addonInstance, unsigned int* event_count, PERIPHERAL_EVENT** events);
-    void (__cdecl* free_events)(const AddonInstance_Peripheral* addonInstance, unsigned int event_count, PERIPHERAL_EVENT* events);
-    bool (__cdecl* send_event)(const AddonInstance_Peripheral* addonInstance, const PERIPHERAL_EVENT* event);
+    void(__cdecl* get_capabilities)(const AddonInstance_Peripheral* addonInstance,
+                                    PERIPHERAL_CAPABILITIES* capabilities);
+    PERIPHERAL_ERROR(__cdecl* perform_device_scan)
+    (const AddonInstance_Peripheral* addonInstance,
+     unsigned int* peripheral_count,
+     PERIPHERAL_INFO** scan_results);
+    void(__cdecl* free_scan_results)(const AddonInstance_Peripheral* addonInstance,
+                                     unsigned int peripheral_count,
+                                     PERIPHERAL_INFO* scan_results);
+    PERIPHERAL_ERROR(__cdecl* get_events)
+    (const AddonInstance_Peripheral* addonInstance,
+     unsigned int* event_count,
+     PERIPHERAL_EVENT** events);
+    void(__cdecl* free_events)(const AddonInstance_Peripheral* addonInstance,
+                               unsigned int event_count,
+                               PERIPHERAL_EVENT* events);
+    bool(__cdecl* send_event)(const AddonInstance_Peripheral* addonInstance,
+                              const PERIPHERAL_EVENT* event);
 
    /// @name Joystick operations
    ///{
-    PERIPHERAL_ERROR (__cdecl* get_joystick_info)(const AddonInstance_Peripheral* addonInstance, unsigned int index, JOYSTICK_INFO* info);
-    void (__cdecl* free_joystick_info)(const AddonInstance_Peripheral* addonInstance, JOYSTICK_INFO* info);
-    PERIPHERAL_ERROR (__cdecl* get_features)(const AddonInstance_Peripheral* addonInstance, const JOYSTICK_INFO* joystick, const char* controller_id, unsigned int* feature_count, JOYSTICK_FEATURE** features);
-    void (__cdecl* free_features)(const AddonInstance_Peripheral* addonInstance, unsigned int feature_count, JOYSTICK_FEATURE* features);
-    PERIPHERAL_ERROR (__cdecl* map_features)(const AddonInstance_Peripheral* addonInstance, const JOYSTICK_INFO* joystick, const char* controller_id, unsigned int feature_count, const JOYSTICK_FEATURE* features);
-    PERIPHERAL_ERROR (__cdecl* get_ignored_primitives)(const AddonInstance_Peripheral* addonInstance, const JOYSTICK_INFO* joystick, unsigned int* feature_count, JOYSTICK_DRIVER_PRIMITIVE** primitives);
-    void (__cdecl* free_primitives)(const AddonInstance_Peripheral* addonInstance, unsigned int, JOYSTICK_DRIVER_PRIMITIVE* primitives);
-    PERIPHERAL_ERROR (__cdecl* set_ignored_primitives)(const AddonInstance_Peripheral* addonInstance, const JOYSTICK_INFO* joystick, unsigned int primitive_count, const JOYSTICK_DRIVER_PRIMITIVE* primitives);
-    void (__cdecl* save_button_map)(const AddonInstance_Peripheral* addonInstance, const JOYSTICK_INFO* joystick);
-    void (__cdecl* revert_button_map)(const AddonInstance_Peripheral* addonInstance, const JOYSTICK_INFO* joystick);
-    void (__cdecl* reset_button_map)(const AddonInstance_Peripheral* addonInstance, const JOYSTICK_INFO* joystick, const char* controller_id);
-    void (__cdecl* power_off_joystick)(const AddonInstance_Peripheral* addonInstance, unsigned int index);
+    PERIPHERAL_ERROR(__cdecl* get_joystick_info)
+    (const AddonInstance_Peripheral* addonInstance, unsigned int index, JOYSTICK_INFO* info);
+    void(__cdecl* free_joystick_info)(const AddonInstance_Peripheral* addonInstance,
+                                      JOYSTICK_INFO* info);
+    PERIPHERAL_ERROR(__cdecl* get_features)
+    (const AddonInstance_Peripheral* addonInstance,
+     const JOYSTICK_INFO* joystick,
+     const char* controller_id,
+     unsigned int* feature_count,
+     JOYSTICK_FEATURE** features);
+    void(__cdecl* free_features)(const AddonInstance_Peripheral* addonInstance,
+                                 unsigned int feature_count,
+                                 JOYSTICK_FEATURE* features);
+    PERIPHERAL_ERROR(__cdecl* map_features)
+    (const AddonInstance_Peripheral* addonInstance,
+     const JOYSTICK_INFO* joystick,
+     const char* controller_id,
+     unsigned int feature_count,
+     const JOYSTICK_FEATURE* features);
+    PERIPHERAL_ERROR(__cdecl* get_ignored_primitives)
+    (const AddonInstance_Peripheral* addonInstance,
+     const JOYSTICK_INFO* joystick,
+     unsigned int* feature_count,
+     JOYSTICK_DRIVER_PRIMITIVE** primitives);
+    void(__cdecl* free_primitives)(const AddonInstance_Peripheral* addonInstance,
+                                   unsigned int,
+                                   JOYSTICK_DRIVER_PRIMITIVE* primitives);
+    PERIPHERAL_ERROR(__cdecl* set_ignored_primitives)
+    (const AddonInstance_Peripheral* addonInstance,
+     const JOYSTICK_INFO* joystick,
+     unsigned int primitive_count,
+     const JOYSTICK_DRIVER_PRIMITIVE* primitives);
+    void(__cdecl* save_button_map)(const AddonInstance_Peripheral* addonInstance,
+                                   const JOYSTICK_INFO* joystick);
+    void(__cdecl* revert_button_map)(const AddonInstance_Peripheral* addonInstance,
+                                     const JOYSTICK_INFO* joystick);
+    void(__cdecl* reset_button_map)(const AddonInstance_Peripheral* addonInstance,
+                                    const JOYSTICK_INFO* joystick,
+                                    const char* controller_id);
+    void(__cdecl* power_off_joystick)(const AddonInstance_Peripheral* addonInstance,
+                                      unsigned int index);
    ///}
  } KodiToAddonFuncTable_Peripheral;
 
@@ ... @@ namespace kodi
 namespace addon
 {
 
-  class ATTRIBUTE_HIDDEN CInstancePeripheral : public IAddonInstance
+class ATTRIBUTE_HIDDEN CInstancePeripheral : public IAddonInstance
+{
+public:
+  CInstancePeripheral()
+    : IAddonInstance(ADDON_INSTANCE_PERIPHERAL, GetKodiTypeVersion(ADDON_INSTANCE_PERIPHERAL))
   {
-  public:
-    CInstancePeripheral()
-      : IAddonInstance(ADDON_INSTANCE_PERIPHERAL, GetKodiTypeVersion(ADDON_INSTANCE_PERIPHERAL))
-    {
-      if (CAddonBase::m_interface->globalSingleInstance != nullptr)
-        throw std::logic_error("kodi::addon::CInstancePeripheral: Creation of more as one in single instance way is not allowed!");
+    if (CAddonBase::m_interface->globalSingleInstance != nullptr)
+      throw std::logic_error("kodi::addon::CInstancePeripheral: Creation of more as one in single "
+                             "instance way is not allowed!");
+
+    SetAddonStruct(CAddonBase::m_interface->firstKodiInstance);
+    CAddonBase::m_interface->globalSingleInstance = this;
+  }
+
+  explicit CInstancePeripheral(KODI_HANDLE instance, const std::string& kodiVersion = "")
+    : IAddonInstance(ADDON_INSTANCE_PERIPHERAL,
+                     !kodiVersion.empty() ? kodiVersion
+                                          : GetKodiTypeVersion(ADDON_INSTANCE_PERIPHERAL))
+  {
+    if (CAddonBase::m_interface->globalSingleInstance != nullptr)
+      throw std::logic_error("kodi::addon::CInstancePeripheral: Creation of multiple together with "
+                             "single instance way is not allowed!");
 
-      SetAddonStruct(CAddonBase::m_interface->firstKodiInstance);
-      CAddonBase::m_interface->globalSingleInstance = this;
-    }
+    SetAddonStruct(instance);
+  }
 
-    explicit CInstancePeripheral(KODI_HANDLE instance, const std::string& kodiVersion = "")
-      : IAddonInstance(ADDON_INSTANCE_PERIPHERAL,
-                       !kodiVersion.empty() ? kodiVersion
-                                            : GetKodiTypeVersion(ADDON_INSTANCE_PERIPHERAL))
-    {
-      if (CAddonBase::m_interface->globalSingleInstance != nullptr)
-        throw std::logic_error("kodi::addon::CInstancePeripheral: Creation of multiple together with single instance way is not allowed!");
+  ~CInstancePeripheral() override = default;
 
-      SetAddonStruct(instance);
-    }
+  /// @name Peripheral operations
+  ///{
+  /*!
+   * @brief Get the list of features that this add-on provides
+   * @param capabilities The add-on's capabilities.
+   * @remarks Valid implementation required.
-     *
-     * Called by the frontend to query the add-on's capabilities and supported
-     * peripherals. All capabilities that the add-on supports should be set to true.
-     *
-     */
-    virtual void GetCapabilities(PERIPHERAL_CAPABILITIES& capabilities) { }
-
-    /*!
-     * @brief Perform a scan for joysticks
-     * @param peripheral_count Assigned to the number of peripherals allocated
-     * @param scan_results Assigned to allocated memory
-     * @return PERIPHERAL_NO_ERROR if successful; peripherals must be freed using
-     *         FreeScanResults() in this case
-     *
-     * The frontend calls this when a hardware change is detected. If an add-on
-     * detects a hardware change, it can trigger this function using the
-     * TriggerScan() callback.
-     */
-    virtual PERIPHERAL_ERROR PerformDeviceScan(unsigned int* peripheral_count, PERIPHERAL_INFO** scan_results) { return PERIPHERAL_ERROR_NOT_IMPLEMENTED; }
-
-    /*!
-     * @brief Free the memory allocated in PerformDeviceScan()
-     *
-     * Must be called if PerformDeviceScan() returns PERIPHERAL_NO_ERROR.
-     *
-     * @param peripheral_count The number of events allocated for the events array
-     * @param scan_results The array of allocated peripherals
-     */
-    virtual void FreeScanResults(unsigned int peripheral_count, PERIPHERAL_INFO* scan_results) { }
-
-    /*!
-     * @brief Get all events that have occurred since the last call to GetEvents()
-     * @return PERIPHERAL_NO_ERROR if successful; events must be freed using
-     *         FreeEvents() in this case
-     */
-    virtual PERIPHERAL_ERROR GetEvents(unsigned int* event_count, PERIPHERAL_EVENT** events) { return PERIPHERAL_ERROR_NOT_IMPLEMENTED; }
-
-    /*!
-     * @brief Free the memory allocated in GetEvents()
-     *
-     * Must be called if GetEvents() returns PERIPHERAL_NO_ERROR.
-     *
-     * @param event_count The number of events allocated for the events array
-     * @param events The array of allocated events
-     */
-    virtual void FreeEvents(unsigned int event_count, PERIPHERAL_EVENT* events) { }
-
-    /*!
-     * @brief Send an input event to the peripheral
-     * @param event The input event
-     * @return true if the event was handled, false otherwise
-     */
-    virtual bool SendEvent(const PERIPHERAL_EVENT* event) { return false; }
-    ///}
+  /*!
+   * @brief Free the memory allocated in PerformDeviceScan()
+   *
+   * Must be called if PerformDeviceScan() returns PERIPHERAL_NO_ERROR.
+   *
+   * @param peripheral_count The number of events allocated for the events array
+   * @param scan_results The array of allocated peripherals
+   */
+  virtual void FreeScanResults(unsigned int peripheral_count, PERIPHERAL_INFO* scan_results) { }
 
-    /// @name Joystick operations
-    /*!
-     * @note #define PERIPHERAL_ADDON_JOYSTICKS before including kodi_peripheral_dll.h
-     * in the add-on if the add-on provides joysticks and add provides_joysticks="true"
-     * to the kodi.peripheral extension point node in addon.xml.
-     */
-    ///{
-    /*!
-     * @brief Get extended info about an attached joystick
-     * @param index The joystick's driver index
-     * @param info The container for the allocated joystick info
-     * @return PERIPHERAL_NO_ERROR if successful; array must be freed using
-     *         FreeJoystickInfo() in this case
-     */
-    virtual PERIPHERAL_ERROR GetJoystickInfo(unsigned int index, JOYSTICK_INFO* info) { return PERIPHERAL_ERROR_NOT_IMPLEMENTED; }
-
-    /*!
-     * @brief Free the memory allocated in GetJoystickInfo()
-     */
-    virtual void FreeJoystickInfo(JOYSTICK_INFO* info) { }
-
-    /*!
-     * @brief Get the features that allow translating the joystick into the controller profile
-     * @param joystick The device's joystick properties; unknown values may be left at their default
-     * @param controller_id The controller profile being requested, e.g. game.controller.default
-     * @param feature_count The number of features allocated for the features array
-     * @param features The array of allocated features
-     * @return PERIPHERAL_NO_ERROR if successful; array must be freed using
-     *         FreeButtonMap() in this case
-     */
-    virtual PERIPHERAL_ERROR GetFeatures(const JOYSTICK_INFO* joystick, const char* controller_id,
-                                         unsigned int* feature_count, JOYSTICK_FEATURE** features) { return PERIPHERAL_ERROR_NOT_IMPLEMENTED; }
-
-    /*!
-     * @brief Free the memory allocated in GetFeatures()
-     *
-     * Must be called if GetFeatures() returns PERIPHERAL_NO_ERROR.
-     *
-     * @param feature_count The number of features allocated for the features array
-     * @param features The array of allocated features
-     */
-    virtual void FreeFeatures(unsigned int feature_count, JOYSTICK_FEATURE* features) { }
-
-    /*!
-     * @brief Add or update joystick features
-     * @param joystick The device's joystick properties; unknown values may be left at their default
-     * @param controller_id The game controller profile being updated
-     * @param feature_count The number of features in the features array
-     * @param features The array of features
-     * @return PERIPHERAL_NO_ERROR if successful
-     */
-    virtual PERIPHERAL_ERROR MapFeatures(const JOYSTICK_INFO* joystick, const char* controller_id,
-                                         unsigned int feature_count, const JOYSTICK_FEATURE* features) { return PERIPHERAL_ERROR_NOT_IMPLEMENTED; }
-
-    /*!
-     * @brief Get the driver primitives that should be ignored while mapping the device
-     * @param joystick The device's joystick properties; unknown values may be left at their default
-     * @param primitive_count The number of features allocated for the primitives array
-     * @param primitives The array of allocated driver primitives to be ignored
-     * @return PERIPHERAL_NO_ERROR if successful; array must be freed using
-     *         FreePrimitives() in this case
-     */
-    virtual PERIPHERAL_ERROR GetIgnoredPrimitives(const JOYSTICK_INFO* joystick,
-                                                  unsigned int* primitive_count,
-                                                  JOYSTICK_DRIVER_PRIMITIVE** primitives) { return PERIPHERAL_ERROR_NOT_IMPLEMENTED; }
-
-    /*!
-     * @brief Free the memory allocated in GetIgnoredPrimitives()
-     *
-     * Must be called if GetIgnoredPrimitives() returns PERIPHERAL_NO_ERROR.
-     *
-     * @param primitive_count The number of driver primitives allocated for the primitives array
-     * @param primitives The array of allocated driver primitives
-     */
-    virtual void FreePrimitives(unsigned int primitive_count, JOYSTICK_DRIVER_PRIMITIVE* primitives) { }
-
-    /*!
-     * @brief Set the list of driver primitives that are ignored for the device
-     * @param joystick The device's joystick properties; unknown values may be left at their default
-     * @param primitive_count The number of driver features in the primitives array
-     * @param primitives The array of driver primitives to ignore
-     * @return PERIPHERAL_NO_ERROR if successful
-     */
-    virtual PERIPHERAL_ERROR SetIgnoredPrimitives(const JOYSTICK_INFO* joystick,
-                                                  unsigned int primitive_count,
-                                                  const JOYSTICK_DRIVER_PRIMITIVE* primitives) { return PERIPHERAL_ERROR_NOT_IMPLEMENTED; }
-
-    /*!
-     * @brief Save the button map for the given joystick
-     * @param joystick The device's joystick properties
-     */
-    virtual void SaveButtonMap(const JOYSTICK_INFO* joystick) { }
-
-    /*!
-     * @brief Revert the button map to the last time it was loaded or committed to disk
-     * @param joystick The device's joystick properties
-     */
-    virtual void RevertButtonMap(const JOYSTICK_INFO* joystick) { }
-
-    /*!
-     * @brief Reset the button map for the given joystick and controller profile ID
-     * @param joystick The device's joystick properties
-     * @param controller_id The game controller profile being reset
-     */
-    virtual void ResetButtonMap(const JOYSTICK_INFO* joystick, const char* controller_id) { }
-
-    /*!
-     * @brief Powers off the given joystick if supported
-     * @param index The joystick's driver index
-     */
-    virtual void PowerOffJoystick(unsigned int index) { }
-
-    const std::string AddonPath() const
-    {
-      return m_instanceData->props.addon_path;
-    }
+  /*!
+   * @brief Get all events that have occurred since the last call to GetEvents()
+   * @return PERIPHERAL_NO_ERROR if successful; events must be freed using
+   *         FreeEvents() in this case
+   */
+  virtual PERIPHERAL_ERROR GetEvents(unsigned int* event_count, PERIPHERAL_EVENT** events)
+  {
+    return PERIPHERAL_ERROR_NOT_IMPLEMENTED;
+  }
 
-    const std::string UserPath() const
-    {
-      return m_instanceData->props.user_path;
-    }
-
-    /*!
-     * @brief Trigger a scan for peripherals
-     *
-     * The add-on calls this if a change in hardware is detected.
-     */
-    void TriggerScan(void)
-    {
-      return m_instanceData->toKodi.trigger_scan(m_instanceData->toKodi.kodiInstance);
-    }
-
-    /*!
-     * @brief Notify the frontend that button maps have changed
-     *
-     * @param [optional] deviceName The name of the device to refresh, or empty/null for all devices
-     * @param [optional] controllerId The controller ID to refresh, or empty/null for all controllers
-     */
-    void RefreshButtonMaps(const std::string& deviceName = "", const std::string& controllerId = "")
-    {
-      return m_instanceData->toKodi.refresh_button_maps(m_instanceData->toKodi.kodiInstance, deviceName.c_str(), controllerId.c_str());
-    }
-
-    /*!
-     * @brief Return the number of features belonging to the specified controller
-     *
-     * @param controllerId The controller ID to enumerate
-     * @param type [optional] Type to filter by, or JOYSTICK_FEATURE_TYPE_UNKNOWN for all features
-     *
-     * @return The number of features matching the request parameters
-     */
-    unsigned int FeatureCount(const std::string& controllerId, JOYSTICK_FEATURE_TYPE type = JOYSTICK_FEATURE_TYPE_UNKNOWN)
-    {
-      return m_instanceData->toKodi.feature_count(m_instanceData->toKodi.kodiInstance, controllerId.c_str(), type);
-    }
-
-    /*!
-     * @brief Return the type of the feature
-     *
-     * @param controllerId The controller ID to check
-     * @param featureName The feature to check
-     *
-     * @return The type of the specified feature, or JOYSTICK_FEATURE_TYPE_UNKNOWN
-     * if unknown
-     */
-    JOYSTICK_FEATURE_TYPE FeatureType(const std::string& controllerId, const std::string& featureName)
-    {
-      return m_instanceData->toKodi.feature_type(m_instanceData->toKodi.kodiInstance, controllerId.c_str(), featureName.c_str());
-    }
+  /*!
+   * @brief Free the memory allocated in GetEvents()
+   *
+   * Must be called if GetEvents() returns PERIPHERAL_NO_ERROR.
+   *
+   * @param event_count The number of events allocated for the events array
+   * @param events The array of allocated events
+   */
+  virtual void FreeEvents(unsigned int event_count, PERIPHERAL_EVENT* events) { }
 
-  private:
-    void SetAddonStruct(KODI_HANDLE instance)
-    {
-      if (instance == nullptr)
-        throw std::logic_error("kodi::addon::CInstancePeripheral: Creation with empty addon structure not allowed, table must be given from Kodi!");
-
-      m_instanceData = static_cast<AddonInstance_Peripheral*>(instance);
-      m_instanceData->toAddon.addonInstance = this;
-
-      m_instanceData->toAddon.get_capabilities = ADDON_GetCapabilities;
-      m_instanceData->toAddon.perform_device_scan = ADDON_PerformDeviceScan;
-      m_instanceData->toAddon.free_scan_results = ADDON_FreeScanResults;
-      m_instanceData->toAddon.get_events = ADDON_GetEvents;
-      m_instanceData->toAddon.free_events = ADDON_FreeEvents;
-      m_instanceData->toAddon.send_event = ADDON_SendEvent;
-
-      m_instanceData->toAddon.get_joystick_info = ADDON_GetJoystickInfo;
-      m_instanceData->toAddon.free_joystick_info = ADDON_FreeJoystickInfo;
-      m_instanceData->toAddon.get_features = ADDON_GetFeatures;
-      m_instanceData->toAddon.free_features = ADDON_FreeFeatures;
-      m_instanceData->toAddon.map_features = ADDON_MapFeatures;
-      m_instanceData->toAddon.get_ignored_primitives = ADDON_GetIgnoredPrimitives;
-      m_instanceData->toAddon.free_primitives = ADDON_FreePrimitives;
-      m_instanceData->toAddon.set_ignored_primitives = ADDON_SetIgnoredPrimitives;
-      m_instanceData->toAddon.save_button_map = ADDON_SaveButtonMap;
-      m_instanceData->toAddon.revert_button_map = ADDON_RevertButtonMap;
-      m_instanceData->toAddon.reset_button_map = ADDON_ResetButtonMap;
-      m_instanceData->toAddon.power_off_joystick = ADDON_PowerOffJoystick;
-    }
-
-    inline static void ADDON_GetCapabilities(const AddonInstance_Peripheral* addonInstance, PERIPHERAL_CAPABILITIES* capabilities)
-    {
-      addonInstance->toAddon.addonInstance->GetCapabilities(*capabilities);
-    }
+  /*!
+   * @brief Send an input event to the peripheral
+   * @param event The input event
+   * @return true if the event was handled, false otherwise
+   */
+  virtual bool SendEvent(const PERIPHERAL_EVENT* event) { return false; }
+  ///}
 
-    inline static PERIPHERAL_ERROR ADDON_PerformDeviceScan(const AddonInstance_Peripheral* addonInstance, unsigned int* peripheral_count, PERIPHERAL_INFO** scan_results)
-    {
-      return addonInstance->toAddon.addonInstance->PerformDeviceScan(peripheral_count, scan_results);
-    }
+  /// @name Joystick operations
+  /*!
+   * @note #define PERIPHERAL_ADDON_JOYSTICKS before including kodi_peripheral_dll.h
+   * in the add-on if the add-on provides joysticks and add provides_joysticks="true"
+   * to the kodi.peripheral extension point node in addon.xml.
+   */
+  ///{
+  /*!
+   * @brief Get extended info about an attached joystick
+   * @param index The joystick's driver index
+   * @param info The container for the allocated joystick info
+   * @return PERIPHERAL_NO_ERROR if successful; array must be freed using
+   *         FreeJoystickInfo() in this case
+   */
+  virtual PERIPHERAL_ERROR GetJoystickInfo(unsigned int index, JOYSTICK_INFO* info)
+  {
+    return PERIPHERAL_ERROR_NOT_IMPLEMENTED;
+  }
 
-    inline static void ADDON_FreeScanResults(const AddonInstance_Peripheral* addonInstance, unsigned int peripheral_count, PERIPHERAL_INFO* scan_results)
-    {
-      addonInstance->toAddon.addonInstance->FreeScanResults(peripheral_count, scan_results);
-    }
+  /*!
+   * @brief Free the memory allocated in GetJoystickInfo()
+   */
+  virtual void FreeJoystickInfo(JOYSTICK_INFO* info) { }
 
-    inline static PERIPHERAL_ERROR ADDON_GetEvents(const AddonInstance_Peripheral* addonInstance, unsigned int* event_count, PERIPHERAL_EVENT** events)
-    {
-      return addonInstance->toAddon.addonInstance->GetEvents(event_count, events);
-    }
+  /*!
+   * @brief Get the features that allow translating the joystick into the controller profile
+   * @param joystick The device's joystick properties; unknown values may be left at their default
+   * @param controller_id The controller profile being requested, e.g. game.controller.default
+   * @param feature_count The number of features allocated for the features array
+   * @param features The array of allocated features
+   * @return PERIPHERAL_NO_ERROR if successful; array must be freed using
+   *         FreeButtonMap() in this case
+   */
+  virtual PERIPHERAL_ERROR GetFeatures(const JOYSTICK_INFO* joystick,
+                                       const char* controller_id,
+                                       unsigned int* feature_count,
+                                       JOYSTICK_FEATURE** features)
+  {
+    return PERIPHERAL_ERROR_NOT_IMPLEMENTED;
+  }
 
-    inline static void ADDON_FreeEvents(const AddonInstance_Peripheral* addonInstance, unsigned int event_count, PERIPHERAL_EVENT* events)
-    {
-      addonInstance->toAddon.addonInstance->FreeEvents(event_count, events);
-    }
+  /*!
+   * @brief Free the memory allocated in GetFeatures()
+   *
+   * Must be called if GetFeatures() returns PERIPHERAL_NO_ERROR.
+   *
+   * @param feature_count The number of features allocated for the features array
+   * @param features The array of allocated features
+   */
+  virtual void FreeFeatures(unsigned int feature_count, JOYSTICK_FEATURE* features) { }
 
-    inline static bool ADDON_SendEvent(const AddonInstance_Peripheral* addonInstance, const PERIPHERAL_EVENT* event)
-    {
-      return addonInstance->toAddon.addonInstance->SendEvent(event);
-    }
+  /*!
+   * @brief Add or update joystick features
+   * @param joystick The device's joystick properties; unknown values may be left at their default
+   * @param controller_id The game controller profile being updated
+   * @param feature_count The number of features in the features array
+   * @param features The array of features
+   * @return PERIPHERAL_NO_ERROR if successful
+   */
+  virtual PERIPHERAL_ERROR MapFeatures(const JOYSTICK_INFO* joystick,
+                                       const char* controller_id,
+                                       unsigned int feature_count,
+                                       const JOYSTICK_FEATURE* features)
+  {
+    return PERIPHERAL_ERROR_NOT_IMPLEMENTED;
+  }
 
+  /*!
+   * @brief Get the driver primitives that should be ignored while mapping the device
+   * @param joystick The device's joystick properties; unknown values may be left at their default
+   * @param primitive_count The number of features allocated for the primitives array
+   * @param primitives The array of allocated driver primitives to be ignored
+   * @return PERIPHERAL_NO_ERROR if successful; array must be freed using
+   *         FreePrimitives() in this case
+   */
+  virtual PERIPHERAL_ERROR GetIgnoredPrimitives(const JOYSTICK_INFO* joystick,
+                                                unsigned int* primitive_count,
+                                                JOYSTICK_DRIVER_PRIMITIVE** primitives)
+  {
+    return PERIPHERAL_ERROR_NOT_IMPLEMENTED;
+  }
 
-    inline static PERIPHERAL_ERROR ADDON_GetJoystickInfo(const AddonInstance_Peripheral* addonInstance, unsigned int index, JOYSTICK_INFO* info)
-    {
-      return addonInstance->toAddon.addonInstance->GetJoystickInfo(index, info);
-    }
+  /*!
+   * @brief Free the memory allocated in GetIgnoredPrimitives()
+   *
+   * Must be called if GetIgnoredPrimitives() returns PERIPHERAL_NO_ERROR.
+   *
+   * @param primitive_count The number of driver primitives allocated for the primitives array
+   * @param primitives The array of allocated driver primitives
+   */
+  virtual void FreePrimitives(unsigned int primitive_count, JOYSTICK_DRIVER_PRIMITIVE* primitives)
+  {
+  }
 
-    inline static void ADDON_FreeJoystickInfo(const AddonInstance_Peripheral* addonInstance, JOYSTICK_INFO* info)
-    {
-      addonInstance->toAddon.addonInstance->FreeJoystickInfo(info);
-    }
+  /*!
+   * @brief Set the list of driver primitives that are ignored for the device
+   * @param joystick The device's joystick properties; unknown values may be left at their default
+   * @param primitive_count The number of driver features in the primitives array
+   * @param primitives The array of driver primitives to ignore
+   * @return PERIPHERAL_NO_ERROR if successful
+   */
+  virtual PERIPHERAL_ERROR SetIgnoredPrimitives(const JOYSTICK_INFO* joystick,
+                                                unsigned int primitive_count,
+                                                const JOYSTICK_DRIVER_PRIMITIVE* primitives)
+  {
+    return PERIPHERAL_ERROR_NOT_IMPLEMENTED;
+  }
 
-    inline static PERIPHERAL_ERROR ADDON_GetFeatures(const AddonInstance_Peripheral* addonInstance,
-                                                     const JOYSTICK_INFO* joystick, const char* controller_id,
-                                                     unsigned int* feature_count, JOYSTICK_FEATURE** features)
-    {
-      return addonInstance->toAddon.addonInstance->GetFeatures(joystick, controller_id, feature_count, features);
-    }
+  /*!
+   * @brief Save the button map for the given joystick
+   * @param joystick The device's joystick properties
+   */
+  virtual void SaveButtonMap(const JOYSTICK_INFO* joystick) { }
 
-    inline static void ADDON_FreeFeatures(const AddonInstance_Peripheral* addonInstance, unsigned int feature_count, JOYSTICK_FEATURE* features)
-    {
-      addonInstance->toAddon.addonInstance->FreeFeatures(feature_count, features);
-    }
+  /*!
+   * @brief Revert the button map to the last time it was loaded or committed to disk
+   * @param joystick The device's joystick properties
+   */
+  virtual void RevertButtonMap(const JOYSTICK_INFO* joystick) { }
 
-    inline static PERIPHERAL_ERROR ADDON_MapFeatures(const AddonInstance_Peripheral* addonInstance,
-                                                     const JOYSTICK_INFO* joystick, const char* controller_id,
-                                                     unsigned int feature_count, const JOYSTICK_FEATURE* features)
-    {
-      return addonInstance->toAddon.addonInstance->MapFeatures(joystick, controller_id, feature_count, features);
-    }
+  /*!
+   * @brief Reset the button map for the given joystick and controller profile ID
+   * @param joystick The device's joystick properties
+   * @param controller_id The game controller profile being reset
+   */
+  virtual void ResetButtonMap(const JOYSTICK_INFO* joystick, const char* controller_id) { }
 
-    inline static PERIPHERAL_ERROR ADDON_GetIgnoredPrimitives(const AddonInstance_Peripheral* addonInstance,
-                                                              const JOYSTICK_INFO* joystick, unsigned int* primitive_count,
-                                                              JOYSTICK_DRIVER_PRIMITIVE** primitives)
-    {
-      return addonInstance->toAddon.addonInstance->GetIgnoredPrimitives(joystick, primitive_count, primitives);
-    }
+  /*!
+   * @brief Powers off the given joystick if supported
+   * @param index The joystick's driver index
+   */
+  virtual void PowerOffJoystick(unsigned int index) { }
 
-    inline static void ADDON_FreePrimitives(const AddonInstance_Peripheral* addonInstance,
-                                            unsigned int primitive_count, JOYSTICK_DRIVER_PRIMITIVE* primitives)
-    {
-      addonInstance->toAddon.addonInstance->FreePrimitives(primitive_count, primitives);
-    }
+  const std::string AddonPath() const { return m_instanceData->props.addon_path; }
 
-    inline static PERIPHERAL_ERROR ADDON_SetIgnoredPrimitives(const AddonInstance_Peripheral* addonInstance,
-                                                              const JOYSTICK_INFO* joystick, unsigned int primitive_count,
-                                                              const JOYSTICK_DRIVER_PRIMITIVE* primitives)
-    {
-      return addonInstance->toAddon.addonInstance->SetIgnoredPrimitives(joystick, primitive_count, primitives);
-    }
+  const std::string UserPath() const { return m_instanceData->props.user_path; }
 
-    inline static void ADDON_SaveButtonMap(const AddonInstance_Peripheral* addonInstance, const JOYSTICK_INFO* joystick)
-    {
-      addonInstance->toAddon.addonInstance->SaveButtonMap(joystick);
-    }
+  /*!
+   * @brief Trigger a scan for peripherals
+   *
+   * The add-on calls this if a change in hardware is detected.
+   */
+  void TriggerScan(void)
+  {
+    return m_instanceData->toKodi.trigger_scan(m_instanceData->toKodi.kodiInstance);
+  }
 
-    inline static void ADDON_RevertButtonMap(const AddonInstance_Peripheral* addonInstance, const JOYSTICK_INFO* joystick)
-    {
-      addonInstance->toAddon.addonInstance->RevertButtonMap(joystick);
-    }
+  /*!
+   * @brief Notify the frontend that button maps have changed
+   *
+   * @param [optional] deviceName The name of the device to refresh, or empty/null for all devices
+   * @param [optional] controllerId The controller ID to refresh, or empty/null for all controllers
+   */
+  void RefreshButtonMaps(const std::string& deviceName = "", const std::string& controllerId = "")
+  {
+    return m_instanceData->toKodi.refresh_button_maps(m_instanceData->toKodi.kodiInstance,
+                                                      deviceName.c_str(), controllerId.c_str());
+  }
 
-    inline static void ADDON_ResetButtonMap(const AddonInstance_Peripheral* addonInstance, const JOYSTICK_INFO* joystick, const char* controller_id)
-    {
-      addonInstance->toAddon.addonInstance->ResetButtonMap(joystick, controller_id);
-    }
+  /*!
+   * @brief Return the number of features belonging to the specified controller
+   *
+   * @param controllerId The controller ID to enumerate
+   * @param type [optional] Type to filter by, or JOYSTICK_FEATURE_TYPE_UNKNOWN for all features
+   *
+   * @return The number of features matching the request parameters
+   */
+  unsigned int FeatureCount(const std::string& controllerId,
+                            JOYSTICK_FEATURE_TYPE type = JOYSTICK_FEATURE_TYPE_UNKNOWN)
+  {
+    return m_instanceData->toKodi.feature_count(m_instanceData->toKodi.kodiInstance,
+                                                controllerId.c_str(), type);
+  }
 
-    inline static void ADDON_PowerOffJoystick(const AddonInstance_Peripheral* addonInstance, unsigned int index)
-    {
-      addonInstance->toAddon.addonInstance->PowerOffJoystick(index);
-    }
+  /*!
+   * @brief Return the type of the feature
+   *
+   * @param controllerId The controller ID to check
+   * @param featureName The feature to check
+   *
+   * @return The type of the specified feature, or JOYSTICK_FEATURE_TYPE_UNKNOWN
+   * if unknown
+   */
+  JOYSTICK_FEATURE_TYPE FeatureType(const std::string& controllerId, const std::string& featureName)
+  {
+    return m_instanceData->toKodi.feature_type(m_instanceData->toKodi.kodiInstance,
+                                               controllerId.c_str(), featureName.c_str());
+  }
+
+private:
+  void SetAddonStruct(KODI_HANDLE instance)
+  {
+    if (instance == nullptr)
+      throw std::logic_error("kodi::addon::CInstancePeripheral: Creation with empty addon "
+                             "structure not allowed, table must be given from Kodi!");
+
+    m_instanceData = static_cast<AddonInstance_Peripheral*>(instance);
+    m_instanceData->toAddon.addonInstance = this;
+
+    m_instanceData->toAddon.get_capabilities = ADDON_GetCapabilities;
+    m_instanceData->toAddon.perform_device_scan = ADDON_PerformDeviceScan;
+    m_instanceData->toAddon.free_scan_results = ADDON_FreeScanResults;
+    m_instanceData->toAddon.get_events = ADDON_GetEvents;
+    m_instanceData->toAddon.free_events = ADDON_FreeEvents;
+    m_instanceData->toAddon.send_event = ADDON_SendEvent;
+
+    m_instanceData->toAddon.get_joystick_info = ADDON_GetJoystickInfo;
+    m_instanceData->toAddon.free_joystick_info = ADDON_FreeJoystickInfo;
+    m_instanceData->toAddon.get_features = ADDON_GetFeatures;
+    m_instanceData->toAddon.free_features = ADDON_FreeFeatures;
+    m_instanceData->toAddon.map_features = ADDON_MapFeatures;
+    m_instanceData->toAddon.get_ignored_primitives = ADDON_GetIgnoredPrimitives;
+    m_instanceData->toAddon.free_primitives = ADDON_FreePrimitives;
+    m_instanceData->toAddon.set_ignored_primitives = ADDON_SetIgnoredPrimitives;
+    m_instanceData->toAddon.save_button_map = ADDON_SaveButtonMap;
+    m_instanceData->toAddon.revert_button_map = ADDON_RevertButtonMap;
+    m_instanceData->toAddon.reset_button_map = ADDON_ResetButtonMap;
+    m_instanceData->toAddon.power_off_joystick = ADDON_PowerOffJoystick;
+  }
+
+  inline static void ADDON_GetCapabilities(const AddonInstance_Peripheral* addonInstance,
+                                           PERIPHERAL_CAPABILITIES* capabilities)
+  {
+    addonInstance->toAddon.addonInstance->GetCapabilities(*capabilities);
+  }
+
+  inline static PERIPHERAL_ERROR ADDON_PerformDeviceScan(
+      const AddonInstance_Peripheral* addonInstance,
+      unsigned int* peripheral_count,
+      PERIPHERAL_INFO** scan_results)
+  {
+    return addonInstance->toAddon.addonInstance->PerformDeviceScan(peripheral_count, scan_results);
+  }
+
+  inline static void ADDON_FreeScanResults(const AddonInstance_Peripheral* addonInstance,
+                                           unsigned int peripheral_count,
+                                           PERIPHERAL_INFO* scan_results)
+  {
+    addonInstance->toAddon.addonInstance->FreeScanResults(peripheral_count, scan_results);
+  }
+
+  inline static PERIPHERAL_ERROR ADDON_GetEvents(const AddonInstance_Peripheral* addonInstance,
+                                                 unsigned int* event_count,
+                                                 PERIPHERAL_EVENT** events)
+  {
+    return addonInstance->toAddon.addonInstance->GetEvents(event_count, events);
+  }
+
+  inline static void ADDON_FreeEvents(const AddonInstance_Peripheral* addonInstance,
+                                      unsigned int event_count,
+                                      PERIPHERAL_EVENT* events)
+  {
+    addonInstance->toAddon.addonInstance->FreeEvents(event_count, events);
+  }
+
+  inline static bool ADDON_SendEvent(const AddonInstance_Peripheral* addonInstance,
+                                     const PERIPHERAL_EVENT* event)
+  {
+    return addonInstance->toAddon.addonInstance->SendEvent(event);
+  }
+
+
+  inline static PERIPHERAL_ERROR ADDON_GetJoystickInfo(
+      const AddonInstance_Peripheral* addonInstance, unsigned int index, JOYSTICK_INFO* info)
+  {
+    return addonInstance->toAddon.addonInstance->GetJoystickInfo(index, info);
+  }
+
+  inline static void ADDON_FreeJoystickInfo(const AddonInstance_Peripheral* addonInstance,
+                                            JOYSTICK_INFO* info)
+  {
+    addonInstance->toAddon.addonInstance->FreeJoystickInfo(info);
+  }
+
+  inline static PERIPHERAL_ERROR ADDON_GetFeatures(const AddonInstance_Peripheral* addonInstance,
+                                                   const JOYSTICK_INFO* joystick,
+                                                   const char* controller_id,
+                                                   unsigned int* feature_count,
+                                                   JOYSTICK_FEATURE** features)
+  {
+    return addonInstance->toAddon.addonInstance->GetFeatures(joystick, controller_id, feature_count,
+                                                             features);
+  }
+
+  inline static void ADDON_FreeFeatures(const AddonInstance_Peripheral* addonInstance,
+                                        unsigned int feature_count,
+                                        JOYSTICK_FEATURE* features)
+  {
+    addonInstance->toAddon.addonInstance->FreeFeatures(feature_count, features);
+  }
+
+  inline static PERIPHERAL_ERROR ADDON_MapFeatures(const AddonInstance_Peripheral* addonInstance,
+                                                   const JOYSTICK_INFO* joystick,
+                                                   const char* controller_id,
+                                                   unsigned int feature_count,
+                                                   const JOYSTICK_FEATURE* features)
+  {
+    return addonInstance->toAddon.addonInstance->MapFeatures(joystick, controller_id, feature_count,
+                                                             features);
+  }
+
+  inline static PERIPHERAL_ERROR ADDON_GetIgnoredPrimitives(
+      const AddonInstance_Peripheral* addonInstance,
+      const JOYSTICK_INFO* joystick,
+      unsigned int* primitive_count,
+      JOYSTICK_DRIVER_PRIMITIVE** primitives)
+  {
+    return addonInstance->toAddon.addonInstance->GetIgnoredPrimitives(joystick, primitive_count,
+                                                                      primitives);
+  }
+
+  inline static void ADDON_FreePrimitives(const AddonInstance_Peripheral* addonInstance,
+                                          unsigned int primitive_count,
+                                          JOYSTICK_DRIVER_PRIMITIVE* primitives)
+  {
+    addonInstance->toAddon.addonInstance->FreePrimitives(primitive_count, primitives);
+  }
+
+  inline static PERIPHERAL_ERROR ADDON_SetIgnoredPrimitives(
+      const AddonInstance_Peripheral* addonInstance,
+      const JOYSTICK_INFO* joystick,
+      unsigned int primitive_count,
+      const JOYSTICK_DRIVER_PRIMITIVE* primitives)
+  {
+    return addonInstance->toAddon.addonInstance->SetIgnoredPrimitives(joystick, primitive_count,
+                                                                      primitives);
+  }
+
+  inline static void ADDON_SaveButtonMap(const AddonInstance_Peripheral* addonInstance,
+                                         const JOYSTICK_INFO* joystick)
+  {
+    addonInstance->toAddon.addonInstance->SaveButtonMap(joystick);
+  }
+
+  inline static void ADDON_RevertButtonMap(const AddonInstance_Peripheral* addonInstance,
+                                           const JOYSTICK_INFO* joystick)
+  {
+    addonInstance->toAddon.addonInstance->RevertButtonMap(joystick);
+  }
+
+  inline static void ADDON_ResetButtonMap(const AddonInstance_Peripheral* addonInstance,
+                                          const JOYSTICK_INFO* joystick,
+                                          const char* controller_id)
+  {
+    addonInstance->toAddon.addonInstance->ResetButtonMap(joystick, controller_id);
+  }
+
+  inline static void ADDON_PowerOffJoystick(const AddonInstance_Peripheral* addonInstance,
+                                            unsigned int index)
+  {
+    addonInstance->toAddon.addonInstance->PowerOffJoystick(index);
+  }
 
-    AddonInstance_Peripheral* m_instanceData;
-  };
+  AddonInstance_Peripheral* m_instanceData;
+};
 
 } /* namespace addon */
 } /* namespace kodi */
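Taken together, the reformatted class works as before: an add-on overrides the virtuals it supports, and the static ADDON_* thunks route Kodi's C calls to those overrides. A minimal sketch of a scan implementation against this header; the device data is invented and PERIPHERAL_TYPE_JOYSTICK is an assumed enum value from a part of the header not shown in this diff:

#include <kodi/addon-instance/Peripheral.h>

#include <cstring>

// Hypothetical add-on: reports a single dummy peripheral.
class CMyPeripheralAddon : public kodi::addon::CInstancePeripheral
{
public:
  explicit CMyPeripheralAddon(KODI_HANDLE instance) : CInstancePeripheral(instance) {}

  PERIPHERAL_ERROR PerformDeviceScan(unsigned int* peripheral_count,
                                     PERIPHERAL_INFO** scan_results) override
  {
    *peripheral_count = 1;
    *scan_results = new PERIPHERAL_INFO[1]{};
    (*scan_results)[0].type = PERIPHERAL_TYPE_JOYSTICK; // assumed enum value
    (*scan_results)[0].name = new char[6];
    std::strcpy((*scan_results)[0].name, "dummy");
    (*scan_results)[0].index = 0;
    return PERIPHERAL_NO_ERROR; // caller must free via FreeScanResults()
  }

  void FreeScanResults(unsigned int peripheral_count, PERIPHERAL_INFO* scan_results) override
  {
    for (unsigned int i = 0; i < peripheral_count; i++)
      delete[] scan_results[i].name;
    delete[] scan_results;
  }
};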
addonInstance - > RevertButtonMap ( joystick ) ; <nl> + } <nl> + <nl> + inline static void ADDON_ResetButtonMap ( const AddonInstance_Peripheral * addonInstance , <nl> + const JOYSTICK_INFO * joystick , <nl> + const char * controller_id ) <nl> + { <nl> + addonInstance - > toAddon . addonInstance - > ResetButtonMap ( joystick , controller_id ) ; <nl> + } <nl> + <nl> + inline static void ADDON_PowerOffJoystick ( const AddonInstance_Peripheral * addonInstance , <nl> + unsigned int index ) <nl> + { <nl> + addonInstance - > toAddon . addonInstance - > PowerOffJoystick ( index ) ; <nl> + } <nl> <nl> - AddonInstance_Peripheral * m_instanceData ; <nl> - } ; <nl> + AddonInstance_Peripheral * m_instanceData ; <nl> + } ; <nl> <nl> } / * namespace addon * / <nl> } / * namespace kodi * / <nl> mmm a / xbmc / addons / kodi - dev - kit / include / kodi / addon - instance / PeripheralUtils . h <nl> ppp b / xbmc / addons / kodi - dev - kit / include / kodi / addon - instance / PeripheralUtils . h <nl> <nl> # include < utility > <nl> # include < vector > <nl> <nl> - # define PERIPHERAL_SAFE_DELETE ( x ) do { delete ( x ) ; ( x ) = NULL ; } while ( 0 ) <nl> - # define PERIPHERAL_SAFE_DELETE_ARRAY ( x ) do { delete [ ] ( x ) ; ( x ) = NULL ; } while ( 0 ) <nl> + # define PERIPHERAL_SAFE_DELETE ( x ) \ <nl> + do \ <nl> + { \ <nl> + delete ( x ) ; \ <nl> + ( x ) = NULL ; \ <nl> + } while ( 0 ) <nl> + # define PERIPHERAL_SAFE_DELETE_ARRAY ( x ) \ <nl> + do \ <nl> + { \ <nl> + delete [ ] ( x ) ; \ <nl> + ( x ) = NULL ; \ <nl> + } while ( 0 ) <nl> <nl> namespace kodi <nl> { <nl> namespace addon <nl> { <nl> - / * ! <nl> - * Utility class to manipulate arrays of peripheral types . <nl> - * / <nl> - template < class THE_CLASS , typename THE_STRUCT > <nl> - class PeripheralVector <nl> + / * ! <nl> + * Utility class to manipulate arrays of peripheral types . <nl> + * / <nl> + template < class THE_CLASS , typename THE_STRUCT > <nl> + class PeripheralVector <nl> + { <nl> + public : <nl> + static void ToStructs ( const std : : vector < THE_CLASS > & vecObjects , THE_STRUCT * * pStructs ) <nl> { <nl> - public : <nl> - static void ToStructs ( const std : : vector < THE_CLASS > & vecObjects , THE_STRUCT * * pStructs ) <nl> - { <nl> - if ( ! pStructs ) <nl> - return ; <nl> + if ( ! pStructs ) <nl> + return ; <nl> <nl> - if ( vecObjects . empty ( ) ) <nl> - { <nl> - * pStructs = NULL ; <nl> - } <nl> - else <nl> - { <nl> - ( * pStructs ) = new THE_STRUCT [ vecObjects . size ( ) ] ; <nl> - for ( unsigned int i = 0 ; i < vecObjects . size ( ) ; i + + ) <nl> - vecObjects . at ( i ) . ToStruct ( ( * pStructs ) [ i ] ) ; <nl> - } <nl> + if ( vecObjects . empty ( ) ) <nl> + { <nl> + * pStructs = NULL ; <nl> } <nl> - <nl> - static void ToStructs ( const std : : vector < THE_CLASS * > & vecObjects , THE_STRUCT * * pStructs ) <nl> + else <nl> { <nl> - if ( ! pStructs ) <nl> - return ; <nl> - <nl> - if ( vecObjects . empty ( ) ) <nl> - { <nl> - * pStructs = NULL ; <nl> - } <nl> - else <nl> - { <nl> - * pStructs = new THE_STRUCT [ vecObjects . size ( ) ] ; <nl> - for ( unsigned int i = 0 ; i < vecObjects . size ( ) ; i + + ) <nl> - vecObjects . at ( i ) - > ToStruct ( ( * pStructs ) [ i ] ) ; <nl> - } <nl> + ( * pStructs ) = new THE_STRUCT [ vecObjects . size ( ) ] ; <nl> + for ( unsigned int i = 0 ; i < vecObjects . size ( ) ; i + + ) <nl> + vecObjects . at ( i ) . 
ToStruct ( ( * pStructs ) [ i ] ) ; <nl> } <nl> + } <nl> <nl> - static void FreeStructs ( unsigned int structCount , THE_STRUCT * structs ) <nl> + static void ToStructs ( const std : : vector < THE_CLASS * > & vecObjects , THE_STRUCT * * pStructs ) <nl> + { <nl> + if ( ! pStructs ) <nl> + return ; <nl> + <nl> + if ( vecObjects . empty ( ) ) <nl> { <nl> - if ( structs ) <nl> - { <nl> - for ( unsigned int i = 0 ; i < structCount ; i + + ) <nl> - THE_CLASS : : FreeStruct ( structs [ i ] ) ; <nl> - } <nl> - PERIPHERAL_SAFE_DELETE_ARRAY ( structs ) ; <nl> + * pStructs = NULL ; <nl> } <nl> - } ; <nl> + else <nl> + { <nl> + * pStructs = new THE_STRUCT [ vecObjects . size ( ) ] ; <nl> + for ( unsigned int i = 0 ; i < vecObjects . size ( ) ; i + + ) <nl> + vecObjects . at ( i ) - > ToStruct ( ( * pStructs ) [ i ] ) ; <nl> + } <nl> + } <nl> <nl> - / * ! <nl> - * ADDON : : Peripheral <nl> - * <nl> - * Wrapper class providing peripheral information . Classes can extend <nl> - * Peripheral to inherit peripheral properties . <nl> - * / <nl> - class Peripheral <nl> + static void FreeStructs ( unsigned int structCount , THE_STRUCT * structs ) <nl> { <nl> - public : <nl> - Peripheral ( PERIPHERAL_TYPE type = PERIPHERAL_TYPE_UNKNOWN , const std : : string & strName = " " ) : <nl> - m_type ( type ) , <nl> - m_strName ( strName ) <nl> + if ( structs ) <nl> { <nl> + for ( unsigned int i = 0 ; i < structCount ; i + + ) <nl> + THE_CLASS : : FreeStruct ( structs [ i ] ) ; <nl> } <nl> + PERIPHERAL_SAFE_DELETE_ARRAY ( structs ) ; <nl> + } <nl> + } ; <nl> <nl> - explicit Peripheral ( const PERIPHERAL_INFO & info ) : <nl> - m_type ( info . type ) , <nl> + / * ! <nl> + * ADDON : : Peripheral <nl> + * <nl> + * Wrapper class providing peripheral information . Classes can extend <nl> + * Peripheral to inherit peripheral properties . <nl> + * / <nl> + class Peripheral <nl> + { <nl> + public : <nl> + Peripheral ( PERIPHERAL_TYPE type = PERIPHERAL_TYPE_UNKNOWN , const std : : string & strName = " " ) <nl> + : m_type ( type ) , m_strName ( strName ) <nl> + { <nl> + } <nl> + <nl> + explicit Peripheral ( const PERIPHERAL_INFO & info ) <nl> + : m_type ( info . type ) , <nl> m_strName ( info . name ? info . name : " " ) , <nl> m_vendorId ( info . vendor_id ) , <nl> m_productId ( info . product_id ) , <nl> m_index ( info . index ) <nl> - { <nl> - } <nl> + { <nl> + } <nl> <nl> - virtual ~ Peripheral ( void ) = default ; <nl> + virtual ~ Peripheral ( void ) = default ; <nl> <nl> - PERIPHERAL_TYPE Type ( void ) const { return m_type ; } <nl> - const std : : string & Name ( void ) const { return m_strName ; } <nl> - uint16_t VendorID ( void ) const { return m_vendorId ; } <nl> - uint16_t ProductID ( void ) const { return m_productId ; } <nl> - unsigned int Index ( void ) const { return m_index ; } <nl> + PERIPHERAL_TYPE Type ( void ) const { return m_type ; } <nl> + const std : : string & Name ( void ) const { return m_strName ; } <nl> + uint16_t VendorID ( void ) const { return m_vendorId ; } <nl> + uint16_t ProductID ( void ) const { return m_productId ; } <nl> + unsigned int Index ( void ) const { return m_index ; } <nl> <nl> - / / Derived property : VID and PID are 0x0000 if unknown <nl> - bool IsVidPidKnown ( void ) const { return m_vendorId ! = 0 | | m_productId ! = 0 ; } <nl> + / / Derived property : VID and PID are 0x0000 if unknown <nl> + bool IsVidPidKnown ( void ) const { return m_vendorId ! = 0 | | m_productId ! 
= 0 ; } <nl> <nl> - void SetType ( PERIPHERAL_TYPE type ) { m_type = type ; } <nl> - void SetName ( const std : : string & strName ) { m_strName = strName ; } <nl> - void SetVendorID ( uint16_t vendorId ) { m_vendorId = vendorId ; } <nl> - void SetProductID ( uint16_t productId ) { m_productId = productId ; } <nl> - void SetIndex ( unsigned int index ) { m_index = index ; } <nl> + void SetType ( PERIPHERAL_TYPE type ) { m_type = type ; } <nl> + void SetName ( const std : : string & strName ) { m_strName = strName ; } <nl> + void SetVendorID ( uint16_t vendorId ) { m_vendorId = vendorId ; } <nl> + void SetProductID ( uint16_t productId ) { m_productId = productId ; } <nl> + void SetIndex ( unsigned int index ) { m_index = index ; } <nl> <nl> - void ToStruct ( PERIPHERAL_INFO & info ) const <nl> - { <nl> - info . type = m_type ; <nl> - info . name = new char [ m_strName . size ( ) + 1 ] ; <nl> - info . vendor_id = m_vendorId ; <nl> - info . product_id = m_productId ; <nl> - info . index = m_index ; <nl> + void ToStruct ( PERIPHERAL_INFO & info ) const <nl> + { <nl> + info . type = m_type ; <nl> + info . name = new char [ m_strName . size ( ) + 1 ] ; <nl> + info . vendor_id = m_vendorId ; <nl> + info . product_id = m_productId ; <nl> + info . index = m_index ; <nl> <nl> - std : : strcpy ( info . name , m_strName . c_str ( ) ) ; <nl> - } <nl> + std : : strcpy ( info . name , m_strName . c_str ( ) ) ; <nl> + } <nl> <nl> - static void FreeStruct ( PERIPHERAL_INFO & info ) <nl> - { <nl> - PERIPHERAL_SAFE_DELETE_ARRAY ( info . name ) ; <nl> - } <nl> + static void FreeStruct ( PERIPHERAL_INFO & info ) { PERIPHERAL_SAFE_DELETE_ARRAY ( info . name ) ; } <nl> <nl> - private : <nl> - PERIPHERAL_TYPE m_type ; <nl> - std : : string m_strName ; <nl> - uint16_t m_vendorId = 0 ; <nl> - uint16_t m_productId = 0 ; <nl> - unsigned int m_index = 0 ; <nl> - } ; <nl> + private : <nl> + PERIPHERAL_TYPE m_type ; <nl> + std : : string m_strName ; <nl> + uint16_t m_vendorId = 0 ; <nl> + uint16_t m_productId = 0 ; <nl> + unsigned int m_index = 0 ; <nl> + } ; <nl> <nl> - typedef PeripheralVector < Peripheral , PERIPHERAL_INFO > Peripherals ; <nl> + typedef PeripheralVector < Peripheral , PERIPHERAL_INFO > Peripherals ; <nl> <nl> - / * ! <nl> - * ADDON : : PeripheralEvent <nl> - * <nl> - * Wrapper class for peripheral events . <nl> - * / <nl> - class PeripheralEvent <nl> - { <nl> - public : <nl> - PeripheralEvent ( ) = default ; <nl> + / * ! <nl> + * ADDON : : PeripheralEvent <nl> + * <nl> + * Wrapper class for peripheral events . 
<nl> + * / <nl> + class PeripheralEvent <nl> + { <nl> + public : <nl> + PeripheralEvent ( ) = default ; <nl> <nl> - PeripheralEvent ( unsigned int peripheralIndex , unsigned int buttonIndex , JOYSTICK_STATE_BUTTON state ) : <nl> - m_type ( PERIPHERAL_EVENT_TYPE_DRIVER_BUTTON ) , <nl> + PeripheralEvent ( unsigned int peripheralIndex , <nl> + unsigned int buttonIndex , <nl> + JOYSTICK_STATE_BUTTON state ) <nl> + : m_type ( PERIPHERAL_EVENT_TYPE_DRIVER_BUTTON ) , <nl> m_peripheralIndex ( peripheralIndex ) , <nl> m_driverIndex ( buttonIndex ) , <nl> m_buttonState ( state ) <nl> - { <nl> - } <nl> + { <nl> + } <nl> <nl> - PeripheralEvent ( unsigned int peripheralIndex , unsigned int hatIndex , JOYSTICK_STATE_HAT state ) : <nl> - m_type ( PERIPHERAL_EVENT_TYPE_DRIVER_HAT ) , <nl> + PeripheralEvent ( unsigned int peripheralIndex , unsigned int hatIndex , JOYSTICK_STATE_HAT state ) <nl> + : m_type ( PERIPHERAL_EVENT_TYPE_DRIVER_HAT ) , <nl> m_peripheralIndex ( peripheralIndex ) , <nl> m_driverIndex ( hatIndex ) , <nl> m_hatState ( state ) <nl> - { <nl> - } <nl> + { <nl> + } <nl> <nl> - PeripheralEvent ( unsigned int peripheralIndex , unsigned int axisIndex , JOYSTICK_STATE_AXIS state ) : <nl> - m_type ( PERIPHERAL_EVENT_TYPE_DRIVER_AXIS ) , <nl> + PeripheralEvent ( unsigned int peripheralIndex , unsigned int axisIndex , JOYSTICK_STATE_AXIS state ) <nl> + : m_type ( PERIPHERAL_EVENT_TYPE_DRIVER_AXIS ) , <nl> m_peripheralIndex ( peripheralIndex ) , <nl> m_driverIndex ( axisIndex ) , <nl> m_axisState ( state ) <nl> - { <nl> - } <nl> + { <nl> + } <nl> <nl> - explicit PeripheralEvent ( const PERIPHERAL_EVENT & event ) : <nl> - m_type ( event . type ) , <nl> + explicit PeripheralEvent ( const PERIPHERAL_EVENT & event ) <nl> + : m_type ( event . type ) , <nl> m_peripheralIndex ( event . peripheral_index ) , <nl> m_driverIndex ( event . driver_index ) , <nl> m_buttonState ( event . driver_button_state ) , <nl> m_hatState ( event . driver_hat_state ) , <nl> m_axisState ( event . driver_axis_state ) , <nl> m_motorState ( event . motor_state ) <nl> - { <nl> - } <nl> - <nl> - PERIPHERAL_EVENT_TYPE Type ( void ) const { return m_type ; } <nl> - unsigned int PeripheralIndex ( void ) const { return m_peripheralIndex ; } <nl> - unsigned int DriverIndex ( void ) const { return m_driverIndex ; } <nl> - JOYSTICK_STATE_BUTTON ButtonState ( void ) const { return m_buttonState ; } <nl> - JOYSTICK_STATE_HAT HatState ( void ) const { return m_hatState ; } <nl> - JOYSTICK_STATE_AXIS AxisState ( void ) const { return m_axisState ; } <nl> - JOYSTICK_STATE_MOTOR MotorState ( void ) const { return m_motorState ; } <nl> - <nl> - void SetType ( PERIPHERAL_EVENT_TYPE type ) { m_type = type ; } <nl> - void SetPeripheralIndex ( unsigned int index ) { m_peripheralIndex = index ; } <nl> - void SetDriverIndex ( unsigned int index ) { m_driverIndex = index ; } <nl> - void SetButtonState ( JOYSTICK_STATE_BUTTON state ) { m_buttonState = state ; } <nl> - void SetHatState ( JOYSTICK_STATE_HAT state ) { m_hatState = state ; } <nl> - void SetAxisState ( JOYSTICK_STATE_AXIS state ) { m_axisState = state ; } <nl> - void SetMotorState ( JOYSTICK_STATE_MOTOR state ) { m_motorState = state ; } <nl> - <nl> - void ToStruct ( PERIPHERAL_EVENT & event ) const <nl> - { <nl> - event . type = m_type ; <nl> - event . peripheral_index = m_peripheralIndex ; <nl> - event . driver_index = m_driverIndex ; <nl> - event . driver_button_state = m_buttonState ; <nl> - event . driver_hat_state = m_hatState ; <nl> - event . 
driver_axis_state = m_axisState ; <nl> - event . motor_state = m_motorState ; <nl> - } <nl> - <nl> - static void FreeStruct ( PERIPHERAL_EVENT & event ) <nl> - { <nl> - ( void ) event ; <nl> - } <nl> - <nl> - private : <nl> - PERIPHERAL_EVENT_TYPE m_type = PERIPHERAL_EVENT_TYPE_NONE ; <nl> - unsigned int m_peripheralIndex = 0 ; <nl> - unsigned int m_driverIndex = 0 ; <nl> - JOYSTICK_STATE_BUTTON m_buttonState = JOYSTICK_STATE_BUTTON_UNPRESSED ; <nl> - JOYSTICK_STATE_HAT m_hatState = JOYSTICK_STATE_HAT_UNPRESSED ; <nl> - JOYSTICK_STATE_AXIS m_axisState = 0 . 0f ; <nl> - JOYSTICK_STATE_MOTOR m_motorState = 0 . 0f ; <nl> - } ; <nl> - <nl> - typedef PeripheralVector < PeripheralEvent , PERIPHERAL_EVENT > PeripheralEvents ; <nl> - <nl> - / * ! <nl> - * kodi : : addon : : Joystick <nl> - * <nl> - * Wrapper class providing additional joystick information not provided by <nl> - * ADDON : : Peripheral . <nl> - * / <nl> - class Joystick : public Peripheral <nl> { <nl> - public : <nl> - Joystick ( const std : : string & provider = " " , const std : : string & strName = " " ) : <nl> - Peripheral ( PERIPHERAL_TYPE_JOYSTICK , strName ) , <nl> + } <nl> + <nl> + PERIPHERAL_EVENT_TYPE Type ( void ) const { return m_type ; } <nl> + unsigned int PeripheralIndex ( void ) const { return m_peripheralIndex ; } <nl> + unsigned int DriverIndex ( void ) const { return m_driverIndex ; } <nl> + JOYSTICK_STATE_BUTTON ButtonState ( void ) const { return m_buttonState ; } <nl> + JOYSTICK_STATE_HAT HatState ( void ) const { return m_hatState ; } <nl> + JOYSTICK_STATE_AXIS AxisState ( void ) const { return m_axisState ; } <nl> + JOYSTICK_STATE_MOTOR MotorState ( void ) const { return m_motorState ; } <nl> + <nl> + void SetType ( PERIPHERAL_EVENT_TYPE type ) { m_type = type ; } <nl> + void SetPeripheralIndex ( unsigned int index ) { m_peripheralIndex = index ; } <nl> + void SetDriverIndex ( unsigned int index ) { m_driverIndex = index ; } <nl> + void SetButtonState ( JOYSTICK_STATE_BUTTON state ) { m_buttonState = state ; } <nl> + void SetHatState ( JOYSTICK_STATE_HAT state ) { m_hatState = state ; } <nl> + void SetAxisState ( JOYSTICK_STATE_AXIS state ) { m_axisState = state ; } <nl> + void SetMotorState ( JOYSTICK_STATE_MOTOR state ) { m_motorState = state ; } <nl> + <nl> + void ToStruct ( PERIPHERAL_EVENT & event ) const <nl> + { <nl> + event . type = m_type ; <nl> + event . peripheral_index = m_peripheralIndex ; <nl> + event . driver_index = m_driverIndex ; <nl> + event . driver_button_state = m_buttonState ; <nl> + event . driver_hat_state = m_hatState ; <nl> + event . driver_axis_state = m_axisState ; <nl> + event . motor_state = m_motorState ; <nl> + } <nl> + <nl> + static void FreeStruct ( PERIPHERAL_EVENT & event ) { ( void ) event ; } <nl> + <nl> + private : <nl> + PERIPHERAL_EVENT_TYPE m_type = PERIPHERAL_EVENT_TYPE_NONE ; <nl> + unsigned int m_peripheralIndex = 0 ; <nl> + unsigned int m_driverIndex = 0 ; <nl> + JOYSTICK_STATE_BUTTON m_buttonState = JOYSTICK_STATE_BUTTON_UNPRESSED ; <nl> + JOYSTICK_STATE_HAT m_hatState = JOYSTICK_STATE_HAT_UNPRESSED ; <nl> + JOYSTICK_STATE_AXIS m_axisState = 0 . 0f ; <nl> + JOYSTICK_STATE_MOTOR m_motorState = 0 . 0f ; <nl> + } ; <nl> + <nl> + typedef PeripheralVector < PeripheralEvent , PERIPHERAL_EVENT > PeripheralEvents ; <nl> + <nl> + / * ! <nl> + * kodi : : addon : : Joystick <nl> + * <nl> + * Wrapper class providing additional joystick information not provided by <nl> + * ADDON : : Peripheral . 
<nl> + * / <nl> + class Joystick : public Peripheral <nl> + { <nl> + public : <nl> + Joystick ( const std : : string & provider = " " , const std : : string & strName = " " ) <nl> + : Peripheral ( PERIPHERAL_TYPE_JOYSTICK , strName ) , <nl> m_provider ( provider ) , <nl> m_requestedPort ( NO_PORT_REQUESTED ) <nl> - { <nl> - } <nl> + { <nl> + } <nl> <nl> - Joystick ( const Joystick & other ) <nl> - { <nl> - * this = other ; <nl> - } <nl> + Joystick ( const Joystick & other ) { * this = other ; } <nl> <nl> - explicit Joystick ( const JOYSTICK_INFO & info ) : <nl> - Peripheral ( info . peripheral ) , <nl> + explicit Joystick ( const JOYSTICK_INFO & info ) <nl> + : Peripheral ( info . peripheral ) , <nl> m_provider ( info . provider ? info . provider : " " ) , <nl> m_requestedPort ( info . requested_port ) , <nl> m_buttonCount ( info . button_count ) , <nl> namespace addon <nl> m_axisCount ( info . axis_count ) , <nl> m_motorCount ( info . motor_count ) , <nl> m_supportsPowerOff ( info . supports_poweroff ) <nl> - { <nl> - } <nl> + { <nl> + } <nl> <nl> - ~ Joystick ( void ) override = default ; <nl> + ~ Joystick ( void ) override = default ; <nl> <nl> - Joystick & operator = ( const Joystick & rhs ) <nl> - { <nl> - if ( this ! = & rhs ) <nl> - { <nl> - Peripheral : : operator = ( rhs ) ; <nl> - <nl> - m_provider = rhs . m_provider ; <nl> - m_requestedPort = rhs . m_requestedPort ; <nl> - m_buttonCount = rhs . m_buttonCount ; <nl> - m_hatCount = rhs . m_hatCount ; <nl> - m_axisCount = rhs . m_axisCount ; <nl> - m_motorCount = rhs . m_motorCount ; <nl> - m_supportsPowerOff = rhs . m_supportsPowerOff ; <nl> - } <nl> - return * this ; <nl> - } <nl> - <nl> - const std : : string & Provider ( void ) const { return m_provider ; } <nl> - int RequestedPort ( void ) const { return m_requestedPort ; } <nl> - unsigned int ButtonCount ( void ) const { return m_buttonCount ; } <nl> - unsigned int HatCount ( void ) const { return m_hatCount ; } <nl> - unsigned int AxisCount ( void ) const { return m_axisCount ; } <nl> - unsigned int MotorCount ( void ) const { return m_motorCount ; } <nl> - bool SupportsPowerOff ( void ) const { return m_supportsPowerOff ; } <nl> - <nl> - void SetProvider ( const std : : string & provider ) { m_provider = provider ; } <nl> - void SetRequestedPort ( int requestedPort ) { m_requestedPort = requestedPort ; } <nl> - void SetButtonCount ( unsigned int buttonCount ) { m_buttonCount = buttonCount ; } <nl> - void SetHatCount ( unsigned int hatCount ) { m_hatCount = hatCount ; } <nl> - void SetAxisCount ( unsigned int axisCount ) { m_axisCount = axisCount ; } <nl> - void SetMotorCount ( unsigned int motorCount ) { m_motorCount = motorCount ; } <nl> - void SetSupportsPowerOff ( bool supportsPowerOff ) { m_supportsPowerOff = supportsPowerOff ; } <nl> - <nl> - void ToStruct ( JOYSTICK_INFO & info ) const <nl> - { <nl> - Peripheral : : ToStruct ( info . peripheral ) ; <nl> + Joystick & operator = ( const Joystick & rhs ) <nl> + { <nl> + if ( this ! = & rhs ) <nl> + { <nl> + Peripheral : : operator = ( rhs ) ; <nl> + <nl> + m_provider = rhs . m_provider ; <nl> + m_requestedPort = rhs . m_requestedPort ; <nl> + m_buttonCount = rhs . m_buttonCount ; <nl> + m_hatCount = rhs . m_hatCount ; <nl> + m_axisCount = rhs . m_axisCount ; <nl> + m_motorCount = rhs . m_motorCount ; <nl> + m_supportsPowerOff = rhs . 
m_supportsPowerOff ; <nl> + } <nl> + return * this ; <nl> + } <nl> + <nl> + const std : : string & Provider ( void ) const { return m_provider ; } <nl> + int RequestedPort ( void ) const { return m_requestedPort ; } <nl> + unsigned int ButtonCount ( void ) const { return m_buttonCount ; } <nl> + unsigned int HatCount ( void ) const { return m_hatCount ; } <nl> + unsigned int AxisCount ( void ) const { return m_axisCount ; } <nl> + unsigned int MotorCount ( void ) const { return m_motorCount ; } <nl> + bool SupportsPowerOff ( void ) const { return m_supportsPowerOff ; } <nl> + <nl> + void SetProvider ( const std : : string & provider ) { m_provider = provider ; } <nl> + void SetRequestedPort ( int requestedPort ) { m_requestedPort = requestedPort ; } <nl> + void SetButtonCount ( unsigned int buttonCount ) { m_buttonCount = buttonCount ; } <nl> + void SetHatCount ( unsigned int hatCount ) { m_hatCount = hatCount ; } <nl> + void SetAxisCount ( unsigned int axisCount ) { m_axisCount = axisCount ; } <nl> + void SetMotorCount ( unsigned int motorCount ) { m_motorCount = motorCount ; } <nl> + void SetSupportsPowerOff ( bool supportsPowerOff ) { m_supportsPowerOff = supportsPowerOff ; } <nl> + <nl> + void ToStruct ( JOYSTICK_INFO & info ) const <nl> + { <nl> + Peripheral : : ToStruct ( info . peripheral ) ; <nl> <nl> - info . provider = new char [ m_provider . size ( ) + 1 ] ; <nl> - info . requested_port = m_requestedPort ; <nl> - info . button_count = m_buttonCount ; <nl> - info . hat_count = m_hatCount ; <nl> - info . axis_count = m_axisCount ; <nl> - info . motor_count = m_motorCount ; <nl> - info . supports_poweroff = m_supportsPowerOff ; <nl> + info . provider = new char [ m_provider . size ( ) + 1 ] ; <nl> + info . requested_port = m_requestedPort ; <nl> + info . button_count = m_buttonCount ; <nl> + info . hat_count = m_hatCount ; <nl> + info . axis_count = m_axisCount ; <nl> + info . motor_count = m_motorCount ; <nl> + info . supports_poweroff = m_supportsPowerOff ; <nl> <nl> - std : : strcpy ( info . provider , m_provider . c_str ( ) ) ; <nl> - } <nl> + std : : strcpy ( info . provider , m_provider . c_str ( ) ) ; <nl> + } <nl> <nl> - static void FreeStruct ( JOYSTICK_INFO & info ) <nl> - { <nl> - Peripheral : : FreeStruct ( info . peripheral ) ; <nl> + static void FreeStruct ( JOYSTICK_INFO & info ) <nl> + { <nl> + Peripheral : : FreeStruct ( info . peripheral ) ; <nl> <nl> - PERIPHERAL_SAFE_DELETE_ARRAY ( info . provider ) ; <nl> - } <nl> + PERIPHERAL_SAFE_DELETE_ARRAY ( info . provider ) ; <nl> + } <nl> <nl> - private : <nl> - std : : string m_provider ; <nl> - int m_requestedPort ; <nl> - unsigned int m_buttonCount = 0 ; <nl> - unsigned int m_hatCount = 0 ; <nl> - unsigned int m_axisCount = 0 ; <nl> - unsigned int m_motorCount = 0 ; <nl> - bool m_supportsPowerOff = false ; <nl> - } ; <nl> + private : <nl> + std : : string m_provider ; <nl> + int m_requestedPort ; <nl> + unsigned int m_buttonCount = 0 ; <nl> + unsigned int m_hatCount = 0 ; <nl> + unsigned int m_axisCount = 0 ; <nl> + unsigned int m_motorCount = 0 ; <nl> + bool m_supportsPowerOff = false ; <nl> + } ; <nl> <nl> - typedef PeripheralVector < Joystick , JOYSTICK_INFO > Joysticks ; <nl> + typedef PeripheralVector < Joystick , JOYSTICK_INFO > Joysticks ; <nl> <nl> + / * ! <nl> + * ADDON : : DriverPrimitive <nl> + * <nl> + * Base class for joystick driver primitives . 
A driver primitive can be : <nl> + * <nl> + * 1 ) a button <nl> + * 2 ) a hat direction <nl> + * 3 ) a semiaxis ( either the positive or negative half of an axis ) <nl> + * 4 ) a motor <nl> + * 5 ) a keyboard key <nl> + * 6 ) a mouse button <nl> + * 7 ) a relative pointer direction <nl> + * <nl> + * The type determines the fields in use : <nl> + * <nl> + * Button : <nl> + * - driver index <nl> + * <nl> + * Hat direction : <nl> + * - driver index <nl> + * - hat direction <nl> + * <nl> + * Semiaxis : <nl> + * - driver index <nl> + * - center <nl> + * - semiaxis direction <nl> + * - range <nl> + * <nl> + * Motor : <nl> + * - driver index <nl> + * <nl> + * Key : <nl> + * - key code <nl> + * <nl> + * Mouse button : <nl> + * - driver index <nl> + * <nl> + * Relative pointer direction : <nl> + * - relative pointer direction <nl> + * / <nl> + struct DriverPrimitive <nl> + { <nl> + protected : <nl> / * ! <nl> - * ADDON : : DriverPrimitive <nl> - * <nl> - * Base class for joystick driver primitives . A driver primitive can be : <nl> - * <nl> - * 1 ) a button <nl> - * 2 ) a hat direction <nl> - * 3 ) a semiaxis ( either the positive or negative half of an axis ) <nl> - * 4 ) a motor <nl> - * 5 ) a keyboard key <nl> - * 6 ) a mouse button <nl> - * 7 ) a relative pointer direction <nl> - * <nl> - * The type determines the fields in use : <nl> - * <nl> - * Button : <nl> - * - driver index <nl> - * <nl> - * Hat direction : <nl> - * - driver index <nl> - * - hat direction <nl> - * <nl> - * Semiaxis : <nl> - * - driver index <nl> - * - center <nl> - * - semiaxis direction <nl> - * - range <nl> - * <nl> - * Motor : <nl> - * - driver index <nl> - * <nl> - * Key : <nl> - * - key code <nl> - * <nl> - * Mouse button : <nl> - * - driver index <nl> - * <nl> - * Relative pointer direction : <nl> - * - relative pointer direction <nl> + * \ brief Construct a driver primitive of the specified type <nl> * / <nl> - struct DriverPrimitive <nl> - { <nl> - protected : <nl> - / * ! <nl> - * \ brief Construct a driver primitive of the specified type <nl> - * / <nl> - DriverPrimitive ( JOYSTICK_DRIVER_PRIMITIVE_TYPE type , unsigned int driverIndex ) : <nl> - m_type ( type ) , <nl> - m_driverIndex ( driverIndex ) <nl> - { <nl> - } <nl> + DriverPrimitive ( JOYSTICK_DRIVER_PRIMITIVE_TYPE type , unsigned int driverIndex ) <nl> + : m_type ( type ) , m_driverIndex ( driverIndex ) <nl> + { <nl> + } <nl> <nl> - public : <nl> - / * ! <nl> - * \ brief Construct an invalid driver primitive <nl> - * / <nl> - DriverPrimitive ( void ) = default ; <nl> + public : <nl> + / * ! <nl> + * \ brief Construct an invalid driver primitive <nl> + * / <nl> + DriverPrimitive ( void ) = default ; <nl> <nl> - / * ! <nl> - * \ brief Construct a driver primitive representing a joystick button <nl> - * / <nl> - static DriverPrimitive CreateButton ( unsigned int buttonIndex ) <nl> - { <nl> - return DriverPrimitive ( JOYSTICK_DRIVER_PRIMITIVE_TYPE_BUTTON , buttonIndex ) ; <nl> - } <nl> + / * ! <nl> + * \ brief Construct a driver primitive representing a joystick button <nl> + * / <nl> + static DriverPrimitive CreateButton ( unsigned int buttonIndex ) <nl> + { <nl> + return DriverPrimitive ( JOYSTICK_DRIVER_PRIMITIVE_TYPE_BUTTON , buttonIndex ) ; <nl> + } <nl> <nl> - / * ! 
<nl> - * \ brief Construct a driver primitive representing one of the four direction <nl> - * arrows on a dpad <nl> - * / <nl> - DriverPrimitive ( unsigned int hatIndex , JOYSTICK_DRIVER_HAT_DIRECTION direction ) : <nl> - m_type ( JOYSTICK_DRIVER_PRIMITIVE_TYPE_HAT_DIRECTION ) , <nl> + / * ! <nl> + * \ brief Construct a driver primitive representing one of the four direction <nl> + * arrows on a dpad <nl> + * / <nl> + DriverPrimitive ( unsigned int hatIndex , JOYSTICK_DRIVER_HAT_DIRECTION direction ) <nl> + : m_type ( JOYSTICK_DRIVER_PRIMITIVE_TYPE_HAT_DIRECTION ) , <nl> m_driverIndex ( hatIndex ) , <nl> m_hatDirection ( direction ) <nl> - { <nl> - } <nl> + { <nl> + } <nl> <nl> - / * ! <nl> - * \ brief Construct a driver primitive representing the positive or negative <nl> - * half of an axis <nl> - * / <nl> - DriverPrimitive ( unsigned int axisIndex , int center , JOYSTICK_DRIVER_SEMIAXIS_DIRECTION direction , unsigned int range ) : <nl> - m_type ( JOYSTICK_DRIVER_PRIMITIVE_TYPE_SEMIAXIS ) , <nl> + / * ! <nl> + * \ brief Construct a driver primitive representing the positive or negative <nl> + * half of an axis <nl> + * / <nl> + DriverPrimitive ( unsigned int axisIndex , <nl> + int center , <nl> + JOYSTICK_DRIVER_SEMIAXIS_DIRECTION direction , <nl> + unsigned int range ) <nl> + : m_type ( JOYSTICK_DRIVER_PRIMITIVE_TYPE_SEMIAXIS ) , <nl> m_driverIndex ( axisIndex ) , <nl> m_center ( center ) , <nl> m_semiAxisDirection ( direction ) , <nl> m_range ( range ) <nl> - { <nl> - } <nl> + { <nl> + } <nl> <nl> - / * ! <nl> - * \ brief Construct a driver primitive representing a motor <nl> - * / <nl> - static DriverPrimitive CreateMotor ( unsigned int motorIndex ) <nl> - { <nl> - return DriverPrimitive ( JOYSTICK_DRIVER_PRIMITIVE_TYPE_MOTOR , motorIndex ) ; <nl> - } <nl> + / * ! <nl> + * \ brief Construct a driver primitive representing a motor <nl> + * / <nl> + static DriverPrimitive CreateMotor ( unsigned int motorIndex ) <nl> + { <nl> + return DriverPrimitive ( JOYSTICK_DRIVER_PRIMITIVE_TYPE_MOTOR , motorIndex ) ; <nl> + } <nl> <nl> - / * ! <nl> - * \ brief Construct a driver primitive representing a key on a keyboard <nl> - * / <nl> - DriverPrimitive ( std : : string keycode ) : <nl> - m_type ( JOYSTICK_DRIVER_PRIMITIVE_TYPE_KEY ) , <nl> - m_keycode ( std : : move ( keycode ) ) <nl> - { <nl> - } <nl> + / * ! <nl> + * \ brief Construct a driver primitive representing a key on a keyboard <nl> + * / <nl> + DriverPrimitive ( std : : string keycode ) <nl> + : m_type ( JOYSTICK_DRIVER_PRIMITIVE_TYPE_KEY ) , m_keycode ( std : : move ( keycode ) ) <nl> + { <nl> + } <nl> <nl> - / * ! <nl> - * \ brief Construct a driver primitive representing a mouse button <nl> - * / <nl> - static DriverPrimitive CreateMouseButton ( JOYSTICK_DRIVER_MOUSE_INDEX buttonIndex ) <nl> - { <nl> - return DriverPrimitive ( JOYSTICK_DRIVER_PRIMITIVE_TYPE_MOUSE_BUTTON , static_cast < unsigned int > ( buttonIndex ) ) ; <nl> - } <nl> + / * ! <nl> + * \ brief Construct a driver primitive representing a mouse button <nl> + * / <nl> + static DriverPrimitive CreateMouseButton ( JOYSTICK_DRIVER_MOUSE_INDEX buttonIndex ) <nl> + { <nl> + return DriverPrimitive ( JOYSTICK_DRIVER_PRIMITIVE_TYPE_MOUSE_BUTTON , <nl> + static_cast < unsigned int > ( buttonIndex ) ) ; <nl> + } <nl> <nl> - / * ! 
<nl> - * \ brief Construct a driver primitive representing one of the four <nl> - * direction in which a relative pointer can move <nl> - * / <nl> - DriverPrimitive ( JOYSTICK_DRIVER_RELPOINTER_DIRECTION direction ) : <nl> - m_type ( JOYSTICK_DRIVER_PRIMITIVE_TYPE_RELPOINTER_DIRECTION ) , <nl> - m_relPointerDirection ( direction ) <nl> - { <nl> - } <nl> + / * ! <nl> + * \ brief Construct a driver primitive representing one of the four <nl> + * direction in which a relative pointer can move <nl> + * / <nl> + DriverPrimitive ( JOYSTICK_DRIVER_RELPOINTER_DIRECTION direction ) <nl> + : m_type ( JOYSTICK_DRIVER_PRIMITIVE_TYPE_RELPOINTER_DIRECTION ) , m_relPointerDirection ( direction ) <nl> + { <nl> + } <nl> <nl> - explicit DriverPrimitive ( const JOYSTICK_DRIVER_PRIMITIVE & primitive ) : <nl> - m_type ( primitive . type ) <nl> + explicit DriverPrimitive ( const JOYSTICK_DRIVER_PRIMITIVE & primitive ) : m_type ( primitive . type ) <nl> + { <nl> + switch ( m_type ) <nl> { <nl> - switch ( m_type ) <nl> + case JOYSTICK_DRIVER_PRIMITIVE_TYPE_BUTTON : <nl> { <nl> - case JOYSTICK_DRIVER_PRIMITIVE_TYPE_BUTTON : <nl> - { <nl> - m_driverIndex = primitive . button . index ; <nl> - break ; <nl> - } <nl> - case JOYSTICK_DRIVER_PRIMITIVE_TYPE_HAT_DIRECTION : <nl> - { <nl> - m_driverIndex = primitive . hat . index ; <nl> - m_hatDirection = primitive . hat . direction ; <nl> - break ; <nl> - } <nl> - case JOYSTICK_DRIVER_PRIMITIVE_TYPE_SEMIAXIS : <nl> - { <nl> - m_driverIndex = primitive . semiaxis . index ; <nl> - m_center = primitive . semiaxis . center ; <nl> - m_semiAxisDirection = primitive . semiaxis . direction ; <nl> - m_range = primitive . semiaxis . range ; <nl> - break ; <nl> - } <nl> - case JOYSTICK_DRIVER_PRIMITIVE_TYPE_MOTOR : <nl> - { <nl> - m_driverIndex = primitive . motor . index ; <nl> - break ; <nl> - } <nl> - case JOYSTICK_DRIVER_PRIMITIVE_TYPE_KEY : <nl> - { <nl> - m_keycode = primitive . key . keycode ; <nl> - break ; <nl> - } <nl> - case JOYSTICK_DRIVER_PRIMITIVE_TYPE_MOUSE_BUTTON : <nl> - { <nl> - m_driverIndex = primitive . mouse . button ; <nl> - break ; <nl> - } <nl> - case JOYSTICK_DRIVER_PRIMITIVE_TYPE_RELPOINTER_DIRECTION : <nl> - { <nl> - m_relPointerDirection = primitive . relpointer . direction ; <nl> - break ; <nl> - } <nl> - default : <nl> - break ; <nl> + m_driverIndex = primitive . button . index ; <nl> + break ; <nl> } <nl> - } <nl> - <nl> - JOYSTICK_DRIVER_PRIMITIVE_TYPE Type ( void ) const { return m_type ; } <nl> - unsigned int DriverIndex ( void ) const { return m_driverIndex ; } <nl> - JOYSTICK_DRIVER_HAT_DIRECTION HatDirection ( void ) const { return m_hatDirection ; } <nl> - int Center ( void ) const { return m_center ; } <nl> - JOYSTICK_DRIVER_SEMIAXIS_DIRECTION SemiAxisDirection ( void ) const { return m_semiAxisDirection ; } <nl> - unsigned int Range ( void ) const { return m_range ; } <nl> - const std : : string & Keycode ( void ) const { return m_keycode ; } <nl> - JOYSTICK_DRIVER_MOUSE_INDEX MouseIndex ( void ) const { return static_cast < JOYSTICK_DRIVER_MOUSE_INDEX > ( m_driverIndex ) ; } <nl> - JOYSTICK_DRIVER_RELPOINTER_DIRECTION RelPointerDirection ( void ) const { return m_relPointerDirection ; } <nl> - <nl> - bool operator = = ( const DriverPrimitive & other ) const <nl> - { <nl> - if ( m_type = = other . m_type ) <nl> + case JOYSTICK_DRIVER_PRIMITIVE_TYPE_HAT_DIRECTION : <nl> { <nl> - switch ( m_type ) <nl> - { <nl> - case JOYSTICK_DRIVER_PRIMITIVE_TYPE_BUTTON : <nl> - { <nl> - return m_driverIndex = = other . 
m_driverIndex ; <nl> - } <nl> - case JOYSTICK_DRIVER_PRIMITIVE_TYPE_HAT_DIRECTION : <nl> - { <nl> - return m_driverIndex = = other . m_driverIndex & & <nl> - m_hatDirection = = other . m_hatDirection ; <nl> - } <nl> - case JOYSTICK_DRIVER_PRIMITIVE_TYPE_SEMIAXIS : <nl> - { <nl> - return m_driverIndex = = other . m_driverIndex & & <nl> - m_center = = other . m_center & & <nl> - m_semiAxisDirection = = other . m_semiAxisDirection & & <nl> - m_range = = other . m_range ; <nl> - } <nl> - case JOYSTICK_DRIVER_PRIMITIVE_TYPE_KEY : <nl> - { <nl> - return m_keycode = = other . m_keycode ; <nl> - } <nl> - case JOYSTICK_DRIVER_PRIMITIVE_TYPE_MOTOR : <nl> - { <nl> - return m_driverIndex = = other . m_driverIndex ; <nl> - } <nl> - case JOYSTICK_DRIVER_PRIMITIVE_TYPE_MOUSE_BUTTON : <nl> - { <nl> - return m_driverIndex = = other . m_driverIndex ; <nl> - } <nl> - case JOYSTICK_DRIVER_PRIMITIVE_TYPE_RELPOINTER_DIRECTION : <nl> - { <nl> - return m_relPointerDirection = = other . m_relPointerDirection ; <nl> - } <nl> - default : <nl> - break ; <nl> - } <nl> + m_driverIndex = primitive . hat . index ; <nl> + m_hatDirection = primitive . hat . direction ; <nl> + break ; <nl> } <nl> - return false ; <nl> - } <nl> + case JOYSTICK_DRIVER_PRIMITIVE_TYPE_SEMIAXIS : <nl> + { <nl> + m_driverIndex = primitive . semiaxis . index ; <nl> + m_center = primitive . semiaxis . center ; <nl> + m_semiAxisDirection = primitive . semiaxis . direction ; <nl> + m_range = primitive . semiaxis . range ; <nl> + break ; <nl> + } <nl> + case JOYSTICK_DRIVER_PRIMITIVE_TYPE_MOTOR : <nl> + { <nl> + m_driverIndex = primitive . motor . index ; <nl> + break ; <nl> + } <nl> + case JOYSTICK_DRIVER_PRIMITIVE_TYPE_KEY : <nl> + { <nl> + m_keycode = primitive . key . keycode ; <nl> + break ; <nl> + } <nl> + case JOYSTICK_DRIVER_PRIMITIVE_TYPE_MOUSE_BUTTON : <nl> + { <nl> + m_driverIndex = primitive . mouse . button ; <nl> + break ; <nl> + } <nl> + case JOYSTICK_DRIVER_PRIMITIVE_TYPE_RELPOINTER_DIRECTION : <nl> + { <nl> + m_relPointerDirection = primitive . relpointer . direction ; <nl> + break ; <nl> + } <nl> + default : <nl> + break ; <nl> + } <nl> + } <nl> + <nl> + JOYSTICK_DRIVER_PRIMITIVE_TYPE Type ( void ) const { return m_type ; } <nl> + unsigned int DriverIndex ( void ) const { return m_driverIndex ; } <nl> + JOYSTICK_DRIVER_HAT_DIRECTION HatDirection ( void ) const { return m_hatDirection ; } <nl> + int Center ( void ) const { return m_center ; } <nl> + JOYSTICK_DRIVER_SEMIAXIS_DIRECTION SemiAxisDirection ( void ) const { return m_semiAxisDirection ; } <nl> + unsigned int Range ( void ) const { return m_range ; } <nl> + const std : : string & Keycode ( void ) const { return m_keycode ; } <nl> + JOYSTICK_DRIVER_MOUSE_INDEX MouseIndex ( void ) const <nl> + { <nl> + return static_cast < JOYSTICK_DRIVER_MOUSE_INDEX > ( m_driverIndex ) ; <nl> + } <nl> + JOYSTICK_DRIVER_RELPOINTER_DIRECTION RelPointerDirection ( void ) const <nl> + { <nl> + return m_relPointerDirection ; <nl> + } <nl> <nl> - void ToStruct ( JOYSTICK_DRIVER_PRIMITIVE & driver_primitive ) const <nl> + bool operator = = ( const DriverPrimitive & other ) const <nl> + { <nl> + if ( m_type = = other . m_type ) <nl> { <nl> - driver_primitive . type = m_type ; <nl> switch ( m_type ) <nl> { <nl> case JOYSTICK_DRIVER_PRIMITIVE_TYPE_BUTTON : <nl> { <nl> - driver_primitive . button . index = m_driverIndex ; <nl> - break ; <nl> + return m_driverIndex = = other . m_driverIndex ; <nl> } <nl> case JOYSTICK_DRIVER_PRIMITIVE_TYPE_HAT_DIRECTION : <nl> { <nl> - driver_primitive . hat . 
index = m_driverIndex ; <nl> - driver_primitive . hat . direction = m_hatDirection ; <nl> - break ; <nl> + return m_driverIndex = = other . m_driverIndex & & m_hatDirection = = other . m_hatDirection ; <nl> } <nl> case JOYSTICK_DRIVER_PRIMITIVE_TYPE_SEMIAXIS : <nl> { <nl> - driver_primitive . semiaxis . index = m_driverIndex ; <nl> - driver_primitive . semiaxis . center = m_center ; <nl> - driver_primitive . semiaxis . direction = m_semiAxisDirection ; <nl> - driver_primitive . semiaxis . range = m_range ; <nl> - break ; <nl> + return m_driverIndex = = other . m_driverIndex & & m_center = = other . m_center & & <nl> + m_semiAxisDirection = = other . m_semiAxisDirection & & m_range = = other . m_range ; <nl> } <nl> - case JOYSTICK_DRIVER_PRIMITIVE_TYPE_MOTOR : <nl> + case JOYSTICK_DRIVER_PRIMITIVE_TYPE_KEY : <nl> { <nl> - driver_primitive . motor . index = m_driverIndex ; <nl> - break ; <nl> + return m_keycode = = other . m_keycode ; <nl> } <nl> - case JOYSTICK_DRIVER_PRIMITIVE_TYPE_KEY : <nl> + case JOYSTICK_DRIVER_PRIMITIVE_TYPE_MOTOR : <nl> { <nl> - const size_t size = sizeof ( driver_primitive . key . keycode ) ; <nl> - std : : strncpy ( driver_primitive . key . keycode , m_keycode . c_str ( ) , size - 1 ) ; <nl> - driver_primitive . key . keycode [ size - 1 ] = ' \ 0 ' ; <nl> - break ; <nl> + return m_driverIndex = = other . m_driverIndex ; <nl> } <nl> case JOYSTICK_DRIVER_PRIMITIVE_TYPE_MOUSE_BUTTON : <nl> { <nl> - driver_primitive . mouse . button = static_cast < JOYSTICK_DRIVER_MOUSE_INDEX > ( m_driverIndex ) ; <nl> - break ; <nl> + return m_driverIndex = = other . m_driverIndex ; <nl> } <nl> case JOYSTICK_DRIVER_PRIMITIVE_TYPE_RELPOINTER_DIRECTION : <nl> { <nl> - driver_primitive . relpointer . direction = m_relPointerDirection ; <nl> - break ; <nl> + return m_relPointerDirection = = other . m_relPointerDirection ; <nl> } <nl> default : <nl> break ; <nl> } <nl> } <nl> + return false ; <nl> + } <nl> <nl> - static void FreeStruct ( JOYSTICK_DRIVER_PRIMITIVE & primitive ) <nl> + void ToStruct ( JOYSTICK_DRIVER_PRIMITIVE & driver_primitive ) const <nl> + { <nl> + driver_primitive . type = m_type ; <nl> + switch ( m_type ) <nl> { <nl> - ( void ) primitive ; <nl> + case JOYSTICK_DRIVER_PRIMITIVE_TYPE_BUTTON : <nl> + { <nl> + driver_primitive . button . index = m_driverIndex ; <nl> + break ; <nl> + } <nl> + case JOYSTICK_DRIVER_PRIMITIVE_TYPE_HAT_DIRECTION : <nl> + { <nl> + driver_primitive . hat . index = m_driverIndex ; <nl> + driver_primitive . hat . direction = m_hatDirection ; <nl> + break ; <nl> + } <nl> + case JOYSTICK_DRIVER_PRIMITIVE_TYPE_SEMIAXIS : <nl> + { <nl> + driver_primitive . semiaxis . index = m_driverIndex ; <nl> + driver_primitive . semiaxis . center = m_center ; <nl> + driver_primitive . semiaxis . direction = m_semiAxisDirection ; <nl> + driver_primitive . semiaxis . range = m_range ; <nl> + break ; <nl> + } <nl> + case JOYSTICK_DRIVER_PRIMITIVE_TYPE_MOTOR : <nl> + { <nl> + driver_primitive . motor . index = m_driverIndex ; <nl> + break ; <nl> + } <nl> + case JOYSTICK_DRIVER_PRIMITIVE_TYPE_KEY : <nl> + { <nl> + const size_t size = sizeof ( driver_primitive . key . keycode ) ; <nl> + std : : strncpy ( driver_primitive . key . keycode , m_keycode . c_str ( ) , size - 1 ) ; <nl> + driver_primitive . key . keycode [ size - 1 ] = ' \ 0 ' ; <nl> + break ; <nl> + } <nl> + case JOYSTICK_DRIVER_PRIMITIVE_TYPE_MOUSE_BUTTON : <nl> + { <nl> + driver_primitive . mouse . 
button = static_cast < JOYSTICK_DRIVER_MOUSE_INDEX > ( m_driverIndex ) ; <nl> + break ; <nl> + } <nl> + case JOYSTICK_DRIVER_PRIMITIVE_TYPE_RELPOINTER_DIRECTION : <nl> + { <nl> + driver_primitive . relpointer . direction = m_relPointerDirection ; <nl> + break ; <nl> + } <nl> + default : <nl> + break ; <nl> } <nl> + } <nl> <nl> - private : <nl> - JOYSTICK_DRIVER_PRIMITIVE_TYPE m_type = JOYSTICK_DRIVER_PRIMITIVE_TYPE_UNKNOWN ; <nl> - unsigned int m_driverIndex = 0 ; <nl> - JOYSTICK_DRIVER_HAT_DIRECTION m_hatDirection = JOYSTICK_DRIVER_HAT_UNKNOWN ; <nl> - int m_center = 0 ; <nl> - JOYSTICK_DRIVER_SEMIAXIS_DIRECTION m_semiAxisDirection = JOYSTICK_DRIVER_SEMIAXIS_UNKNOWN ; <nl> - unsigned int m_range = 1 ; <nl> - std : : string m_keycode ; <nl> - JOYSTICK_DRIVER_RELPOINTER_DIRECTION m_relPointerDirection = JOYSTICK_DRIVER_RELPOINTER_UNKNOWN ; <nl> - } ; <nl> + static void FreeStruct ( JOYSTICK_DRIVER_PRIMITIVE & primitive ) { ( void ) primitive ; } <nl> <nl> - typedef PeripheralVector < DriverPrimitive , JOYSTICK_DRIVER_PRIMITIVE > DriverPrimitives ; <nl> + private : <nl> + JOYSTICK_DRIVER_PRIMITIVE_TYPE m_type = JOYSTICK_DRIVER_PRIMITIVE_TYPE_UNKNOWN ; <nl> + unsigned int m_driverIndex = 0 ; <nl> + JOYSTICK_DRIVER_HAT_DIRECTION m_hatDirection = JOYSTICK_DRIVER_HAT_UNKNOWN ; <nl> + int m_center = 0 ; <nl> + JOYSTICK_DRIVER_SEMIAXIS_DIRECTION m_semiAxisDirection = JOYSTICK_DRIVER_SEMIAXIS_UNKNOWN ; <nl> + unsigned int m_range = 1 ; <nl> + std : : string m_keycode ; <nl> + JOYSTICK_DRIVER_RELPOINTER_DIRECTION m_relPointerDirection = JOYSTICK_DRIVER_RELPOINTER_UNKNOWN ; <nl> + } ; <nl> <nl> - / * ! <nl> - * kodi : : addon : : JoystickFeature <nl> - * <nl> - * Class for joystick features . A feature can be : <nl> - * <nl> - * 1 ) scalar [ 1 ] <nl> - * 2 ) analog stick <nl> - * 3 ) accelerometer <nl> - * 4 ) motor <nl> - * 5 ) relative pointer [ 2 ] <nl> - * 6 ) absolute pointer <nl> - * 7 ) wheel <nl> - * 8 ) throttle <nl> - * 9 ) keyboard key <nl> - * <nl> - * [ 1 ] All three driver primitives ( buttons , hats and axes ) have a state that <nl> - * can be represented using a single scalar value . For this reason , <nl> - * features that map to a single primitive are called " scalar features " . <nl> - * <nl> - * [ 2 ] Relative pointers are similar to analog sticks , but they use <nl> - * relative distances instead of positions . <nl> - * / <nl> - class JoystickFeature <nl> + typedef PeripheralVector < DriverPrimitive , JOYSTICK_DRIVER_PRIMITIVE > DriverPrimitives ; <nl> + <nl> + / * ! <nl> + * kodi : : addon : : JoystickFeature <nl> + * <nl> + * Class for joystick features . A feature can be : <nl> + * <nl> + * 1 ) scalar [ 1 ] <nl> + * 2 ) analog stick <nl> + * 3 ) accelerometer <nl> + * 4 ) motor <nl> + * 5 ) relative pointer [ 2 ] <nl> + * 6 ) absolute pointer <nl> + * 7 ) wheel <nl> + * 8 ) throttle <nl> + * 9 ) keyboard key <nl> + * <nl> + * [ 1 ] All three driver primitives ( buttons , hats and axes ) have a state that <nl> + * can be represented using a single scalar value . For this reason , <nl> + * features that map to a single primitive are called " scalar features " . <nl> + * <nl> + * [ 2 ] Relative pointers are similar to analog sticks , but they use <nl> + * relative distances instead of positions . 
<nl> + * / <nl> + class JoystickFeature <nl> + { <nl> + public : <nl> + JoystickFeature ( const std : : string & name = " " , <nl> + JOYSTICK_FEATURE_TYPE type = JOYSTICK_FEATURE_TYPE_UNKNOWN ) <nl> + : m_name ( name ) , m_type ( type ) , m_primitives { } <nl> { <nl> - public : <nl> - JoystickFeature ( const std : : string & name = " " , JOYSTICK_FEATURE_TYPE type = JOYSTICK_FEATURE_TYPE_UNKNOWN ) : <nl> - m_name ( name ) , <nl> - m_type ( type ) , <nl> - m_primitives { } <nl> - { <nl> - } <nl> + } <nl> <nl> - JoystickFeature ( const JoystickFeature & other ) <nl> - { <nl> - * this = other ; <nl> - } <nl> + JoystickFeature ( const JoystickFeature & other ) { * this = other ; } <nl> <nl> - explicit JoystickFeature ( const JOYSTICK_FEATURE & feature ) : <nl> - m_name ( feature . name ? feature . name : " " ) , <nl> - m_type ( feature . type ) <nl> - { <nl> - for ( unsigned int i = 0 ; i < JOYSTICK_PRIMITIVE_MAX ; i + + ) <nl> - m_primitives [ i ] = DriverPrimitive ( feature . primitives [ i ] ) ; <nl> - } <nl> + explicit JoystickFeature ( const JOYSTICK_FEATURE & feature ) <nl> + : m_name ( feature . name ? feature . name : " " ) , m_type ( feature . type ) <nl> + { <nl> + for ( unsigned int i = 0 ; i < JOYSTICK_PRIMITIVE_MAX ; i + + ) <nl> + m_primitives [ i ] = DriverPrimitive ( feature . primitives [ i ] ) ; <nl> + } <nl> <nl> - JoystickFeature & operator = ( const JoystickFeature & rhs ) <nl> + JoystickFeature & operator = ( const JoystickFeature & rhs ) <nl> + { <nl> + if ( this ! = & rhs ) <nl> { <nl> - if ( this ! = & rhs ) <nl> - { <nl> - m_name = rhs . m_name ; <nl> - m_type = rhs . m_type ; <nl> - m_primitives = rhs . m_primitives ; <nl> - } <nl> - return * this ; <nl> + m_name = rhs . m_name ; <nl> + m_type = rhs . m_type ; <nl> + m_primitives = rhs . m_primitives ; <nl> } <nl> + return * this ; <nl> + } <nl> <nl> - bool operator = = ( const JoystickFeature & other ) const <nl> - { <nl> - return m_name = = other . m_name & & <nl> - m_type = = other . m_type & & <nl> - m_primitives = = other . m_primitives ; <nl> - } <nl> + bool operator = = ( const JoystickFeature & other ) const <nl> + { <nl> + return m_name = = other . m_name & & m_type = = other . m_type & & m_primitives = = other . m_primitives ; <nl> + } <nl> <nl> - const std : : string & Name ( void ) const { return m_name ; } <nl> - JOYSTICK_FEATURE_TYPE Type ( void ) const { return m_type ; } <nl> - bool IsValid ( ) const { return m_type ! = JOYSTICK_FEATURE_TYPE_UNKNOWN ; } <nl> + const std : : string & Name ( void ) const { return m_name ; } <nl> + JOYSTICK_FEATURE_TYPE Type ( void ) const { return m_type ; } <nl> + bool IsValid ( ) const { return m_type ! 
= JOYSTICK_FEATURE_TYPE_UNKNOWN ; } <nl> <nl> - void SetName ( const std : : string & name ) { m_name = name ; } <nl> - void SetType ( JOYSTICK_FEATURE_TYPE type ) { m_type = type ; } <nl> - void SetInvalid ( void ) { m_type = JOYSTICK_FEATURE_TYPE_UNKNOWN ; } <nl> + void SetName ( const std : : string & name ) { m_name = name ; } <nl> + void SetType ( JOYSTICK_FEATURE_TYPE type ) { m_type = type ; } <nl> + void SetInvalid ( void ) { m_type = JOYSTICK_FEATURE_TYPE_UNKNOWN ; } <nl> <nl> - const DriverPrimitive & Primitive ( JOYSTICK_FEATURE_PRIMITIVE which ) const { return m_primitives [ which ] ; } <nl> - void SetPrimitive ( JOYSTICK_FEATURE_PRIMITIVE which , const DriverPrimitive & primitive ) { m_primitives [ which ] = primitive ; } <nl> + const DriverPrimitive & Primitive ( JOYSTICK_FEATURE_PRIMITIVE which ) const <nl> + { <nl> + return m_primitives [ which ] ; <nl> + } <nl> + void SetPrimitive ( JOYSTICK_FEATURE_PRIMITIVE which , const DriverPrimitive & primitive ) <nl> + { <nl> + m_primitives [ which ] = primitive ; <nl> + } <nl> <nl> - std : : array < DriverPrimitive , JOYSTICK_PRIMITIVE_MAX > & Primitives ( ) { return m_primitives ; } <nl> - const std : : array < DriverPrimitive , JOYSTICK_PRIMITIVE_MAX > & Primitives ( ) const { return m_primitives ; } <nl> + std : : array < DriverPrimitive , JOYSTICK_PRIMITIVE_MAX > & Primitives ( ) { return m_primitives ; } <nl> + const std : : array < DriverPrimitive , JOYSTICK_PRIMITIVE_MAX > & Primitives ( ) const <nl> + { <nl> + return m_primitives ; <nl> + } <nl> <nl> - void ToStruct ( JOYSTICK_FEATURE & feature ) const <nl> - { <nl> - feature . name = new char [ m_name . length ( ) + 1 ] ; <nl> - feature . type = m_type ; <nl> - for ( unsigned int i = 0 ; i < JOYSTICK_PRIMITIVE_MAX ; i + + ) <nl> - m_primitives [ i ] . ToStruct ( feature . primitives [ i ] ) ; <nl> + void ToStruct ( JOYSTICK_FEATURE & feature ) const <nl> + { <nl> + feature . name = new char [ m_name . length ( ) + 1 ] ; <nl> + feature . type = m_type ; <nl> + for ( unsigned int i = 0 ; i < JOYSTICK_PRIMITIVE_MAX ; i + + ) <nl> + m_primitives [ i ] . ToStruct ( feature . primitives [ i ] ) ; <nl> <nl> - std : : strcpy ( feature . name , m_name . c_str ( ) ) ; <nl> - } <nl> + std : : strcpy ( feature . name , m_name . c_str ( ) ) ; <nl> + } <nl> <nl> - static void FreeStruct ( JOYSTICK_FEATURE & feature ) <nl> - { <nl> - PERIPHERAL_SAFE_DELETE_ARRAY ( feature . name ) ; <nl> - } <nl> + static void FreeStruct ( JOYSTICK_FEATURE & feature ) { PERIPHERAL_SAFE_DELETE_ARRAY ( feature . name ) ; } <nl> <nl> - private : <nl> - std : : string m_name ; <nl> - JOYSTICK_FEATURE_TYPE m_type ; <nl> - std : : array < DriverPrimitive , JOYSTICK_PRIMITIVE_MAX > m_primitives ; <nl> - } ; <nl> + private : <nl> + std : : string m_name ; <nl> + JOYSTICK_FEATURE_TYPE m_type ; <nl> + std : : array < DriverPrimitive , JOYSTICK_PRIMITIVE_MAX > m_primitives ; <nl> + } ; <nl> <nl> - typedef PeripheralVector < JoystickFeature , JOYSTICK_FEATURE > JoystickFeatures ; <nl> + typedef PeripheralVector < JoystickFeature , JOYSTICK_FEATURE > JoystickFeatures ; <nl> <nl> } / * namespace addon * / <nl> } / * namespace kodi * / <nl>
[ addons ] [ peripheral ] clang clean up on addon headers
xbmc/xbmc
8e7169c1c975b48e7d9df92254f035a8e27a4c99
2020-09-06T16:26:47Z
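The clang-format pass in the entry above rewraps the PERIPHERAL_SAFE_DELETE macros onto multiple lines but keeps their do { ... } while (0) wrapper. That idiom is what lets a multi-statement macro behave as a single statement, so it stays safe under an unbraced if/else. A minimal C++ sketch of the failure mode it prevents (the SAFE_DELETE name is illustrative, not part of the Kodi headers):

    // The do { ... } while (0) wrapper turns the expansion into exactly one
    // statement, so the caller's trailing ';' closes the while (0) and an
    // unbraced if/else still parses as intended.
    #define SAFE_DELETE(x) \
      do \
      { \
        delete (x); \
        (x) = nullptr; \
      } while (0)

    int main()
    {
      int* p = new int(42);
      if (p != nullptr)
        SAFE_DELETE(p); // one statement; the 'else' below still binds to the 'if'
      else
        return 1;
      return p == nullptr ? 0 : 1;
    }

Had the macro expanded to a bare { delete (x); (x) = nullptr; } block, the semicolon after SAFE_DELETE(p) would terminate the if early and the else branch would no longer compile.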
similarity index 99 % <nl> rename from code / unclassified / minSubarraySizeWithDegree / minSubarraySizeWithDegree . cpp <nl> rename to code / unclassified / minimum_subarray_size_with_degree / minSubarraySizeWithDegree . cpp <nl> mmm a / code / unclassified / minSubarraySizeWithDegree / minSubarraySizeWithDegree . cpp <nl> ppp b / code / unclassified / minimum_subarray_size_with_degree / minSubarraySizeWithDegree . cpp <nl> int main ( ) { <nl> <nl> <nl> return 0 ; <nl> - } <nl> \ No newline at end of file <nl> + } <nl>
fixed location
OpenGenus/cosmos
68976b806a212504bdc0cef7ff3391b933530a07
2017-10-17T23:24:56Z
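Besides moving the file into the repository's snake_case directory layout, the diff above also terminates the source with a newline, clearing git's "\ No newline at end of file" marker. POSIX tools treat a line as complete only when it ends in a newline, which is why unterminated files keep showing up as noise in later diffs. An illustrative shell check, using the file name from the entry:

    # Print the file's final byte; a properly terminated text file ends in 0a ('\n')
    tail -c 1 minSubarraySizeWithDegree.cpp | xxd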
mmm a / test / sanitizer_suppressions / tsan <nl> ppp b / test / sanitizer_suppressions / tsan <nl> race : zmq : : * <nl> race : bitcoin - qt <nl> <nl> # deadlock ( TODO fix ) <nl> - deadlock : CConnman : : ForNode <nl> - deadlock : CConnman : : GetNodeStats <nl> deadlock : CChainState : : ConnectTip <nl> - deadlock : UpdateTip <nl> - <nl> - # WalletBatch ( unidentified deadlock ) <nl> - deadlock : WalletBatch <nl> <nl> # Intentional deadlock in tests <nl> deadlock : TestPotentialDeadLockDetected <nl>
Merge : Drop some TSan suppressions
bitcoin/bitcoin
98de9eb1aa84e712c3f4add0cf5602444dfa6dfd
2020-12-22T20:55:22Z
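The entry above deletes suppressions whose underlying lock-order issues have since been fixed, shrinking test/sanitizer_suppressions/tsan to the known-intentional cases. For context, a ThreadSanitizer suppressions file is plain text with one type:pattern rule per line, matched against symbol names in a report, and is handed to the runtime through TSAN_OPTIONS. A minimal, hypothetical example (the ThirdPartyLogger symbol is illustrative):

    # Known benign race in an external logging library
    race:ThirdPartyLogger::Flush
    # Deadlock intentionally triggered by a unit test
    deadlock:TestPotentialDeadLockDetected

    # Illustrative invocation of a TSan-instrumented binary with the file applied
    TSAN_OPTIONS="suppressions=test/sanitizer_suppressions/tsan" ./test_bitcoin

Dropping a suppression, as the diff does, means any regression of the fixed deadlocks will fail sanitizer CI again instead of being silently ignored.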
mmm a / lib / Sema / TypeCheckProtocol . cpp <nl> ppp b / lib / Sema / TypeCheckProtocol . cpp <nl> ResolveWitnessResult ConformanceChecker : : resolveTypeWitnessViaLookup ( <nl> SmallVector < std : : pair < TypeDecl * , CheckTypeWitnessResult > , 2 > nonViable ; <nl> for ( auto candidate : candidates ) { <nl> / / Skip nested generic types . <nl> - if ( auto * genericDecl = dyn_cast < GenericTypeDecl > ( candidate . Member ) ) <nl> + if ( auto * genericDecl = dyn_cast < GenericTypeDecl > ( candidate . Member ) ) { <nl> + / / If the declaration has generic parameters , it cannot witness an <nl> + / / associated type . <nl> if ( genericDecl - > isGeneric ( ) ) <nl> continue ; <nl> <nl> + / / As a narrow fix for a source compatibility issue with SwiftUI ' s <nl> + / / swiftinterface , allow the conformance if the underlying type of <nl> + / / the typealias is Never . <nl> + / / <nl> + / / FIXME : This should be conditionalized on a new language version . <nl> + bool skipRequirementCheck = false ; <nl> + if ( auto * typeAliasDecl = dyn_cast < TypeAliasDecl > ( candidate . Member ) ) { <nl> + if ( typeAliasDecl - > getUnderlyingType ( ) - > isUninhabited ( ) ) <nl> + skipRequirementCheck = true ; <nl> + } <nl> + <nl> + / / If the type comes from a constrained extension or has a ' where ' <nl> + / / clause , check those requirements now . <nl> + if ( ! skipRequirementCheck & & <nl> + ! TypeChecker : : checkContextualRequirements ( genericDecl , Adoptee , <nl> + SourceLoc ( ) , DC ) ) { <nl> + continue ; <nl> + } <nl> + } <nl> + <nl> / / Skip typealiases with an unbound generic type as their underlying type . <nl> if ( auto * typeAliasDecl = dyn_cast < TypeAliasDecl > ( candidate . Member ) ) <nl> if ( typeAliasDecl - > getDeclaredInterfaceType ( ) - > is < UnboundGenericType > ( ) ) <nl> ResolveWitnessResult ConformanceChecker : : resolveTypeWitnessViaLookup ( <nl> / / If there is a single viable candidate , form a substitution for it . <nl> if ( viable . size ( ) = = 1 ) { <nl> auto interfaceType = viable . front ( ) . MemberType ; <nl> - if ( interfaceType - > hasArchetype ( ) ) <nl> - interfaceType = interfaceType - > mapTypeOutOfContext ( ) ; <nl> recordTypeWitness ( assocType , interfaceType , viable . front ( ) . Member ) ; <nl> return ResolveWitnessResult : : Success ; <nl> } <nl> mmm a / lib / Sema / TypeCheckType . cpp <nl> ppp b / lib / Sema / TypeCheckType . cpp <nl> static bool isPointerToVoid ( ASTContext & Ctx , Type Ty , bool & IsMutable ) { <nl> return BGT - > getGenericArgs ( ) . front ( ) - > isVoid ( ) ; <nl> } <nl> <nl> - static Type checkContextualRequirements ( Type type , <nl> - SourceLoc loc , <nl> - DeclContext * dc ) { <nl> - / / Even if the type is not generic , it might be inside of a generic <nl> - / / context , so we need to check requirements . <nl> - GenericTypeDecl * decl ; <nl> - Type parentTy ; <nl> - if ( auto * aliasTy = dyn_cast < TypeAliasType > ( type . getPointer ( ) ) ) { <nl> - decl = aliasTy - > getDecl ( ) ; <nl> - parentTy = aliasTy - > getParent ( ) ; <nl> - } else if ( auto * nominalTy = type - > getAs < NominalType > ( ) ) { <nl> - decl = nominalTy - > getDecl ( ) ; <nl> - parentTy = nominalTy - > getParent ( ) ; <nl> - } else { <nl> - return type ; <nl> - } <nl> - <nl> + / / / Even if the type is not generic , it might be inside of a generic <nl> + / / / context or have a free - standing ' where ' clause , so we need to <nl> + / / / those check requirements too . <nl> + / / / <nl> + / / / Return true on success . 
<nl> + bool TypeChecker : : checkContextualRequirements ( GenericTypeDecl * decl , <nl> + Type parentTy , <nl> + SourceLoc loc , <nl> + DeclContext * dc ) { <nl> if ( ! parentTy | | parentTy - > hasUnboundGenericType ( ) | | <nl> parentTy - > hasTypeVariable ( ) ) { <nl> - return type ; <nl> + return true ; <nl> } <nl> <nl> auto & ctx = dc - > getASTContext ( ) ; <nl> static Type checkContextualRequirements ( Type type , <nl> else if ( ext & & ext - > isConstrainedExtension ( ) ) <nl> noteLoc = ext - > getLoc ( ) ; <nl> else <nl> - return type ; <nl> + return true ; <nl> <nl> if ( noteLoc . isInvalid ( ) ) <nl> noteLoc = loc ; <nl> static Type checkContextualRequirements ( Type type , <nl> const auto subMap = parentTy - > getContextSubstitutions ( decl - > getDeclContext ( ) ) ; <nl> const auto genericSig = decl - > getGenericSignature ( ) ; <nl> if ( ! genericSig ) { <nl> - ctx . Diags . diagnose ( loc , diag : : recursive_decl_reference , <nl> - decl - > getDescriptiveKind ( ) , decl - > getName ( ) ) ; <nl> - decl - > diagnose ( diag : : kind_declared_here , DescriptiveDeclKind : : Type ) ; <nl> - return ErrorType : : get ( ctx ) ; <nl> + if ( loc . isValid ( ) ) { <nl> + ctx . Diags . diagnose ( loc , diag : : recursive_decl_reference , <nl> + decl - > getDescriptiveKind ( ) , decl - > getName ( ) ) ; <nl> + decl - > diagnose ( diag : : kind_declared_here , DescriptiveDeclKind : : Type ) ; <nl> + } <nl> + return false ; <nl> } <nl> <nl> const auto result = <nl> TypeChecker : : checkGenericArguments ( <nl> - dc , loc , noteLoc , type , <nl> + dc , loc , noteLoc , <nl> + decl - > getDeclaredInterfaceType ( ) , <nl> genericSig - > getGenericParams ( ) , <nl> genericSig - > getRequirements ( ) , <nl> QueryTypeSubstitutionMap { subMap } ) ; <nl> static Type checkContextualRequirements ( Type type , <nl> switch ( result ) { <nl> case RequirementCheckResult : : Failure : <nl> case RequirementCheckResult : : SubstitutionFailure : <nl> - return ErrorType : : get ( ctx ) ; <nl> + return false ; <nl> case RequirementCheckResult : : Success : <nl> - return type ; <nl> + return true ; <nl> } <nl> llvm_unreachable ( " invalid requirement check type " ) ; <nl> } <nl> static Type applyGenericArguments ( Type type , TypeResolution resolution , <nl> if ( resolution . getStage ( ) = = TypeResolutionStage : : Structural ) <nl> return type ; <nl> <nl> - return checkContextualRequirements ( type , loc , dc ) ; <nl> + GenericTypeDecl * decl ; <nl> + Type parentTy ; <nl> + if ( auto * aliasTy = dyn_cast < TypeAliasType > ( type . getPointer ( ) ) ) { <nl> + decl = aliasTy - > getDecl ( ) ; <nl> + parentTy = aliasTy - > getParent ( ) ; <nl> + } else if ( auto * nominalTy = type - > getAs < NominalType > ( ) ) { <nl> + decl = nominalTy - > getDecl ( ) ; <nl> + parentTy = nominalTy - > getParent ( ) ; <nl> + } else { <nl> + return type ; <nl> + } <nl> + <nl> + if ( TypeChecker : : checkContextualRequirements ( decl , parentTy , loc , dc ) ) <nl> + return type ; <nl> + <nl> + return ErrorType : : get ( resolution . getASTContext ( ) ) ; <nl> } <nl> <nl> if ( type - > hasError ( ) ) { <nl> mmm a / lib / Sema / TypeChecker . h <nl> ppp b / lib / Sema / TypeChecker . 
h <nl> RequirementCheckResult checkGenericArguments ( <nl> ArrayRef < Requirement > requirements , TypeSubstitutionFn substitutions , <nl> SubstOptions options = None ) ; <nl> <nl> + bool checkContextualRequirements ( GenericTypeDecl * decl , <nl> + Type parentTy , <nl> + SourceLoc loc , <nl> + DeclContext * dc ) ; <nl> + <nl> / / / Add any implicitly - defined constructors required for the given <nl> / / / struct or class . <nl> void addImplicitConstructors ( NominalTypeDecl * typeDecl ) ; <nl> mmm a / test / Constraints / conditionally_defined_types . swift <nl> ppp b / test / Constraints / conditionally_defined_types . swift <nl> let _ = SameType < X > . Decl3 . self <nl> let _ = SameType < X > . Decl4 < X > . self <nl> let _ = SameType < X > . Decl5 < X > . self <nl> <nl> - let _ = SameType < Y > . TypeAlias1 . self / / expected - error { { ' SameType < Y > . TypeAlias1 ' ( aka ' X ' ) requires the types ' Y ' and ' X ' be equivalent } } <nl> - let _ = SameType < Y > . TypeAlias2 . self / / expected - error { { ' SameType < Y > . TypeAlias2 ' ( aka ' Y ' ) requires the types ' Y ' and ' X ' be equivalent } } <nl> + let _ = SameType < Y > . TypeAlias1 . self / / expected - error { { ' SameType < T > . TypeAlias1 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> + let _ = SameType < Y > . TypeAlias2 . self / / expected - error { { ' SameType < T > . TypeAlias2 ' ( aka ' Y ' ) requires the types ' Y ' and ' X ' be equivalent } } <nl> let _ = SameType < Y > . TypeAlias3 < X > . self / / expected - error { { ' SameType < Y > . TypeAlias3 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> - let _ = SameType < Y > . Decl1 . self / / expected - error { { ' SameType < Y > . Decl1 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> - let _ = SameType < Y > . Decl2 . self / / expected - error { { ' SameType < Y > . Decl2 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> - let _ = SameType < Y > . Decl3 . self / / expected - error { { ' SameType < Y > . Decl3 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> + let _ = SameType < Y > . Decl1 . self / / expected - error { { ' SameType < T > . Decl1 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> + let _ = SameType < Y > . Decl2 . self / / expected - error { { ' SameType < T > . Decl2 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> + let _ = SameType < Y > . Decl3 . self / / expected - error { { ' SameType < T > . Decl3 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> let _ = SameType < Y > . Decl4 < X > . self / / expected - error { { ' SameType < Y > . Decl4 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> let _ = SameType < Y > . Decl5 < X > . self / / expected - error { { ' SameType < Y > . Decl5 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> <nl> extension SameType : AssociatedType where T = = X { } <nl> / / expected - note @ - 1 { { requirement specified as ' T ' = = ' X ' [ with T = Y ] } } <nl> <nl> let _ = SameType < X > . T . self <nl> - let _ = SameType < Y > . T . self / / expected - error { { ' SameType < Y > . T ' ( aka ' X ' ) requires the types ' Y ' and ' X ' be equivalent } } <nl> + let _ = SameType < Y > . T . self / / expected - error { { ' SameType < T > . T ' ( aka ' X ' ) requires the types ' Y ' and ' X ' be equivalent } } <nl> <nl> <nl> struct Conforms < T > { } <nl> let _ = SameType < X > . Decl1 . Decl3 . self <nl> let _ = SameType < X > . Decl1 . Decl4 < X > . self <nl> let _ = SameType < X > . Decl1 . Decl5 < X > . 
self <nl> <nl> - let _ = SameType < Y > . Decl1 . TypeAlias1 . self / / expected - error { { ' SameType < Y > . Decl1 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> - let _ = SameType < Y > . Decl1 . TypeAlias2 . self / / expected - error { { ' SameType < Y > . Decl1 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> - let _ = SameType < Y > . Decl1 . TypeAlias3 < X > . self / / expected - error { { ' SameType < Y > . Decl1 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> - let _ = SameType < Y > . Decl1 . Decl1 . self / / expected - error { { ' SameType < Y > . Decl1 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> - let _ = SameType < Y > . Decl1 . Decl2 . self / / expected - error { { ' SameType < Y > . Decl1 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> - let _ = SameType < Y > . Decl1 . Decl3 . self / / expected - error { { ' SameType < Y > . Decl1 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> - let _ = SameType < Y > . Decl1 . Decl4 < X > . self / / expected - error { { ' SameType < Y > . Decl1 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> - let _ = SameType < Y > . Decl1 . Decl5 < X > . self / / expected - error { { ' SameType < Y > . Decl1 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> + let _ = SameType < Y > . Decl1 . TypeAlias1 . self / / expected - error { { ' SameType < T > . Decl1 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> + let _ = SameType < Y > . Decl1 . TypeAlias2 . self / / expected - error { { ' SameType < T > . Decl1 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> + let _ = SameType < Y > . Decl1 . TypeAlias3 < X > . self / / expected - error { { ' SameType < T > . Decl1 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> + let _ = SameType < Y > . Decl1 . Decl1 . self / / expected - error { { ' SameType < T > . Decl1 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> + let _ = SameType < Y > . Decl1 . Decl2 . self / / expected - error { { ' SameType < T > . Decl1 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> + let _ = SameType < Y > . Decl1 . Decl3 . self / / expected - error { { ' SameType < T > . Decl1 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> + let _ = SameType < Y > . Decl1 . Decl4 < X > . self / / expected - error { { ' SameType < T > . Decl1 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> + let _ = SameType < Y > . Decl1 . Decl5 < X > . self / / expected - error { { ' SameType < T > . Decl1 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> <nl> extension SameType . Decl4 where U = = X { / / expected - note 5 { { requirement specified as ' U ' = = ' X ' [ with U = Y ] } } <nl> typealias TypeAlias1 = T <nl> let _ = SameType < X > . Decl4 < X > . Decl3 . self <nl> let _ = SameType < X > . Decl4 < X > . Decl4 < X > . self <nl> let _ = SameType < X > . Decl4 < X > . Decl5 < X > . self <nl> <nl> - let _ = SameType < X > . Decl4 < Y > . TypeAlias1 . self / / expected - error { { ' SameType < X > . Decl4 < Y > . TypeAlias1 ' ( aka ' X ' ) requires the types ' Y ' and ' X ' be equivalent } } <nl> - let _ = SameType < X > . Decl4 < Y > . TypeAlias2 . self / / expected - error { { ' SameType < X > . Decl4 < Y > . TypeAlias2 ' ( aka ' Y ' ) requires the types ' Y ' and ' X ' be equivalent } } <nl> + let _ = SameType < X > . Decl4 < Y > . TypeAlias1 . self / / expected - error { { ' SameType < T > . Decl4 < U > . 
TypeAlias1 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> + let _ = SameType < X > . Decl4 < Y > . TypeAlias2 . self / / expected - error { { ' SameType < T > . Decl4 < U > . TypeAlias2 ' ( aka ' Y ' ) requires the types ' Y ' and ' X ' be equivalent } } <nl> let _ = SameType < X > . Decl4 < Y > . TypeAlias3 < X > . self / / expected - error { { ' SameType < X > . Decl4 < Y > . TypeAlias3 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> - let _ = SameType < X > . Decl4 < Y > . Decl1 . self / / expected - error { { ' SameType < X > . Decl4 < Y > . Decl1 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> - let _ = SameType < X > . Decl4 < Y > . Decl2 . self / / expected - error { { ' SameType < X > . Decl4 < Y > . Decl2 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> - let _ = SameType < X > . Decl4 < Y > . Decl3 . self / / expected - error { { ' SameType < X > . Decl4 < Y > . Decl3 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> + let _ = SameType < X > . Decl4 < Y > . Decl1 . self / / expected - error { { ' SameType < T > . Decl4 < U > . Decl1 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> + let _ = SameType < X > . Decl4 < Y > . Decl2 . self / / expected - error { { ' SameType < T > . Decl4 < U > . Decl2 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> + let _ = SameType < X > . Decl4 < Y > . Decl3 . self / / expected - error { { ' SameType < T > . Decl4 < U > . Decl3 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> let _ = SameType < X > . Decl4 < Y > . Decl4 < X > . self / / expected - error { { ' SameType < X > . Decl4 < Y > . Decl4 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> let _ = SameType < X > . Decl4 < Y > . Decl5 < X > . self / / expected - error { { ' SameType < X > . Decl4 < Y > . Decl5 ' requires the types ' Y ' and ' X ' be equivalent } } <nl> <nl> mmm a / test / Constraints / rdar39931339 . swift <nl> ppp b / test / Constraints / rdar39931339 . swift <nl> _ = B < C > . S1 ( ) / / Ok <nl> _ = B < Int > . S2 ( ) / / Ok <nl> _ = B < Float > . S1 ( ) / / expected - error { { type ' Float ' does not conform to protocol ' P ' } } <nl> _ = B < String > . S2 ( ) <nl> - / / expected - error @ - 1 { { ' B < String > . S2 ' ( aka ' Int ' ) requires the types ' [ String ] ' and ' [ Int ] ' be equivalent } } <nl> + / / expected - error @ - 1 { { ' A < T , U > . S2 ' ( aka ' Int ' ) requires the types ' [ String ] ' and ' [ Int ] ' be equivalent } } <nl> <nl> _ = S < C > . A ( ) / / Ok <nl> _ = S < Int > . A ( ) / / expected - error { { type ' Int ' does not conform to protocol ' P ' } } <nl> _ = S < String > . B < Int > ( ) / / expected - error { { type ' String ' does not conform to protocol ' P ' } } <nl> - _ = S < Int > . C ( ) / / expected - error { { ' S < Int > . C ' ( aka ' Int ' ) requires the types ' Int ' and ' Float ' be equivalent } } <nl> + _ = S < Int > . C ( ) / / expected - error { { ' S < T > . C ' ( aka ' Int ' ) requires the types ' Int ' and ' Float ' be equivalent } } <nl> <nl> func foo < T > ( _ s : S < T > . Type ) { <nl> _ = s . A ( ) / / expected - error { { referencing type alias ' A ' on ' S ' requires that ' T ' conform to ' P ' } } <nl> mmm a / test / Constraints / requirement_failures_in_contextual_type . swift <nl> ppp b / test / Constraints / requirement_failures_in_contextual_type . swift <nl> extension A where T = = Int32 { / / expected - note 3 { { requirement specified as ' T ' <nl> } <nl> <nl> let _ : A < Int > . 
B = 0 <nl> - / / expected - error @ - 1 { { ' A < Int > . B ' requires the types ' Int ' and ' Int32 ' be equivalent } } <nl> + / / expected - error @ - 1 { { ' A < T > . B ' requires the types ' Int ' and ' Int32 ' be equivalent } } <nl> let _ : A < Int > . C = 0 <nl> - / / expected - error @ - 1 { { ' A < Int > . C ' ( aka ' Int ' ) requires the types ' Int ' and ' Int32 ' be equivalent } } <nl> + / / expected - error @ - 1 { { ' A < T > . C ' ( aka ' Int ' ) requires the types ' Int ' and ' Int32 ' be equivalent } } <nl> let _ : A < Int > . B . E = 0 <nl> - / / expected - error @ - 1 { { ' A < Int > . B ' requires the types ' Int ' and ' Int32 ' be equivalent } } <nl> + / / expected - error @ - 1 { { ' A < T > . B ' requires the types ' Int ' and ' Int32 ' be equivalent } } <nl> <nl> <nl> protocol P { } <nl> new file mode 100644 <nl> index 000000000000 . . e56586e752a6 <nl> mmm / dev / null <nl> ppp b / test / Generics / constrained_type_witnesses . swift <nl> <nl> + / / RUN : % target - typecheck - verify - swift <nl> + <nl> + protocol P { <nl> + associatedtype A <nl> + / / expected - note @ - 1 3 { { protocol requires nested type ' A ' ; do you want to add it ? } } <nl> + } <nl> + <nl> + struct S1 < T > { } <nl> + <nl> + extension S1 where T : P { <nl> + typealias A = Int <nl> + } <nl> + <nl> + / / This is rejected because S1 . A is not a suitable witness for P . A . <nl> + extension S1 : P { } <nl> + / / expected - error @ - 1 { { type ' S1 < T > ' does not conform to protocol ' P ' } } <nl> + <nl> + struct S2 < T > { } <nl> + <nl> + extension S2 where T : P { <nl> + typealias A = Never <nl> + } <nl> + <nl> + / / Hack : This is OK to make SwiftUI work , which accidentally relies on the <nl> + / / incorrect behavior with a typealias whose underlying type is ' Never ' <nl> + / / ( so it didn ' t hit the compiler crash ) . <nl> + extension S2 : P { } <nl> + <nl> + / / Here we have a suitable witness <nl> + struct S3 < T > { } <nl> + <nl> + extension S3 where T = = Int { <nl> + typealias A = Int <nl> + } <nl> + <nl> + extension S3 : P where T = = Int { } <nl> + <nl> + / / Check where clause on the type itself <nl> + <nl> + struct S4 < T > { <nl> + typealias A = Int where T : P <nl> + } <nl> + <nl> + extension S4 : P { } <nl> + / / expected - error @ - 1 { { type ' S4 < T > ' does not conform to protocol ' P ' } } <nl> + <nl> + struct S5 < T > { <nl> + typealias A = Never where T : P <nl> + } <nl> + <nl> + extension S5 : P { } <nl> + <nl> + struct S6 < T > { <nl> + typealias A = Int where T = = Int <nl> + } <nl> + <nl> + extension S6 : P where T = = Int { } <nl> + <nl> + / / Witness in a constrained protocol extension <nl> + protocol Q { <nl> + associatedtype B <nl> + } <nl> + <nl> + extension Q where B = = Int { <nl> + typealias A = Int <nl> + } <nl> + <nl> + struct S7 : Q , P { <nl> + typealias B = Int <nl> + } <nl> + <nl> + struct S8 : Q , P { <nl> + / / expected - error @ - 1 { { type ' S8 ' does not conform to protocol ' P ' } } <nl> + typealias B = String <nl> + } <nl> mmm a / test / Generics / where_clause_contextually_generic_decls . swift <nl> ppp b / test / Generics / where_clause_contextually_generic_decls . swift <nl> <nl> - / / RUN : % target - typecheck - verify - swift - typecheck % s - verify - swift - version 4 <nl> + / / RUN : % target - typecheck - verify - swift - swift - version 4 <nl> <nl> func bet ( ) where A : B { } / / expected - error { { ' where ' clause cannot be applied to a non - generic top - level declaration } } <nl> <nl> _ = Container < String > . 
NestedAlias2 . self / / expected - error { { type ' String ' does n <nl> _ = Container < Container < Bool > > . NestedClass . self / / expected - error { { type ' Container < Bool > ' does not conform to protocol ' Equatable ' } } <nl> _ = Container < Void > . NestedStruct . self / / expected - error { { type ' Void ' does not conform to protocol ' Sequence ' } } <nl> _ = Container < Array < Void > > . NestedStruct2 . self / / expected - error { { type ' Void ' does not conform to protocol ' Comparable ' } } <nl> - _ = Container < String > . NestedStruct2 . NestedEnum . self / / expected - error { { ' Container < String > . NestedStruct2 . NestedEnum ' requires the types ' String . Element ' ( aka ' Character ' ) and ' Double ' be equivalent } } <nl> + _ = Container < String > . NestedStruct2 . NestedEnum . self / / expected - error { { ' Container < T > . NestedStruct2 . NestedEnum ' requires the types ' String . Element ' ( aka ' Character ' ) and ' Double ' be equivalent } } <nl> _ = Container < Int > . NestedAlias2 . self <nl> _ = Container < Bool > . NestedClass . self <nl> _ = Container < String > . NestedStruct . self <nl> mmm a / test / decl / protocol / req / missing_conformance . swift <nl> ppp b / test / decl / protocol / req / missing_conformance . swift <nl> extension CountSteps1 / / expected - error { { type ' CountSteps1 < T > ' does not conform <nl> where T : Equatable <nl> { <nl> typealias Index = Int <nl> + / / expected - error @ - 1 { { invalid redeclaration of synthesized implementation for protocol requirement ' Index ' } } <nl> func index ( _ i : Index , offsetBy d : Int ) - > Index { <nl> return i + d <nl> } <nl> mmm a / test / decl / var / property_wrappers . swift <nl> ppp b / test / decl / var / property_wrappers . swift <nl> extension SR_11288_P4 where Self : AnyObject { / / expected - note { { requirement spe <nl> } <nl> <nl> struct SR_11288_S4 : SR_11288_P4 { <nl> - @ SR_11288_Wrapper4 var answer = 42 / / expected - error { { ' SR_11288_S4 . SR_11288_Wrapper4 ' ( aka ' SR_11288_S0 ' ) requires that ' SR_11288_S4 ' be a class type } } <nl> + @ SR_11288_Wrapper4 var answer = 42 / / expected - error { { ' Self . SR_11288_Wrapper4 ' ( aka ' SR_11288_S0 ' ) requires that ' SR_11288_S4 ' be a class type } } <nl> } <nl> <nl> class SR_11288_C0 : SR_11288_P4 { <nl> new file mode 100644 <nl> index 000000000000 . . 33a5140a92d2 <nl> mmm / dev / null <nl> ppp b / validation - test / compiler_crashers_2_fixed / sr12327 . swift <nl> <nl> + / / RUN : % target - swift - frontend - emit - ir - O % s <nl> + <nl> + protocol A { <nl> + associatedtype Foo / / Does not crash if renamed <nl> + } <nl> + <nl> + protocol B { <nl> + associatedtype Foo / / Does not crash if renamed <nl> + var aFoo : Foo { get } <nl> + } <nl> + <nl> + public struct Wrapper < T > { <nl> + let wrapped : T <nl> + } <nl> + <nl> + / / Removing this extension or combining it with the next one prevents the crash <nl> + extension Wrapper : A where T : A { <nl> + typealias Foo = Wrapper < T . Foo > <nl> + } <nl> + <nl> + extension Wrapper : B where T : B { <nl> + var aFoo : Wrapper < T . Foo > { <nl> + return . init ( wrapped : wrapped . aFoo ) <nl> + } <nl> + } <nl> + <nl> + public struct Model : B { <nl> + public struct Foo { } <nl> + <nl> + public var aFoo : Foo { <nl> + return Foo ( ) <nl> + } <nl> + } <nl> + <nl> + / / Attempting to specialize this method for Wrapper < Model > crashes the compiler <nl> + func fooString < Body : B > ( body : Body ) - > String { <nl> + return " \ ( body . 
aFoo ) " <nl> + } <nl> + <nl> + public func foo ( _ command : Wrapper < Model > ) - > String { <nl> + return fooString ( body : command ) <nl> + } <nl> mmm a / validation - test / compiler_crashers_2_fixed / sr9199 . swift <nl> ppp b / validation - test / compiler_crashers_2_fixed / sr9199 . swift <nl> <nl> - / / RUN : % target - swift - frontend - emit - ir - o % t . ll % s <nl> + / / RUN : not % target - swift - frontend - emit - ir % s <nl> <nl> / / Just make sure we don ' t crash . <nl> <nl> extension Controller : WithReturnType { <nl> <nl> let controller = Controller < String > ( ) <nl> <nl> - controller . returnTheThing ( ) <nl> \ No newline at end of file <nl> + controller . returnTheThing ( ) <nl>
Merge pull request from slavapestov/type-witness-where-clause-check
apple/swift
fbc0463e6225a1ee8ca5ecb6fa7dad7be65c9b90
2020-08-15T16:20:24Z
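The refactoring pattern in the TypeCheckType.cpp hunks is worth spelling out: checkContextualRequirements changes from returning Type-or-ErrorType to returning bool, diagnostics become optional (skipped when the SourceLoc is invalid), and each caller decides what failure means — applyGenericArguments builds an ErrorType, while resolveTypeWitnessViaLookup just skips the candidate. Below is a minimal, self-contained C++ sketch of that shape; every type and function here is an illustrative stand-in, not the Swift compiler's real interface.

    #include <iostream>
    #include <string>

    // Illustrative stand-ins for the compiler's type machinery.
    struct Type {
        std::string name;
        bool isError = false;
    };

    Type makeErrorType() { return Type{"<<error>>", true}; }

    // Returns true on success. Diagnostics are optional so that call sites
    // that only want a yes/no answer (like witness lookup) stay silent.
    bool checkContextualRequirements(const Type& type, bool requirementsHold,
                                     bool emitDiagnostics) {
        if (requirementsHold)
            return true;
        if (emitDiagnostics)
            std::cerr << "requirements not satisfied for " << type.name << "\n";
        return false;
    }

    // Call site 1: type resolution converts failure into an error type.
    Type applyGenericArguments(const Type& type, bool requirementsHold) {
        if (checkContextualRequirements(type, requirementsHold,
                                        /*emitDiagnostics=*/true))
            return type;
        return makeErrorType();
    }

    // Call site 2: witness resolution just filters out the candidate.
    bool isViableWitness(const Type& type, bool requirementsHold) {
        return checkContextualRequirements(type, requirementsHold,
                                           /*emitDiagnostics=*/false);
    }

    int main() {
        std::cout << applyGenericArguments({"SameType<Y>.Decl1"}, false).isError
                  << " " << isViableWitness({"S3<Int>.A"}, true) << "\n";
    }

Splitting the predicate from the error construction is what lets the conformance checker reuse the same requirement check without fabricating a type it would immediately throw away.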
mmm a / src / mongo / client / dbclient_rs_test . cpp <nl> ppp b / src / mongo / client / dbclient_rs_test . cpp <nl> <nl> # include " mongo / client / connpool . h " <nl> # include " mongo / client / dbclient_rs . h " <nl> # include " mongo / client / replica_set_monitor . h " <nl> + # include " mongo / client / replica_set_monitor_params_gen . h " <nl> # include " mongo / db / jsobj . h " <nl> # include " mongo / dbtests / mock / mock_conn_registry . h " <nl> # include " mongo / dbtests / mock / mock_replica_set . h " <nl> BSONObj makeMetadata ( ReadPreference rp , TagSet tagSet ) { <nl> } <nl> <nl> / * * <nl> - * Basic fixture with one primary and one secondary . <nl> + * Ensures a global ServiceContext exists and the ScanningReplicaSetMonitor is used for each test . <nl> * / <nl> - class BasicRS : public unittest : : Test { <nl> + class DBClientRSTest : public unittest : : Test { <nl> protected : <nl> void setUp ( ) { <nl> auto serviceContext = ServiceContext : : make ( ) ; <nl> setGlobalServiceContext ( std : : move ( serviceContext ) ) ; <nl> <nl> + setDisableStreamableTrue ( ) ; <nl> + } <nl> + <nl> + void tearDown ( ) { <nl> + resetDisableStreamable ( ) ; <nl> + } <nl> + <nl> + / * * <nl> + * Ensures the ScanningReplicaSetMonitor is used for the tests . <nl> + * / <nl> + void setDisableStreamableTrue ( ) { <nl> + const BSONObj newFlagParameter = BSON ( kDisableStreamableFlagName < < true ) ; <nl> + BSONObjIterator parameterIterator ( newFlagParameter ) ; <nl> + BSONElement newParameter = parameterIterator . next ( ) ; <nl> + const auto foundParameter = findDisableStreamableServerParameter ( ) ; <nl> + <nl> + uassertStatusOK ( foundParameter - > second - > set ( newParameter ) ) ; <nl> + ASSERT_TRUE ( disableStreamableReplicaSetMonitor . load ( ) ) ; <nl> + } <nl> + <nl> + / * * <nl> + * Restores the disableStreamableReplicaSetMonitor parameter to its default value . <nl> + * / <nl> + void resetDisableStreamable ( ) { <nl> + const auto defaultParameter = kDefaultParameter [ kDisableStreamableFlagName ] ; <nl> + const auto foundParameter = findDisableStreamableServerParameter ( ) ; <nl> + <nl> + uassertStatusOK ( foundParameter - > second - > set ( defaultParameter ) ) ; <nl> + } <nl> + <nl> + / * * <nl> + * Finds the disableStreamableReplicaSetMonitor ServerParameter . <nl> + * / <nl> + ServerParameter : : Map : : const_iterator findDisableStreamableServerParameter ( ) { <nl> + const ServerParameter : : Map & parameterMap = ServerParameterSet : : getGlobal ( ) - > getMap ( ) ; <nl> + return parameterMap . find ( kDisableStreamableFlagName ) ; <nl> + } <nl> + <nl> + static inline const std : : string kDisableStreamableFlagName = <nl> + " disableStreamableReplicaSetMonitor " ; <nl> + <nl> + / * * <nl> + * A BSONObj containing the default for the disableStreamableReplicaSetMonitor flag . <nl> + * / <nl> + static inline const BSONObj kDefaultParameter = <nl> + BSON ( kDisableStreamableFlagName < < disableStreamableReplicaSetMonitor . load ( ) ) ; <nl> + } ; <nl> + <nl> + / * * <nl> + * Basic fixture with one primary and one secondary . <nl> + * / <nl> + class BasicRS : public DBClientRSTest { <nl> + protected : <nl> + void setUp ( ) { <nl> + DBClientRSTest : : setUp ( ) ; <nl> ReplicaSetMonitor : : cleanup ( ) ; <nl> <nl> _replSet . 
reset ( new MockReplicaSet ( " test " , 2 ) ) ; <nl> class BasicRS : public unittest : : Test { <nl> mongo : : ScopedDbConnection : : clearPool ( ) ; <nl> <nl> ReplicaSetMonitor : : shutdown ( ) ; <nl> + DBClientRSTest : : tearDown ( ) ; <nl> } <nl> <nl> MockReplicaSet * getReplSet ( ) { <nl> TEST_F ( BasicRS , CommandSecondaryPreferred ) { <nl> / * * <nl> * Setup for 2 member replica set will all of the nodes down . <nl> * / <nl> - class AllNodesDown : public unittest : : Test { <nl> + class AllNodesDown : public DBClientRSTest { <nl> protected : <nl> void setUp ( ) { <nl> - auto serviceContext = ServiceContext : : make ( ) ; <nl> - setGlobalServiceContext ( std : : move ( serviceContext ) ) ; <nl> - <nl> + DBClientRSTest : : setUp ( ) ; <nl> ReplicaSetMonitor : : cleanup ( ) ; <nl> <nl> _replSet . reset ( new MockReplicaSet ( " test " , 2 ) ) ; <nl> class AllNodesDown : public unittest : : Test { <nl> _replSet . reset ( ) ; <nl> <nl> mongo : : ScopedDbConnection : : clearPool ( ) ; <nl> + DBClientRSTest : : tearDown ( ) ; <nl> } <nl> <nl> MockReplicaSet * getReplSet ( ) { <nl> TEST_F ( AllNodesDown , CommandNearest ) { <nl> / * * <nl> * Setup for 2 member replica set with the primary down . <nl> * / <nl> - class PrimaryDown : public unittest : : Test { <nl> + class PrimaryDown : public DBClientRSTest { <nl> protected : <nl> void setUp ( ) { <nl> - auto serviceContext = ServiceContext : : make ( ) ; <nl> - setGlobalServiceContext ( std : : move ( serviceContext ) ) ; <nl> - <nl> + DBClientRSTest : : setUp ( ) ; <nl> ReplicaSetMonitor : : cleanup ( ) ; <nl> <nl> _replSet . reset ( new MockReplicaSet ( " test " , 2 ) ) ; <nl> class PrimaryDown : public unittest : : Test { <nl> _replSet . reset ( ) ; <nl> <nl> mongo : : ScopedDbConnection : : clearPool ( ) ; <nl> + DBClientRSTest : : tearDown ( ) ; <nl> } <nl> <nl> MockReplicaSet * getReplSet ( ) { <nl> TEST_F ( PrimaryDown , Nearest ) { <nl> / * * <nl> * Setup for 2 member replica set with the secondary down . <nl> * / <nl> - class SecondaryDown : public unittest : : Test { <nl> + class SecondaryDown : public DBClientRSTest { <nl> protected : <nl> void setUp ( ) { <nl> - auto serviceContext = ServiceContext : : make ( ) ; <nl> - setGlobalServiceContext ( std : : move ( serviceContext ) ) ; <nl> - <nl> + DBClientRSTest : : setUp ( ) ; <nl> ReplicaSetMonitor : : cleanup ( ) ; <nl> <nl> _replSet . reset ( new MockReplicaSet ( " test " , 2 ) ) ; <nl> class SecondaryDown : public unittest : : Test { <nl> _replSet . reset ( ) ; <nl> <nl> mongo : : ScopedDbConnection : : clearPool ( ) ; <nl> + DBClientRSTest : : tearDown ( ) ; <nl> } <nl> <nl> MockReplicaSet * getReplSet ( ) { <nl> TEST_F ( SecondaryDown , CommandNearest ) { <nl> * Warning : Tests running this fixture cannot be run in parallel with other tests <nl> * that uses ConnectionString : : setConnectionHook <nl> * / <nl> - class TaggedFiveMemberRS : public unittest : : Test { <nl> + class TaggedFiveMemberRS : public DBClientRSTest { <nl> protected : <nl> void setUp ( ) { <nl> - auto serviceContext = ServiceContext : : make ( ) ; <nl> - setGlobalServiceContext ( std : : move ( serviceContext ) ) ; <nl> + DBClientRSTest : : setUp ( ) ; <nl> <nl> / / Tests for pinning behavior require this . <nl> ReplicaSetMonitor : : useDeterministicHostSelection = true ; <nl> class TaggedFiveMemberRS : public unittest : : Test { <nl> _replSet . 
reset ( ) ; <nl> <nl> mongo : : ScopedDbConnection : : clearPool ( ) ; <nl> + DBClientRSTest : : tearDown ( ) ; <nl> } <nl> <nl> MockReplicaSet * getReplSet ( ) { <nl>
SERVER-46417 Temporarily make dbclient_rs_test use the ScanningReplicaSetMonitor only
mongodb/mongo
c40e990daba274d2fd70c76b51d0a65dff98fdb4
2020-02-28T15:58:41Z
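The mongo change is a standard fixture-hoisting move: the per-fixture setUp/tearDown bodies that each created a ServiceContext now chain through one DBClientRSTest base, which also flips the disableStreamableReplicaSetMonitor server parameter on entry and restores it on exit so the ScanningReplicaSetMonitor is used for every test. A stripped-down sketch of the chaining pattern, with a plain global bool standing in for the real server parameter and no dependency on MongoDB's unittest framework:

    #include <cassert>
    #include <iostream>

    // Stand-in for the server parameter the real tests toggle.
    static bool disableStreamableReplicaSetMonitor = false;

    class DBClientRSTest {
    protected:
        void setUp() {
            _savedFlag = disableStreamableReplicaSetMonitor;
            disableStreamableReplicaSetMonitor = true; // force scanning monitor
        }
        void tearDown() {
            disableStreamableReplicaSetMonitor = _savedFlag; // restore default
        }
    private:
        bool _savedFlag = false;
    };

    class BasicRS : public DBClientRSTest {
    public:
        void setUp() {
            DBClientRSTest::setUp(); // chain to the base, as in the diff
            // ... create the mock replica set here ...
        }
        void tearDown() {
            // ... tear down the mock replica set here ...
            DBClientRSTest::tearDown();
        }
    };

    int main() {
        BasicRS test;
        test.setUp();
        assert(disableStreamableReplicaSetMonitor);
        test.tearDown();
        assert(!disableStreamableReplicaSetMonitor);
        std::cout << "flag restored\n";
    }

The important detail, visible in the diff, is that every derived tearDown calls DBClientRSTest::tearDown() last, so the parameter is restored even for fixtures that do extra cleanup of their own.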
mmm a / examples / netsniff . coffee <nl> ppp b / examples / netsniff . coffee <nl> <nl> - if not Date : : . toISOString <nl> - Date : : . toISOString = - > <nl> + if not Date : : toISOString <nl> + Date : : toISOString = - > <nl> pad = ( n ) - > <nl> if n < 10 then ' 0 ' + n else n <nl> ms = ( n ) - > <nl>
netsniff.coffee: Remove dot in prototype check.
ariya/phantomjs
e09da4e1d82f82549b996a34ea70e5062a91fcce
2011-06-17T07:05:19Z
mmm a / src / containers / archive / archive . hpp <nl> ppp b / src / containers / archive / archive . hpp <nl> class read_stream_t { <nl> DISABLE_COPYING ( read_stream_t ) ; <nl> } ; <nl> <nl> - / / Deserialize functions return 0 upon success , a positive or negative error <nl> - / / code upon failure . - 1 means there was an error on the socket , - 2 means EOF on <nl> - / / the socket , - 3 means a " range error " , + 1 means specific error info was <nl> - / / discarded , the error code got used as a boolean . <nl> + / / The return value of deserialization functions . <nl> enum class archive_result_t { <nl> - SUCCESS = 0 , <nl> - SOCK_ERROR = - 1 , <nl> - SOCK_EOF = - 2 , <nl> - RANGE_ERROR = - 3 , <nl> + / / Success . <nl> + SUCCESS , <nl> + / / An error on the socket happened . <nl> + SOCK_ERROR , <nl> + / / An EOF on the socket happened . <nl> + SOCK_EOF , <nl> + / / The value deserialized was out of range . <nl> + RANGE_ERROR , <nl> } ; <nl> <nl> inline bool bad ( archive_result_t res ) { <nl>
Removed explicit values for archive_result_t.
rethinkdb/rethinkdb
1a8bdf52f562af2ff5467ed8899d2760a20058a9
2014-02-21T11:30:50Z
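Dropping the explicit initializers works because a scoped enum numbers its enumerators 0, 1, 2, ... in declaration order, and the commit's premise is that no caller may depend on that encoding anyway — the old negative values existed so the result could double as a boolean or sign test, which enum class deliberately forbids. A compilable sketch of the symbolic usage that remains legal:

    #include <iostream>

    // As in the commit: no explicit values. The compiler assigns 0, 1, 2, 3
    // in declaration order; callers compare symbolically, never numerically.
    enum class archive_result_t {
        SUCCESS,     // was  0
        SOCK_ERROR,  // was -1
        SOCK_EOF,    // was -2
        RANGE_ERROR, // was -3
    };

    // Mirrors the helper in archive.hpp: success is a named comparison,
    // not a check against 0 or a sign test.
    inline bool bad(archive_result_t res) {
        return res != archive_result_t::SUCCESS;
    }

    int main() {
        archive_result_t res = archive_result_t::SOCK_EOF;
        if (bad(res))
            std::cout << "deserialization failed\n";
        // Using the code as a boolean or checking its sign no longer
        // compiles, which is exactly what the scoped enum is meant to prevent.
    }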
mmm a / src / caffe / solver . cpp <nl> ppp b / src / caffe / solver . cpp <nl> template < typename Dtype > <nl> Solver < Dtype > : : Solver ( const SolverParameter & param ) <nl> : param_ ( param ) , net_ ( ) , test_net_ ( ) { <nl> / / Scaffolding code <nl> - NetParameter train_net_param ; <nl> - ReadProtoFromTextFileOrDie ( param_ . train_net ( ) , & train_net_param ) ; <nl> LOG ( INFO ) < < " Creating training net . " ; <nl> - net_ . reset ( new Net < Dtype > ( train_net_param ) ) ; <nl> + net_ . reset ( new Net < Dtype > ( param_ . train_net ( ) ) ) ; <nl> if ( param_ . has_test_net ( ) ) { <nl> LOG ( INFO ) < < " Creating testing net . " ; <nl> - NetParameter test_net_param ; <nl> - ReadProtoFromTextFileOrDie ( param_ . test_net ( ) , & test_net_param ) ; <nl> - test_net_ . reset ( new Net < Dtype > ( test_net_param ) ) ; <nl> + test_net_ . reset ( new Net < Dtype > ( param_ . test_net ( ) ) ) ; <nl> CHECK_GT ( param_ . test_iter ( ) , 0 ) ; <nl> CHECK_GT ( param_ . test_interval ( ) , 0 ) ; <nl> } <nl> mmm a / src / caffe / util / upgrade_proto . cpp <nl> ppp b / src / caffe / util / upgrade_proto . cpp <nl> bool UpgradeV0LayerConnection ( const V0LayerConnection & v0_layer_connection , <nl> layer_param - > set_name ( v0_layer_param . name ( ) ) ; <nl> } <nl> const string & type = v0_layer_param . type ( ) ; <nl> + if ( v0_layer_param . has_type ( ) ) { <nl> + layer_param - > set_type ( type ) ; <nl> + } <nl> if ( v0_layer_param . has_num_output ( ) ) { <nl> if ( type = = " conv " ) { <nl> layer_param - > mutable_convolution_param ( ) - > set_num_output ( <nl> bool UpgradeV0LayerConnection ( const V0LayerConnection & v0_layer_connection , <nl> if ( type = = " pool " ) { <nl> V0LayerParameter_PoolMethod pool = v0_layer_param . pool ( ) ; <nl> switch ( pool ) { <nl> - V0LayerParameter_PoolMethod_MAX : <nl> + case V0LayerParameter_PoolMethod_MAX : <nl> layer_param - > mutable_pooling_param ( ) - > set_pool ( <nl> PoolingParameter_PoolMethod_MAX ) ; <nl> break ; <nl> - V0LayerParameter_PoolMethod_AVE : <nl> + case V0LayerParameter_PoolMethod_AVE : <nl> layer_param - > mutable_pooling_param ( ) - > set_pool ( <nl> PoolingParameter_PoolMethod_AVE ) ; <nl> break ; <nl> - V0LayerParameter_PoolMethod_STOCHASTIC : <nl> + case V0LayerParameter_PoolMethod_STOCHASTIC : <nl> layer_param - > mutable_pooling_param ( ) - > set_pool ( <nl> PoolingParameter_PoolMethod_STOCHASTIC ) ; <nl> break ; <nl>
make solver use upgrade_proto (by constructing net with a string) and
BVLC/caffe
7e25c8ce40bd16ff240393f2c663069565f9fb08
2014-03-28T06:42:28Z
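Two fixes ride along in this commit. Solver now hands Net a filename and lets Net do the proto reading and upgrading itself, and upgrade_proto.cpp repairs a classic switch bug: the pool-method enumerators were written without the case keyword, so each parsed as an ordinary goto label (labels live in their own namespace), the switch contained no case labels at all, and the pooling mode was silently never set. A self-contained illustration of the buggy and fixed forms, using a generic enum rather than the protobuf-generated one:

    #include <iostream>

    enum PoolMethod { POOL_MAX, POOL_AVE, POOL_STOCHASTIC };

    // Buggy form: without 'case', each enumerator name parses as a goto
    // label, the switch has no case labels, and no statement in the body
    // is ever executed.
    int set_pool_buggy(PoolMethod pool) {
        int result = -1;
        switch (pool) {
        POOL_MAX:
            result = 0;
            break;
        POOL_AVE:
            result = 1;
            break;
        POOL_STOCHASTIC:
            result = 2;
            break;
        }
        return result; // always -1: the labeled statements are dead code
    }

    // Fixed form, as in the commit: 'case' turns the names into case labels.
    int set_pool_fixed(PoolMethod pool) {
        int result = -1;
        switch (pool) {
        case POOL_MAX:
            result = 0;
            break;
        case POOL_AVE:
            result = 1;
            break;
        case POOL_STOCHASTIC:
            result = 2;
            break;
        }
        return result;
    }

    int main() {
        std::cout << set_pool_buggy(POOL_AVE) << " "   // prints -1
                  << set_pool_fixed(POOL_AVE) << "\n"; // prints 1
    }

Most compilers flag the buggy form only with an unused-label warning, which is how it slipped through review.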
mmm a / tensorflow / c / experimental / gradients / BUILD <nl> ppp b / tensorflow / c / experimental / gradients / BUILD <nl> filegroup ( <nl> ] , <nl> ) <nl> <nl> - cc_library ( <nl> - name = " model_factory_helper " , <nl> - hdrs = [ " model_factory_helper . h " ] , <nl> - ) <nl> - <nl> cc_library ( <nl> name = " grad_test_helper " , <nl> testonly = True , <nl> tf_cuda_cc_test ( <nl> ] , <nl> args = [ " - - heap_check = local " ] , <nl> linkstatic = tf_kernel_tests_linkstatic ( ) , <nl> - tags = tf_cuda_tests_tags ( ) + [ " nomac " ] , <nl> + tags = tf_cuda_tests_tags ( ) , <nl> deps = [ <nl> " : grad_test_helper " , <nl> - " : model_factory_helper " , <nl> " : nn_grad " , <nl> " / / tensorflow / c / eager : c_api_test_util " , <nl> " / / tensorflow / c / experimental / gradients / tape : tape_context " , <nl> deleted file mode 100644 <nl> index 02dc0e7b9351a . . 0000000000000 <nl> mmm a / tensorflow / c / experimental / gradients / model_factory_helper . h <nl> ppp / dev / null <nl> <nl> - / * Copyright 2020 The TensorFlow Authors . All Rights Reserved . <nl> - <nl> - Licensed under the Apache License , Version 2 . 0 ( the " License " ) ; <nl> - you may not use this file except in compliance with the License . <nl> - You may obtain a copy of the License at <nl> - <nl> - http : / / www . apache . org / licenses / LICENSE - 2 . 0 <nl> - <nl> - Unless required by applicable law or agreed to in writing , software <nl> - distributed under the License is distributed on an " AS IS " BASIS , <nl> - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . <nl> - See the License for the specific language governing permissions and <nl> - limitations under the License . <nl> - = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = * / <nl> - # ifndef TENSORFLOW_C_EXPERIMENTAL_GRADIENTS_MODEL_FACTORY_MACRO_H_ <nl> - # define TENSORFLOW_C_EXPERIMENTAL_GRADIENTS_MODEL_FACTORY_MACRO_H_ <nl> - <nl> - / / This macro will expand to a function that defines a ` Model ` . This ` Model ` is <nl> - / / then used for testing by ` nn_grad_test ` and ` math_grad_test ` . ` ops_call ` is a <nl> - / / statement that calls to a ` ops : : ` and should be wrapped around by ` { } ` . <nl> - / / ` ops_call ` has access to ` inputs ` . The output parameter of the ops should <nl> - / / always be ` absl : : MakeSpan ( temp_outputs ) ` . This macro supports most one - ops <nl> - / / model . <nl> - / / TODO ( vnvo2409 ) : Extend support for more complex model . <nl> - # define TF_MODEL_FACTORY ( name , num_inputs , num_outputs , ops_call ) \ <nl> - Status name ( AbstractContext * ctx , \ <nl> - absl : : Span < AbstractTensorHandle * const > inputs , \ <nl> - absl : : Span < AbstractTensorHandle * > outputs , \ <nl> - const GradientRegistry & registry ) { \ <nl> - auto tape = new Tape ( / * persistent = * / false ) ; \ <nl> - for ( int i { } ; i < num_inputs ; + + i ) { \ <nl> - tape - > Watch ( ToId ( inputs [ i ] ) ) ; \ <nl> - } \ <nl> - \ <nl> - AbstractTensorHandle * temp_outputs [ num_outputs ] = { } ; \ <nl> - AbstractContextPtr tape_ctx ( new TapeContext ( ctx , tape , registry ) ) ; \ <nl> - ops_call ; \ <nl> - \ <nl> - for ( int i { } ; i < num_outputs ; + + i ) { \ <nl> - outputs [ i ] = temp_outputs [ i ] ; \ <nl> - } \ <nl> - delete tape ; \ <nl> - return Status : : OK ( ) ; \ <nl> - } <nl> - <nl> - / / This macro will expand to a function that defines a ` GradModel ` . 
This <nl> - / / ` GradModel ` is then used for testing by ` nn_grad_test ` and ` math_grad_test ` . <nl> - / / ` ops_call ` is a statement that calls to a ` ops : : ` and should be wrapped <nl> - / / around by ` { } ` . ` ops_call ` has access to ` inputs ` . The output parameter of <nl> - / / the ops should always be ` absl : : MakeSpan ( temp_outputs ) ` . This macro supports <nl> - / / most one - ops model . <nl> - / / TODO ( vnvo2409 ) : Extend support for more complex model . <nl> - # define TF_GRAD_MODEL_FACTORY ( name , num_inputs , num_outputs , num_grad_outputs , \ <nl> - ops_call ) \ <nl> - Status name ( AbstractContext * ctx , \ <nl> - absl : : Span < AbstractTensorHandle * const > inputs , \ <nl> - absl : : Span < AbstractTensorHandle * > outputs , \ <nl> - const GradientRegistry & registry ) { \ <nl> - TapeVSpace vspace ( ctx ) ; \ <nl> - auto tape = new Tape ( / * persistent = * / false ) ; \ <nl> - for ( int i { } ; i < num_inputs ; + + i ) { \ <nl> - tape - > Watch ( ToId ( inputs [ i ] ) ) ; \ <nl> - } \ <nl> - \ <nl> - AbstractTensorHandle * temp_outputs [ num_outputs ] = { } ; \ <nl> - AbstractContextPtr tape_ctx ( new TapeContext ( ctx , tape , registry ) ) ; \ <nl> - ops_call ; \ <nl> - \ <nl> - std : : unordered_map < tensorflow : : int64 , TapeTensor > \ <nl> - source_tensors_that_are_targets ; \ <nl> - std : : vector < AbstractTensorHandle * > out_grads ( num_grad_outputs ) ; \ <nl> - \ <nl> - int64 target_tensor_ids [ num_outputs ] = { } ; \ <nl> - for ( int i { } ; i < num_outputs ; + + i ) { \ <nl> - target_tensor_ids [ i ] = ToId ( temp_outputs [ i ] ) ; \ <nl> - } \ <nl> - \ <nl> - int64 source_tensor_ids [ num_inputs ] = { } ; \ <nl> - for ( int i { } ; i < num_inputs ; + + i ) { \ <nl> - source_tensor_ids [ i ] = ToId ( inputs [ i ] ) ; \ <nl> - } \ <nl> - \ <nl> - TF_RETURN_IF_ERROR ( tape - > ComputeGradient ( \ <nl> - vspace , target_tensor_ids , source_tensor_ids , \ <nl> - source_tensors_that_are_targets , / * output_gradients = * / { } , & out_grads , \ <nl> - / * build_default_zeros_grads = * / false ) ) ; \ <nl> - \ <nl> - for ( int i { } ; i < num_outputs ; + + i ) { \ <nl> - temp_outputs [ i ] - > Unref ( ) ; \ <nl> - } \ <nl> - for ( int i { } ; i < num_grad_outputs ; + + i ) { \ <nl> - outputs [ i ] = out_grads [ i ] ; \ <nl> - } \ <nl> - delete tape ; \ <nl> - return Status : : OK ( ) ; \ <nl> - } <nl> - <nl> - # endif / / TENSORFLOW_C_EXPERIMENTAL_GRADIENTS_MODEL_FACTORY_MACRO_H_ <nl> mmm a / tensorflow / c / experimental / gradients / nn_grad_test . cc <nl> ppp b / tensorflow / c / experimental / gradients / nn_grad_test . cc <nl> limitations under the License . <nl> <nl> # include " tensorflow / c / eager / c_api_test_util . h " <nl> # include " tensorflow / c / experimental / gradients / grad_test_helper . h " <nl> - # include " tensorflow / c / experimental / gradients / model_factory_helper . h " <nl> # include " tensorflow / c / experimental / gradients / tape / tape_context . h " <nl> # include " tensorflow / core / platform / test . h " <nl> <nl> namespace { <nl> using tensorflow : : TF_StatusPtr ; <nl> using tracing : : TracingOperation ; <nl> <nl> - TF_MODEL_FACTORY ( BiasAddModel , 2 , 1 , { <nl> - TF_RETURN_IF_ERROR ( ops : : BiasAdd ( tape_ctx . 
get ( ) , inputs , <nl> - absl : : MakeSpan ( temp_outputs ) , " BiasAdd " ) ) ; <nl> - } ) <nl> + Status BiasAddModel ( AbstractContext * ctx , <nl> + absl : : Span < AbstractTensorHandle * const > inputs , <nl> + absl : : Span < AbstractTensorHandle * > outputs , <nl> + const GradientRegistry & registry ) { <nl> + return ops : : BiasAdd ( ctx , inputs , outputs , " BiasAdd " ) ; <nl> + } <nl> <nl> - TF_GRAD_MODEL_FACTORY ( BiasAddGradModel , 2 , 1 , 2 , { <nl> + Status BiasAddGradModel ( AbstractContext * ctx , <nl> + absl : : Span < AbstractTensorHandle * const > inputs , <nl> + absl : : Span < AbstractTensorHandle * > outputs , <nl> + const GradientRegistry & registry ) { <nl> + TapeVSpace vspace ( ctx ) ; <nl> + auto tape = new Tape ( / * persistent = * / false ) ; <nl> + tape - > Watch ( ToId ( inputs [ 0 ] ) ) ; / / Watch A . <nl> + tape - > Watch ( ToId ( inputs [ 1 ] ) ) ; / / Watch Bias . <nl> + std : : vector < AbstractTensorHandle * > temp_outputs ( 1 ) ; <nl> + AbstractContextPtr tape_ctx ( new TapeContext ( ctx , tape , registry ) ) ; <nl> TF_RETURN_IF_ERROR ( ops : : BiasAdd ( tape_ctx . get ( ) , inputs , <nl> absl : : MakeSpan ( temp_outputs ) , " BiasAddGrad " ) ) ; <nl> - } ) <nl> + std : : unordered_map < tensorflow : : int64 , TapeTensor > <nl> + source_tensors_that_are_targets ; <nl> + <nl> + std : : vector < AbstractTensorHandle * > out_grads ; <nl> + TF_RETURN_IF_ERROR ( tape - > ComputeGradient ( <nl> + vspace , / * target_tensor_ids = * / { ToId ( temp_outputs [ 0 ] ) } , <nl> + / * source_tensor_ids = * / { ToId ( inputs [ 0 ] ) , ToId ( inputs [ 1 ] ) } , <nl> + source_tensors_that_are_targets , <nl> + / * output_gradients = * / { } , & out_grads , <nl> + / * build_default_zeros_grads = * / false ) ) ; <nl> + for ( auto temp_output : temp_outputs ) { <nl> + temp_output - > Unref ( ) ; <nl> + } <nl> + outputs [ 0 ] = out_grads [ 0 ] ; <nl> + outputs [ 1 ] = out_grads [ 1 ] ; <nl> + delete tape ; <nl> + return Status : : OK ( ) ; <nl> + } <nl> <nl> Status RegisterGradients ( GradientRegistry * registry ) { <nl> TF_RETURN_IF_ERROR ( registry - > Register ( " BiasAdd " , BiasAddRegisterer ) ) ; <nl>
remove MODEL macro
tensorflow/tensorflow
fa1cfa16ec7e1f65e6b37cc09c0a508f54e1a031
2020-12-03T14:10:55Z
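The TensorFlow commit deletes TF_MODEL_FACTORY / TF_GRAD_MODEL_FACTORY and writes BiasAddModel / BiasAddGradModel out longhand: a multi-line macro that expands to whole functions defeats breakpoints, stack traces, and readable compiler errors. When some sharing is still wanted, an ordinary function taking a callable recovers most of the macro's brevity without token pasting. A hedged sketch of that alternative under stand-in types — Tensor, Status, and RunModel here are illustrative, not TensorFlow's C API:

    #include <functional>
    #include <iostream>
    #include <vector>

    // Illustrative stand-ins; the real code works with AbstractTensorHandle,
    // Tape, and GradientRegistry from the TensorFlow C API.
    using Tensor = double;
    using Status = int; // 0 == OK

    // Instead of a TF_MODEL_FACTORY(name, ...) macro expanding to a whole
    // function, a plain function takes the op to run as a parameter. It is
    // a normal symbol: it appears in stack traces and can hold a breakpoint.
    Status RunModel(const std::function<Status(const std::vector<Tensor>&,
                                               std::vector<Tensor>&)>& op,
                    const std::vector<Tensor>& inputs,
                    std::vector<Tensor>& outputs) {
        // ... tape setup / watching inputs would go here in the real code ...
        Status s = op(inputs, outputs);
        // ... gradient computation / cleanup would go here ...
        return s;
    }

    Status BiasAdd(const std::vector<Tensor>& in, std::vector<Tensor>& out) {
        out = {in.at(0) + in.at(1)};
        return 0;
    }

    int main() {
        std::vector<Tensor> out;
        if (RunModel(BiasAdd, {3.0, 0.5}, out) == 0)
            std::cout << "BiasAdd -> " << out[0] << "\n";
    }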
mmm a / LICENSE <nl> ppp b / LICENSE <nl> <nl> - GNU LESSER GENERAL PUBLIC LICENSE <nl> + GNU GENERAL PUBLIC LICENSE <nl> Version 3 , 29 June 2007 <nl> <nl> Copyright ( C ) 2007 Free Software Foundation , Inc . < http : / / fsf . org / > <nl> Everyone is permitted to copy and distribute verbatim copies <nl> of this license document , but changing it is not allowed . <nl> <nl> - <nl> - This version of the GNU Lesser General Public License incorporates <nl> - the terms and conditions of version 3 of the GNU General Public <nl> - License , supplemented by the additional permissions listed below . <nl> - <nl> - 0 . Additional Definitions . <nl> - <nl> - As used herein , " this License " refers to version 3 of the GNU Lesser <nl> - General Public License , and the " GNU GPL " refers to version 3 of the GNU <nl> - General Public License . <nl> - <nl> - " The Library " refers to a covered work governed by this License , <nl> - other than an Application or a Combined Work as defined below . <nl> - <nl> - An " Application " is any work that makes use of an interface provided <nl> - by the Library , but which is not otherwise based on the Library . <nl> - Defining a subclass of a class defined by the Library is deemed a mode <nl> - of using an interface provided by the Library . <nl> - <nl> - A " Combined Work " is a work produced by combining or linking an <nl> - Application with the Library . The particular version of the Library <nl> - with which the Combined Work was made is also called the " Linked <nl> - Version " . <nl> - <nl> - The " Minimal Corresponding Source " for a Combined Work means the <nl> - Corresponding Source for the Combined Work , excluding any source code <nl> - for portions of the Combined Work that , considered in isolation , are <nl> - based on the Application , and not on the Linked Version . <nl> - <nl> - The " Corresponding Application Code " for a Combined Work means the <nl> - object code and / or source code for the Application , including any data <nl> - and utility programs needed for reproducing the Combined Work from the <nl> - Application , but excluding the System Libraries of the Combined Work . <nl> - <nl> - 1 . Exception to Section 3 of the GNU GPL . <nl> - <nl> - You may convey a covered work under sections 3 and 4 of this License <nl> - without being bound by section 3 of the GNU GPL . <nl> - <nl> - 2 . Conveying Modified Versions . <nl> - <nl> - If you modify a copy of the Library , and , in your modifications , a <nl> - facility refers to a function or data to be supplied by an Application <nl> - that uses the facility ( other than as an argument passed when the <nl> - facility is invoked ) , then you may convey a copy of the modified <nl> - version : <nl> - <nl> - a ) under this License , provided that you make a good faith effort to <nl> - ensure that , in the event an Application does not supply the <nl> - function or data , the facility still operates , and performs <nl> - whatever part of its purpose remains meaningful , or <nl> - <nl> - b ) under the GNU GPL , with none of the additional permissions of <nl> - this License applicable to that copy . <nl> - <nl> - 3 . Object Code Incorporating Material from Library Header Files . <nl> - <nl> - The object code form of an Application may incorporate material from <nl> - a header file that is part of the Library . 
You may convey such object <nl> - code under terms of your choice , provided that , if the incorporated <nl> - material is not limited to numerical parameters , data structure <nl> - layouts and accessors , or small macros , inline functions and templates <nl> - ( ten or fewer lines in length ) , you do both of the following : <nl> - <nl> - a ) Give prominent notice with each copy of the object code that the <nl> - Library is used in it and that the Library and its use are <nl> - covered by this License . <nl> - <nl> - b ) Accompany the object code with a copy of the GNU GPL and this license <nl> - document . <nl> - <nl> - 4 . Combined Works . <nl> - <nl> - You may convey a Combined Work under terms of your choice that , <nl> - taken together , effectively do not restrict modification of the <nl> - portions of the Library contained in the Combined Work and reverse <nl> - engineering for debugging such modifications , if you also do each of <nl> - the following : <nl> - <nl> - a ) Give prominent notice with each copy of the Combined Work that <nl> - the Library is used in it and that the Library and its use are <nl> - covered by this License . <nl> - <nl> - b ) Accompany the Combined Work with a copy of the GNU GPL and this license <nl> - document . <nl> - <nl> - c ) For a Combined Work that displays copyright notices during <nl> - execution , include the copyright notice for the Library among <nl> - these notices , as well as a reference directing the user to the <nl> - copies of the GNU GPL and this license document . <nl> - <nl> - d ) Do one of the following : <nl> - <nl> - 0 ) Convey the Minimal Corresponding Source under the terms of this <nl> - License , and the Corresponding Application Code in a form <nl> - suitable for , and under terms that permit , the user to <nl> - recombine or relink the Application with a modified version of <nl> - the Linked Version to produce a modified Combined Work , in the <nl> - manner specified by section 6 of the GNU GPL for conveying <nl> - Corresponding Source . <nl> - <nl> - 1 ) Use a suitable shared library mechanism for linking with the <nl> - Library . A suitable mechanism is one that ( a ) uses at run time <nl> - a copy of the Library already present on the user ' s computer <nl> - system , and ( b ) will operate properly with a modified version <nl> - of the Library that is interface - compatible with the Linked <nl> - Version . <nl> - <nl> - e ) Provide Installation Information , but only if you would otherwise <nl> - be required to provide such information under section 6 of the <nl> - GNU GPL , and only to the extent that such information is <nl> - necessary to install and execute a modified version of the <nl> - Combined Work produced by recombining or relinking the <nl> - Application with a modified version of the Linked Version . ( If <nl> - you use option 4d0 , the Installation Information must accompany <nl> - the Minimal Corresponding Source and Corresponding Application <nl> - Code . If you use option 4d1 , you must provide the Installation <nl> - Information in the manner specified by section 6 of the GNU GPL <nl> - for conveying Corresponding Source . ) <nl> - <nl> - 5 . Combined Libraries . 
<nl> - <nl> - You may place library facilities that are a work based on the <nl> - Library side by side in a single library together with other library <nl> - facilities that are not Applications and are not covered by this <nl> - License , and convey such a combined library under terms of your <nl> - choice , if you do both of the following : <nl> - <nl> - a ) Accompany the combined library with a copy of the same work based <nl> - on the Library , uncombined with any other library facilities , <nl> - conveyed under the terms of this License . <nl> - <nl> - b ) Give prominent notice with the combined library that part of it <nl> - is a work based on the Library , and explaining where to find the <nl> - accompanying uncombined form of the same work . <nl> - <nl> - 6 . Revised Versions of the GNU Lesser General Public License . <nl> - <nl> - The Free Software Foundation may publish revised and / or new versions <nl> - of the GNU Lesser General Public License from time to time . Such new <nl> - versions will be similar in spirit to the present version , but may <nl> - differ in detail to address new problems or concerns . <nl> - <nl> - Each version is given a distinguishing version number . If the <nl> - Library as you received it specifies that a certain numbered version <nl> - of the GNU Lesser General Public License " or any later version " <nl> - applies to it , you have the option of following the terms and <nl> - conditions either of that published version or of any later version <nl> - published by the Free Software Foundation . If the Library as you <nl> - received it does not specify a version number of the GNU Lesser <nl> - General Public License , you may choose any version of the GNU Lesser <nl> - General Public License ever published by the Free Software Foundation . <nl> - <nl> - If the Library as you received it specifies that a proxy can decide <nl> - whether future versions of the GNU Lesser General Public License shall <nl> - apply , that proxy ' s public statement of acceptance of any version is <nl> - permanent authorization for you to choose that version for the <nl> - Library . <nl> + Preamble <nl> + <nl> + The GNU General Public License is a free , copyleft license for <nl> + software and other kinds of works . <nl> + <nl> + The licenses for most software and other practical works are designed <nl> + to take away your freedom to share and change the works . By contrast , <nl> + the GNU General Public License is intended to guarantee your freedom to <nl> + share and change all versions of a program - - to make sure it remains free <nl> + software for all its users . We , the Free Software Foundation , use the <nl> + GNU General Public License for most of our software ; it applies also to <nl> + any other work released this way by its authors . You can apply it to <nl> + your programs , too . <nl> + <nl> + When we speak of free software , we are referring to freedom , not <nl> + price . Our General Public Licenses are designed to make sure that you <nl> + have the freedom to distribute copies of free software ( and charge for <nl> + them if you wish ) , that you receive source code or can get it if you <nl> + want it , that you can change the software or use pieces of it in new <nl> + free programs , and that you know you can do these things . <nl> + <nl> + To protect your rights , we need to prevent others from denying you <nl> + these rights or asking you to surrender the rights . 
Therefore , you have <nl> + certain responsibilities if you distribute copies of the software , or if <nl> + you modify it : responsibilities to respect the freedom of others . <nl> + <nl> + For example , if you distribute copies of such a program , whether <nl> + gratis or for a fee , you must pass on to the recipients the same <nl> + freedoms that you received . You must make sure that they , too , receive <nl> + or can get the source code . And you must show them these terms so they <nl> + know their rights . <nl> + <nl> + Developers that use the GNU GPL protect your rights with two steps : <nl> + ( 1 ) assert copyright on the software , and ( 2 ) offer you this License <nl> + giving you legal permission to copy , distribute and / or modify it . <nl> + <nl> + For the developers ' and authors ' protection , the GPL clearly explains <nl> + that there is no warranty for this free software . For both users ' and <nl> + authors ' sake , the GPL requires that modified versions be marked as <nl> + changed , so that their problems will not be attributed erroneously to <nl> + authors of previous versions . <nl> + <nl> + Some devices are designed to deny users access to install or run <nl> + modified versions of the software inside them , although the manufacturer <nl> + can do so . This is fundamentally incompatible with the aim of <nl> + protecting users ' freedom to change the software . The systematic <nl> + pattern of such abuse occurs in the area of products for individuals to <nl> + use , which is precisely where it is most unacceptable . Therefore , we <nl> + have designed this version of the GPL to prohibit the practice for those <nl> + products . If such problems arise substantially in other domains , we <nl> + stand ready to extend this provision to those domains in future versions <nl> + of the GPL , as needed to protect the freedom of users . <nl> + <nl> + Finally , every program is threatened constantly by software patents . <nl> + States should not allow patents to restrict development and use of <nl> + software on general - purpose computers , but in those that do , we wish to <nl> + avoid the special danger that patents applied to a free program could <nl> + make it effectively proprietary . To prevent this , the GPL assures that <nl> + patents cannot be used to render the program non - free . <nl> + <nl> + The precise terms and conditions for copying , distribution and <nl> + modification follow . <nl> + <nl> + TERMS AND CONDITIONS <nl> + <nl> + 0 . Definitions . <nl> + <nl> + " This License " refers to version 3 of the GNU General Public License . <nl> + <nl> + " Copyright " also means copyright - like laws that apply to other kinds of <nl> + works , such as semiconductor masks . <nl> + <nl> + " The Program " refers to any copyrightable work licensed under this <nl> + License . Each licensee is addressed as " you " . " Licensees " and <nl> + " recipients " may be individuals or organizations . <nl> + <nl> + To " modify " a work means to copy from or adapt all or part of the work <nl> + in a fashion requiring copyright permission , other than the making of an <nl> + exact copy . The resulting work is called a " modified version " of the <nl> + earlier work or a work " based on " the earlier work . <nl> + <nl> + A " covered work " means either the unmodified Program or a work based <nl> + on the Program . 
<nl> + <nl> + To " propagate " a work means to do anything with it that , without <nl> + permission , would make you directly or secondarily liable for <nl> + infringement under applicable copyright law , except executing it on a <nl> + computer or modifying a private copy . Propagation includes copying , <nl> + distribution ( with or without modification ) , making available to the <nl> + public , and in some countries other activities as well . <nl> + <nl> + To " convey " a work means any kind of propagation that enables other <nl> + parties to make or receive copies . Mere interaction with a user through <nl> + a computer network , with no transfer of a copy , is not conveying . <nl> + <nl> + An interactive user interface displays " Appropriate Legal Notices " <nl> + to the extent that it includes a convenient and prominently visible <nl> + feature that ( 1 ) displays an appropriate copyright notice , and ( 2 ) <nl> + tells the user that there is no warranty for the work ( except to the <nl> + extent that warranties are provided ) , that licensees may convey the <nl> + work under this License , and how to view a copy of this License . If <nl> + the interface presents a list of user commands or options , such as a <nl> + menu , a prominent item in the list meets this criterion . <nl> + <nl> + 1 . Source Code . <nl> + <nl> + The " source code " for a work means the preferred form of the work <nl> + for making modifications to it . " Object code " means any non - source <nl> + form of a work . <nl> + <nl> + A " Standard Interface " means an interface that either is an official <nl> + standard defined by a recognized standards body , or , in the case of <nl> + interfaces specified for a particular programming language , one that <nl> + is widely used among developers working in that language . <nl> + <nl> + The " System Libraries " of an executable work include anything , other <nl> + than the work as a whole , that ( a ) is included in the normal form of <nl> + packaging a Major Component , but which is not part of that Major <nl> + Component , and ( b ) serves only to enable use of the work with that <nl> + Major Component , or to implement a Standard Interface for which an <nl> + implementation is available to the public in source code form . A <nl> + " Major Component " , in this context , means a major essential component <nl> + ( kernel , window system , and so on ) of the specific operating system <nl> + ( if any ) on which the executable work runs , or a compiler used to <nl> + produce the work , or an object code interpreter used to run it . <nl> + <nl> + The " Corresponding Source " for a work in object code form means all <nl> + the source code needed to generate , install , and ( for an executable <nl> + work ) run the object code and to modify the work , including scripts to <nl> + control those activities . However , it does not include the work ' s <nl> + System Libraries , or general - purpose tools or generally available free <nl> + programs which are used unmodified in performing those activities but <nl> + which are not part of the work . For example , Corresponding Source <nl> + includes interface definition files associated with source files for <nl> + the work , and the source code for shared libraries and dynamically <nl> + linked subprograms that the work is specifically designed to require , <nl> + such as by intimate data communication or control flow between those <nl> + subprograms and other parts of the work . 
<nl> + <nl> + The Corresponding Source need not include anything that users <nl> + can regenerate automatically from other parts of the Corresponding <nl> + Source . <nl> + <nl> + The Corresponding Source for a work in source code form is that <nl> + same work . <nl> + <nl> + 2 . Basic Permissions . <nl> + <nl> + All rights granted under this License are granted for the term of <nl> + copyright on the Program , and are irrevocable provided the stated <nl> + conditions are met . This License explicitly affirms your unlimited <nl> + permission to run the unmodified Program . The output from running a <nl> + covered work is covered by this License only if the output , given its <nl> + content , constitutes a covered work . This License acknowledges your <nl> + rights of fair use or other equivalent , as provided by copyright law . <nl> + <nl> + You may make , run and propagate covered works that you do not <nl> + convey , without conditions so long as your license otherwise remains <nl> + in force . You may convey covered works to others for the sole purpose <nl> + of having them make modifications exclusively for you , or provide you <nl> + with facilities for running those works , provided that you comply with <nl> + the terms of this License in conveying all material for which you do <nl> + not control copyright . Those thus making or running the covered works <nl> + for you must do so exclusively on your behalf , under your direction <nl> + and control , on terms that prohibit them from making any copies of <nl> + your copyrighted material outside their relationship with you . <nl> + <nl> + Conveying under any other circumstances is permitted solely under <nl> + the conditions stated below . Sublicensing is not allowed ; section 10 <nl> + makes it unnecessary . <nl> + <nl> + 3 . Protecting Users ' Legal Rights From Anti - Circumvention Law . <nl> + <nl> + No covered work shall be deemed part of an effective technological <nl> + measure under any applicable law fulfilling obligations under article <nl> + 11 of the WIPO copyright treaty adopted on 20 December 1996 , or <nl> + similar laws prohibiting or restricting circumvention of such <nl> + measures . <nl> + <nl> + When you convey a covered work , you waive any legal power to forbid <nl> + circumvention of technological measures to the extent such circumvention <nl> + is effected by exercising rights under this License with respect to <nl> + the covered work , and you disclaim any intention to limit operation or <nl> + modification of the work as a means of enforcing , against the work ' s <nl> + users , your or third parties ' legal rights to forbid circumvention of <nl> + technological measures . <nl> + <nl> + 4 . Conveying Verbatim Copies . <nl> + <nl> + You may convey verbatim copies of the Program ' s source code as you <nl> + receive it , in any medium , provided that you conspicuously and <nl> + appropriately publish on each copy an appropriate copyright notice ; <nl> + keep intact all notices stating that this License and any <nl> + non - permissive terms added in accord with section 7 apply to the code ; <nl> + keep intact all notices of the absence of any warranty ; and give all <nl> + recipients a copy of this License along with the Program . <nl> + <nl> + You may charge any price or no price for each copy that you convey , <nl> + and you may offer support or warranty protection for a fee . <nl> + <nl> + 5 . Conveying Modified Source Versions . 
<nl> + <nl> + You may convey a work based on the Program , or the modifications to <nl> + produce it from the Program , in the form of source code under the <nl> + terms of section 4 , provided that you also meet all of these conditions : <nl> + <nl> + a ) The work must carry prominent notices stating that you modified <nl> + it , and giving a relevant date . <nl> + <nl> + b ) The work must carry prominent notices stating that it is <nl> + released under this License and any conditions added under section <nl> + 7 . This requirement modifies the requirement in section 4 to <nl> + " keep intact all notices " . <nl> + <nl> + c ) You must license the entire work , as a whole , under this <nl> + License to anyone who comes into possession of a copy . This <nl> + License will therefore apply , along with any applicable section 7 <nl> + additional terms , to the whole of the work , and all its parts , <nl> + regardless of how they are packaged . This License gives no <nl> + permission to license the work in any other way , but it does not <nl> + invalidate such permission if you have separately received it . <nl> + <nl> + d ) If the work has interactive user interfaces , each must display <nl> + Appropriate Legal Notices ; however , if the Program has interactive <nl> + interfaces that do not display Appropriate Legal Notices , your <nl> + work need not make them do so . <nl> + <nl> + A compilation of a covered work with other separate and independent <nl> + works , which are not by their nature extensions of the covered work , <nl> + and which are not combined with it such as to form a larger program , <nl> + in or on a volume of a storage or distribution medium , is called an <nl> + " aggregate " if the compilation and its resulting copyright are not <nl> + used to limit the access or legal rights of the compilation ' s users <nl> + beyond what the individual works permit . Inclusion of a covered work <nl> + in an aggregate does not cause this License to apply to the other <nl> + parts of the aggregate . <nl> + <nl> + 6 . Conveying Non - Source Forms . <nl> + <nl> + You may convey a covered work in object code form under the terms <nl> + of sections 4 and 5 , provided that you also convey the <nl> + machine - readable Corresponding Source under the terms of this License , <nl> + in one of these ways : <nl> + <nl> + a ) Convey the object code in , or embodied in , a physical product <nl> + ( including a physical distribution medium ) , accompanied by the <nl> + Corresponding Source fixed on a durable physical medium <nl> + customarily used for software interchange . <nl> + <nl> + b ) Convey the object code in , or embodied in , a physical product <nl> + ( including a physical distribution medium ) , accompanied by a <nl> + written offer , valid for at least three years and valid for as <nl> + long as you offer spare parts or customer support for that product <nl> + model , to give anyone who possesses the object code either ( 1 ) a <nl> + copy of the Corresponding Source for all the software in the <nl> + product that is covered by this License , on a durable physical <nl> + medium customarily used for software interchange , for a price no <nl> + more than your reasonable cost of physically performing this <nl> + conveying of source , or ( 2 ) access to copy the <nl> + Corresponding Source from a network server at no charge . <nl> + <nl> + c ) Convey individual copies of the object code with a copy of the <nl> + written offer to provide the Corresponding Source . 
This <nl> + alternative is allowed only occasionally and noncommercially , and <nl> + only if you received the object code with such an offer , in accord <nl> + with subsection 6b . <nl> + <nl> + d ) Convey the object code by offering access from a designated <nl> + place ( gratis or for a charge ) , and offer equivalent access to the <nl> + Corresponding Source in the same way through the same place at no <nl> + further charge . You need not require recipients to copy the <nl> + Corresponding Source along with the object code . If the place to <nl> + copy the object code is a network server , the Corresponding Source <nl> + may be on a different server ( operated by you or a third party ) <nl> + that supports equivalent copying facilities , provided you maintain <nl> + clear directions next to the object code saying where to find the <nl> + Corresponding Source . Regardless of what server hosts the <nl> + Corresponding Source , you remain obligated to ensure that it is <nl> + available for as long as needed to satisfy these requirements . <nl> + <nl> + e ) Convey the object code using peer - to - peer transmission , provided <nl> + you inform other peers where the object code and Corresponding <nl> + Source of the work are being offered to the general public at no <nl> + charge under subsection 6d . <nl> + <nl> + A separable portion of the object code , whose source code is excluded <nl> + from the Corresponding Source as a System Library , need not be <nl> + included in conveying the object code work . <nl> + <nl> + A " User Product " is either ( 1 ) a " consumer product " , which means any <nl> + tangible personal property which is normally used for personal , family , <nl> + or household purposes , or ( 2 ) anything designed or sold for incorporation <nl> + into a dwelling . In determining whether a product is a consumer product , <nl> + doubtful cases shall be resolved in favor of coverage . For a particular <nl> + product received by a particular user , " normally used " refers to a <nl> + typical or common use of that class of product , regardless of the status <nl> + of the particular user or of the way in which the particular user <nl> + actually uses , or expects or is expected to use , the product . A product <nl> + is a consumer product regardless of whether the product has substantial <nl> + commercial , industrial or non - consumer uses , unless such uses represent <nl> + the only significant mode of use of the product . <nl> + <nl> + " Installation Information " for a User Product means any methods , <nl> + procedures , authorization keys , or other information required to install <nl> + and execute modified versions of a covered work in that User Product from <nl> + a modified version of its Corresponding Source . The information must <nl> + suffice to ensure that the continued functioning of the modified object <nl> + code is in no case prevented or interfered with solely because <nl> + modification has been made . <nl> + <nl> + If you convey an object code work under this section in , or with , or <nl> + specifically for use in , a User Product , and the conveying occurs as <nl> + part of a transaction in which the right of possession and use of the <nl> + User Product is transferred to the recipient in perpetuity or for a <nl> + fixed term ( regardless of how the transaction is characterized ) , the <nl> + Corresponding Source conveyed under this section must be accompanied <nl> + by the Installation Information . 
But this requirement does not apply <nl> + if neither you nor any third party retains the ability to install <nl> + modified object code on the User Product ( for example , the work has <nl> + been installed in ROM ) . <nl> + <nl> + The requirement to provide Installation Information does not include a <nl> + requirement to continue to provide support service , warranty , or updates <nl> + for a work that has been modified or installed by the recipient , or for <nl> + the User Product in which it has been modified or installed . Access to a <nl> + network may be denied when the modification itself materially and <nl> + adversely affects the operation of the network or violates the rules and <nl> + protocols for communication across the network . <nl> + <nl> + Corresponding Source conveyed , and Installation Information provided , <nl> + in accord with this section must be in a format that is publicly <nl> + documented ( and with an implementation available to the public in <nl> + source code form ) , and must require no special password or key for <nl> + unpacking , reading or copying . <nl> + <nl> + 7 . Additional Terms . <nl> + <nl> + " Additional permissions " are terms that supplement the terms of this <nl> + License by making exceptions from one or more of its conditions . <nl> + Additional permissions that are applicable to the entire Program shall <nl> + be treated as though they were included in this License , to the extent <nl> + that they are valid under applicable law . If additional permissions <nl> + apply only to part of the Program , that part may be used separately <nl> + under those permissions , but the entire Program remains governed by <nl> + this License without regard to the additional permissions . <nl> + <nl> + When you convey a copy of a covered work , you may at your option <nl> + remove any additional permissions from that copy , or from any part of <nl> + it . ( Additional permissions may be written to require their own <nl> + removal in certain cases when you modify the work . ) You may place <nl> + additional permissions on material , added by you to a covered work , <nl> + for which you have or can give appropriate copyright permission . 
<nl> + <nl> + Notwithstanding any other provision of this License , for material you <nl> + add to a covered work , you may ( if authorized by the copyright holders of <nl> + that material ) supplement the terms of this License with terms : <nl> + <nl> + a ) Disclaiming warranty or limiting liability differently from the <nl> + terms of sections 15 and 16 of this License ; or <nl> + <nl> + b ) Requiring preservation of specified reasonable legal notices or <nl> + author attributions in that material or in the Appropriate Legal <nl> + Notices displayed by works containing it ; or <nl> + <nl> + c ) Prohibiting misrepresentation of the origin of that material , or <nl> + requiring that modified versions of such material be marked in <nl> + reasonable ways as different from the original version ; or <nl> + <nl> + d ) Limiting the use for publicity purposes of names of licensors or <nl> + authors of the material ; or <nl> + <nl> + e ) Declining to grant rights under trademark law for use of some <nl> + trade names , trademarks , or service marks ; or <nl> + <nl> + f ) Requiring indemnification of licensors and authors of that <nl> + material by anyone who conveys the material ( or modified versions of <nl> + it ) with contractual assumptions of liability to the recipient , for <nl> + any liability that these contractual assumptions directly impose on <nl> + those licensors and authors . <nl> + <nl> + All other non - permissive additional terms are considered " further <nl> + restrictions " within the meaning of section 10 . If the Program as you <nl> + received it , or any part of it , contains a notice stating that it is <nl> + governed by this License along with a term that is a further <nl> + restriction , you may remove that term . If a license document contains <nl> + a further restriction but permits relicensing or conveying under this <nl> + License , you may add to a covered work material governed by the terms <nl> + of that license document , provided that the further restriction does <nl> + not survive such relicensing or conveying . <nl> + <nl> + If you add terms to a covered work in accord with this section , you <nl> + must place , in the relevant source files , a statement of the <nl> + additional terms that apply to those files , or a notice indicating <nl> + where to find the applicable terms . <nl> + <nl> + Additional terms , permissive or non - permissive , may be stated in the <nl> + form of a separately written license , or stated as exceptions ; <nl> + the above requirements apply either way . <nl> + <nl> + 8 . Termination . <nl> + <nl> + You may not propagate or modify a covered work except as expressly <nl> + provided under this License . Any attempt otherwise to propagate or <nl> + modify it is void , and will automatically terminate your rights under <nl> + this License ( including any patent licenses granted under the third <nl> + paragraph of section 11 ) . <nl> + <nl> + However , if you cease all violation of this License , then your <nl> + license from a particular copyright holder is reinstated ( a ) <nl> + provisionally , unless and until the copyright holder explicitly and <nl> + finally terminates your license , and ( b ) permanently , if the copyright <nl> + holder fails to notify you of the violation by some reasonable means <nl> + prior to 60 days after the cessation . 
<nl> + <nl> + Moreover , your license from a particular copyright holder is <nl> + reinstated permanently if the copyright holder notifies you of the <nl> + violation by some reasonable means , this is the first time you have <nl> + received notice of violation of this License ( for any work ) from that <nl> + copyright holder , and you cure the violation prior to 30 days after <nl> + your receipt of the notice . <nl> + <nl> + Termination of your rights under this section does not terminate the <nl> + licenses of parties who have received copies or rights from you under <nl> + this License . If your rights have been terminated and not permanently <nl> + reinstated , you do not qualify to receive new licenses for the same <nl> + material under section 10 . <nl> + <nl> + 9 . Acceptance Not Required for Having Copies . <nl> + <nl> + You are not required to accept this License in order to receive or <nl> + run a copy of the Program . Ancillary propagation of a covered work <nl> + occurring solely as a consequence of using peer - to - peer transmission <nl> + to receive a copy likewise does not require acceptance . However , <nl> + nothing other than this License grants you permission to propagate or <nl> + modify any covered work . These actions infringe copyright if you do <nl> + not accept this License . Therefore , by modifying or propagating a <nl> + covered work , you indicate your acceptance of this License to do so . <nl> + <nl> + 10 . Automatic Licensing of Downstream Recipients . <nl> + <nl> + Each time you convey a covered work , the recipient automatically <nl> + receives a license from the original licensors , to run , modify and <nl> + propagate that work , subject to this License . You are not responsible <nl> + for enforcing compliance by third parties with this License . <nl> + <nl> + An " entity transaction " is a transaction transferring control of an <nl> + organization , or substantially all assets of one , or subdividing an <nl> + organization , or merging organizations . If propagation of a covered <nl> + work results from an entity transaction , each party to that <nl> + transaction who receives a copy of the work also receives whatever <nl> + licenses to the work the party ' s predecessor in interest had or could <nl> + give under the previous paragraph , plus a right to possession of the <nl> + Corresponding Source of the work from the predecessor in interest , if <nl> + the predecessor has it or can get it with reasonable efforts . <nl> + <nl> + You may not impose any further restrictions on the exercise of the <nl> + rights granted or affirmed under this License . For example , you may <nl> + not impose a license fee , royalty , or other charge for exercise of <nl> + rights granted under this License , and you may not initiate litigation <nl> + ( including a cross - claim or counterclaim in a lawsuit ) alleging that <nl> + any patent claim is infringed by making , using , selling , offering for <nl> + sale , or importing the Program or any portion of it . <nl> + <nl> + 11 . Patents . <nl> + <nl> + A " contributor " is a copyright holder who authorizes use under this <nl> + License of the Program or a work on which the Program is based . The <nl> + work thus licensed is called the contributor ' s " contributor version " . 
<nl> + <nl> + A contributor ' s " essential patent claims " are all patent claims <nl> + owned or controlled by the contributor , whether already acquired or <nl> + hereafter acquired , that would be infringed by some manner , permitted <nl> + by this License , of making , using , or selling its contributor version , <nl> + but do not include claims that would be infringed only as a <nl> + consequence of further modification of the contributor version . For <nl> + purposes of this definition , " control " includes the right to grant <nl> + patent sublicenses in a manner consistent with the requirements of <nl> + this License . <nl> + <nl> + Each contributor grants you a non - exclusive , worldwide , royalty - free <nl> + patent license under the contributor ' s essential patent claims , to <nl> + make , use , sell , offer for sale , import and otherwise run , modify and <nl> + propagate the contents of its contributor version . <nl> + <nl> + In the following three paragraphs , a " patent license " is any express <nl> + agreement or commitment , however denominated , not to enforce a patent <nl> + ( such as an express permission to practice a patent or covenant not to <nl> + sue for patent infringement ) . To " grant " such a patent license to a <nl> + party means to make such an agreement or commitment not to enforce a <nl> + patent against the party . <nl> + <nl> + If you convey a covered work , knowingly relying on a patent license , <nl> + and the Corresponding Source of the work is not available for anyone <nl> + to copy , free of charge and under the terms of this License , through a <nl> + publicly available network server or other readily accessible means , <nl> + then you must either ( 1 ) cause the Corresponding Source to be so <nl> + available , or ( 2 ) arrange to deprive yourself of the benefit of the <nl> + patent license for this particular work , or ( 3 ) arrange , in a manner <nl> + consistent with the requirements of this License , to extend the patent <nl> + license to downstream recipients . " Knowingly relying " means you have <nl> + actual knowledge that , but for the patent license , your conveying the <nl> + covered work in a country , or your recipient ' s use of the covered work <nl> + in a country , would infringe one or more identifiable patents in that <nl> + country that you have reason to believe are valid . <nl> + <nl> + If , pursuant to or in connection with a single transaction or <nl> + arrangement , you convey , or propagate by procuring conveyance of , a <nl> + covered work , and grant a patent license to some of the parties <nl> + receiving the covered work authorizing them to use , propagate , modify <nl> + or convey a specific copy of the covered work , then the patent license <nl> + you grant is automatically extended to all recipients of the covered <nl> + work and works based on it . <nl> + <nl> + A patent license is " discriminatory " if it does not include within <nl> + the scope of its coverage , prohibits the exercise of , or is <nl> + conditioned on the non - exercise of one or more of the rights that are <nl> + specifically granted under this License . 
You may not convey a covered <nl> + work if you are a party to an arrangement with a third party that is <nl> + in the business of distributing software , under which you make payment <nl> + to the third party based on the extent of your activity of conveying <nl> + the work , and under which the third party grants , to any of the <nl> + parties who would receive the covered work from you , a discriminatory <nl> + patent license ( a ) in connection with copies of the covered work <nl> + conveyed by you ( or copies made from those copies ) , or ( b ) primarily <nl> + for and in connection with specific products or compilations that <nl> + contain the covered work , unless you entered into that arrangement , <nl> + or that patent license was granted , prior to 28 March 2007 . <nl> + <nl> + Nothing in this License shall be construed as excluding or limiting <nl> + any implied license or other defenses to infringement that may <nl> + otherwise be available to you under applicable patent law . <nl> + <nl> + 12 . No Surrender of Others ' Freedom . <nl> + <nl> + If conditions are imposed on you ( whether by court order , agreement or <nl> + otherwise ) that contradict the conditions of this License , they do not <nl> + excuse you from the conditions of this License . If you cannot convey a <nl> + covered work so as to satisfy simultaneously your obligations under this <nl> + License and any other pertinent obligations , then as a consequence you may <nl> + not convey it at all . For example , if you agree to terms that obligate you <nl> + to collect a royalty for further conveying from those to whom you convey <nl> + the Program , the only way you could satisfy both those terms and this <nl> + License would be to refrain entirely from conveying the Program . <nl> + <nl> + 13 . Use with the GNU Affero General Public License . <nl> + <nl> + Notwithstanding any other provision of this License , you have <nl> + permission to link or combine any covered work with a work licensed <nl> + under version 3 of the GNU Affero General Public License into a single <nl> + combined work , and to convey the resulting work . The terms of this <nl> + License will continue to apply to the part which is the covered work , <nl> + but the special requirements of the GNU Affero General Public License , <nl> + section 13 , concerning interaction through a network will apply to the <nl> + combination as such . <nl> + <nl> + 14 . Revised Versions of this License . <nl> + <nl> + The Free Software Foundation may publish revised and / or new versions of <nl> + the GNU General Public License from time to time . Such new versions will <nl> + be similar in spirit to the present version , but may differ in detail to <nl> + address new problems or concerns . <nl> + <nl> + Each version is given a distinguishing version number . If the <nl> + Program specifies that a certain numbered version of the GNU General <nl> + Public License " or any later version " applies to it , you have the <nl> + option of following the terms and conditions either of that numbered <nl> + version or of any later version published by the Free Software <nl> + Foundation . If the Program does not specify a version number of the <nl> + GNU General Public License , you may choose any version ever published <nl> + by the Free Software Foundation . 
<nl> + <nl> + If the Program specifies that a proxy can decide which future <nl> + versions of the GNU General Public License can be used , that proxy ' s <nl> + public statement of acceptance of a version permanently authorizes you <nl> + to choose that version for the Program . <nl> + <nl> + Later license versions may give you additional or different <nl> + permissions . However , no additional obligations are imposed on any <nl> + author or copyright holder as a result of your choosing to follow a <nl> + later version . <nl> + <nl> + 15 . Disclaimer of Warranty . <nl> + <nl> + THERE IS NO WARRANTY FOR THE PROGRAM , TO THE EXTENT PERMITTED BY <nl> + APPLICABLE LAW . EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT <nl> + HOLDERS AND / OR OTHER PARTIES PROVIDE THE PROGRAM " AS IS " WITHOUT WARRANTY <nl> + OF ANY KIND , EITHER EXPRESSED OR IMPLIED , INCLUDING , BUT NOT LIMITED TO , <nl> + THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR <nl> + PURPOSE . THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM <nl> + IS WITH YOU . SHOULD THE PROGRAM PROVE DEFECTIVE , YOU ASSUME THE COST OF <nl> + ALL NECESSARY SERVICING , REPAIR OR CORRECTION . <nl> + <nl> + 16 . Limitation of Liability . <nl> + <nl> + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING <nl> + WILL ANY COPYRIGHT HOLDER , OR ANY OTHER PARTY WHO MODIFIES AND / OR CONVEYS <nl> + THE PROGRAM AS PERMITTED ABOVE , BE LIABLE TO YOU FOR DAMAGES , INCLUDING ANY <nl> + GENERAL , SPECIAL , INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE <nl> + USE OR INABILITY TO USE THE PROGRAM ( INCLUDING BUT NOT LIMITED TO LOSS OF <nl> + DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD <nl> + PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS ) , <nl> + EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF <nl> + SUCH DAMAGES . <nl> + <nl> + 17 . Interpretation of Sections 15 and 16 . <nl> + <nl> + If the disclaimer of warranty and limitation of liability provided <nl> + above cannot be given local legal effect according to their terms , <nl> + reviewing courts shall apply local law that most closely approximates <nl> + an absolute waiver of all civil liability in connection with the <nl> + Program , unless a warranty or assumption of liability accompanies a <nl> + copy of the Program in return for a fee . <nl> + <nl> + END OF TERMS AND CONDITIONS <nl> + <nl> + How to Apply These Terms to Your New Programs <nl> + <nl> + If you develop a new program , and you want it to be of the greatest <nl> + possible use to the public , the best way to achieve this is to make it <nl> + free software which everyone can redistribute and change under these terms . <nl> + <nl> + To do so , attach the following notices to the program . It is safest <nl> + to attach them to the start of each source file to most effectively <nl> + state the exclusion of warranty ; and each file should have at least <nl> + the " copyright " line and a pointer to where the full notice is found . <nl> + <nl> + < one line to give the program ' s name and a brief idea of what it does . > <nl> + Copyright ( C ) < year > < name of author > <nl> + <nl> + This program is free software : you can redistribute it and / or modify <nl> + it under the terms of the GNU General Public License as published by <nl> + the Free Software Foundation , either version 3 of the License , or <nl> + ( at your option ) any later version . 
<nl> + <nl> + This program is distributed in the hope that it will be useful , <nl> + but WITHOUT ANY WARRANTY ; without even the implied warranty of <nl> + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the <nl> + GNU General Public License for more details . <nl> + <nl> + You should have received a copy of the GNU General Public License <nl> + along with this program . If not , see < http : / / www . gnu . org / licenses / > . <nl> + <nl> + Also add information on how to contact you by electronic and paper mail . <nl> + <nl> + If the program does terminal interaction , make it output a short <nl> + notice like this when it starts in an interactive mode : <nl> + <nl> + < program > Copyright ( C ) < year > < name of author > <nl> + This program comes with ABSOLUTELY NO WARRANTY ; for details type ` show w ' . <nl> + This is free software , and you are welcome to redistribute it <nl> + under certain conditions ; type ` show c ' for details . <nl> + <nl> + The hypothetical commands ` show w ' and ` show c ' should show the appropriate <nl> + parts of the General Public License . Of course , your program ' s commands <nl> + might be different ; for a GUI interface , you would use an " about box " . <nl> + <nl> + You should also get your employer ( if you work as a programmer ) or school , <nl> + if any , to sign a " copyright disclaimer " for the program , if necessary . <nl> + For more information on this , and how to apply and follow the GNU GPL , see <nl> + < http : / / www . gnu . org / licenses / > . <nl> + <nl> + The GNU General Public License does not permit incorporating your program <nl> + into proprietary programs . If your program is a subroutine library , you <nl> + may consider it more useful to permit linking proprietary applications with <nl> + the library . If this is what you want to do , use the GNU Lesser General <nl> + Public License instead of this License . But first , please read <nl> + < http : / / www . gnu . org / philosophy / why - not - lgpl . html > . <nl> mmm a / README <nl> ppp b / README <nl> x64_dbg is an open - source x32 / x64 debugger for windows . <nl> - Memory breakpoints sometimes fail ( TitanEngine bug ) <nl> <nl> > License : <nl> - x64_dbg is licensed under LGLPv3 , which means you can freely distribute <nl> + x64_dbg is licensed under GLPv3 , which means you can freely distribute <nl> and modify the source of x64_dbg , as long as you share your changes <nl> - with us . Plugins do not have to be open source or free to distribute . <nl> + with us . The only exception of is that plugins you write do not have <nl> + to comply with the original GLPv3 license ( they do not have to be open - <nl> + source ) , except if you directly use code from inside x64_dbg . In that <nl> + case you would still have to share the changes with us . <nl> <nl> > Credits : <nl> - Debugger core by TitanEngine Community Edition <nl> mmm a / help / template / fixedtop . htm <nl> ppp b / help / template / fixedtop . htm <nl> <nl> < p > < FONT face = Courier > ( $ content $ ) < / FONT > < / p > <nl> < hr > <nl> < font face = " Courier " color = " # 808080 " size = " 1 " > < A <nl> - href = " http : / / x64dbg . com " target = _blank > x64_dbg < / A > , & nbsp ; LGPLv3 < / font > & nbsp ; < / div > <nl> + href = " http : / / x64dbg . com " target = _blank > x64_dbg < / A > , & nbsp ; GPLv3 < / font > & nbsp ; < / div > <nl> <nl> < / body > <nl> <nl>
Revert " PROJECT : changed GPL to LGPL "
x64dbg/x64dbg
cef97919f87d999c4b5c14be0f61edc5000639f5
2014-07-01T18:55:40Z
mmm a / src / json . hpp <nl> ppp b / src / json . hpp <nl> class basic_json <nl> <nl> private : <nl> static constexpr size_t s_capacity = 30 ; <nl> - std : : array < char , s_capacity + 2 > m_buf { } ; <nl> + std : : array < char , s_capacity + 2 > m_buf { } ; / / + 2 for leading ' - ' <nl> + / / and trailing ' \ 0 ' <nl> <nl> template < typename T > <nl> void x_write ( T x , std : : true_type ) <nl> { <nl> + static_assert ( std : : numeric_limits < T > : : digits10 < = s_capacity , " " ) ; <nl> + <nl> const bool is_neg = x < 0 ; <nl> size_t i = 0 ; <nl> <nl> class basic_json <nl> x / = 10 ; <nl> } <nl> <nl> - if ( i = = s_capacity ) <nl> - { <nl> - std : : runtime_error ( <nl> - " Number is unexpectedly long : " <nl> - + std : : to_string ( x ) ) ; <nl> - } <nl> + assert ( i < s_capacity ) ; <nl> <nl> if ( i = = 0 ) <nl> { <nl> class basic_json <nl> <nl> snprintf ( m_buf . data ( ) , m_buf . size ( ) , fmt , x ) ; <nl> <nl> + # if 0 <nl> + / / C locales and C + + locales are similar but <nl> + / / different . <nl> + / / <nl> + / / If working with C + + streams we ' d ' ve used <nl> + / / these , but for C formatting functions we <nl> + / / have to use C locales ( setlocale / localeconv ) , <nl> + / / rather than C + + locales ( std : : locale installed <nl> + / / by std : : locale : : global ( ) ) . <nl> const std : : locale loc ; <nl> <nl> - / / erase thousands separator <nl> - { <nl> - const char sep = <nl> - std : : use_facet < std : : numpunct < char > > ( <nl> - loc ) . thousands_sep ( ) ; <nl> + const char thousands_sep = <nl> + std : : use_facet < std : : numpunct < char > > ( <nl> + loc ) . thousands_sep ( ) ; <nl> + <nl> + const char decimal_point = <nl> + std : : use_facet < std : : numpunct < char > > ( <nl> + loc ) . decimal_point ( ) ; <nl> + # else <nl> + const auto loc = localeconv ( ) ; <nl> + assert ( loc ! = nullptr ) ; <nl> + const char thousands_sep = ! loc - > thousands_sep ? ' \ 0 ' <nl> + : loc - > thousands_sep [ 0 ] ; <nl> <nl> + const char decimal_point = ! loc - > decimal_point ? ' \ 0 ' <nl> + : loc - > decimal_point [ 0 ] ; <nl> + # endif <nl> + <nl> + / / erase thousands separator <nl> + if ( thousands_sep ) { <nl> auto end = std : : remove ( m_buf . begin ( ) , <nl> m_buf . end ( ) , <nl> - sep ) ; <nl> + thousands_sep ) ; <nl> <nl> std : : fill ( end , m_buf . end ( ) , ' \ 0 ' ) ; <nl> } <nl> <nl> / / convert decimal point to ' . ' <nl> + if ( decimal_point and decimal_point ! = ' . ' ) <nl> { <nl> - const char decimal_point = <nl> - std : : use_facet < std : : numpunct < char > > ( <nl> - loc ) . decimal_point ( ) ; <nl> - <nl> - for ( auto & c : m_buf ) <nl> + for ( auto & c : m_buf ) <nl> { <nl> - if ( decimal_point = = ' . ' ) { <nl> - break ; <nl> - } <nl> - <nl> - if ( c = = decimal_point ) <nl> + if ( c = = decimal_point ) <nl> { <nl> c = ' . ' ; <nl> break ; <nl> mmm a / src / json . hpp . re2c <nl> ppp b / src / json . hpp . 
re2c <nl> class basic_json <nl> <nl> private : <nl> static constexpr size_t s_capacity = 30 ; <nl> - std : : array < char , s_capacity + 2 > m_buf { } ; <nl> + std : : array < char , s_capacity + 2 > m_buf { } ; / / + 2 for leading ' - ' <nl> + / / and trailing ' \ 0 ' <nl> <nl> template < typename T > <nl> void x_write ( T x , std : : true_type ) <nl> { <nl> + static_assert ( std : : numeric_limits < T > : : digits10 < = s_capacity , " " ) ; <nl> + <nl> const bool is_neg = x < 0 ; <nl> size_t i = 0 ; <nl> <nl> class basic_json <nl> x / = 10 ; <nl> } <nl> <nl> - if ( i = = s_capacity ) <nl> - { <nl> - std : : runtime_error ( <nl> - " Number is unexpectedly long : " <nl> - + std : : to_string ( x ) ) ; <nl> - } <nl> + assert ( i < s_capacity ) ; <nl> <nl> if ( i = = 0 ) <nl> { <nl> class basic_json <nl> <nl> snprintf ( m_buf . data ( ) , m_buf . size ( ) , fmt , x ) ; <nl> <nl> + # if 0 <nl> + / / C locales and C + + locales are similar but <nl> + / / different . <nl> + / / <nl> + / / If working with C + + streams we ' d ' ve used <nl> + / / these , but for C formatting functions we <nl> + / / have to use C locales ( setlocale / localeconv ) , <nl> + / / rather than C + + locales ( std : : locale installed <nl> + / / by std : : locale : : global ( ) ) . <nl> const std : : locale loc ; <nl> <nl> - / / erase thousands separator <nl> - { <nl> - const char sep = <nl> - std : : use_facet < std : : numpunct < char > > ( <nl> - loc ) . thousands_sep ( ) ; <nl> + const char thousands_sep = <nl> + std : : use_facet < std : : numpunct < char > > ( <nl> + loc ) . thousands_sep ( ) ; <nl> + <nl> + const char decimal_point = <nl> + std : : use_facet < std : : numpunct < char > > ( <nl> + loc ) . decimal_point ( ) ; <nl> + # else <nl> + const auto loc = localeconv ( ) ; <nl> + assert ( loc ! = nullptr ) ; <nl> + const char thousands_sep = ! loc - > thousands_sep ? ' \ 0 ' <nl> + : loc - > thousands_sep [ 0 ] ; <nl> <nl> + const char decimal_point = ! loc - > decimal_point ? ' \ 0 ' <nl> + : loc - > decimal_point [ 0 ] ; <nl> + # endif <nl> + <nl> + / / erase thousands separator <nl> + if ( thousands_sep ) { <nl> auto end = std : : remove ( m_buf . begin ( ) , <nl> m_buf . end ( ) , <nl> - sep ) ; <nl> + thousands_sep ) ; <nl> <nl> std : : fill ( end , m_buf . end ( ) , ' \ 0 ' ) ; <nl> } <nl> <nl> / / convert decimal point to ' . ' <nl> + if ( decimal_point and decimal_point ! = ' . ' ) <nl> { <nl> - const char decimal_point = <nl> - std : : use_facet < std : : numpunct < char > > ( <nl> - loc ) . decimal_point ( ) ; <nl> - <nl> - for ( auto & c : m_buf ) <nl> + for ( auto & c : m_buf ) <nl> { <nl> - if ( decimal_point = = ' . ' ) { <nl> - break ; <nl> - } <nl> - <nl> - if ( c = = decimal_point ) <nl> + if ( c = = decimal_point ) <nl> { <nl> c = ' . ' ; <nl> break ; <nl>
Bugfix: when working with C formatting functions we need to query C locales (localeconv) rather than std::locale
nlohmann/json
738d4629554eae1870ea4f79308b22d12107c83f
2016-12-06T05:23:58Z
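The fix above hinges on a subtle split: snprintf formats numbers according to the C locale (setlocale/localeconv), not the C++ locale installed via std::locale::global(). Below is a minimal standalone sketch of the same post-processing; write_number is an illustrative helper of mine, not part of the library's API.

    #include <algorithm>
    #include <array>
    #include <cassert>
    #include <clocale>
    #include <cstdio>

    // Format a double with snprintf, then undo any locale-specific
    // grouping/decimal characters so the result is valid JSON
    // regardless of the active C locale.
    static void write_number(double x, std::array<char, 32>& buf)
    {
        std::snprintf(buf.data(), buf.size(), "%.17g", x);

        // snprintf honours the C locale, so query localeconv() rather
        // than the C++ std::locale machinery.
        const auto* loc = std::localeconv();
        assert(loc != nullptr);
        const char thousands_sep = loc->thousands_sep ? loc->thousands_sep[0] : '\0';
        const char decimal_point = loc->decimal_point ? loc->decimal_point[0] : '\0';

        // erase thousands separator
        if (thousands_sep) {
            auto end = std::remove(buf.begin(), buf.end(), thousands_sep);
            std::fill(end, buf.end(), '\0');
        }

        // convert decimal point to '.'
        if (decimal_point && decimal_point != '.') {
            std::replace(buf.begin(), buf.end(), decimal_point, '.');
        }
    }

    int main()
    {
        std::setlocale(LC_NUMERIC, "");  // pick up the user's locale, e.g. de_DE
        std::array<char, 32> buf{};
        write_number(3.141592653589793, buf);
        std::printf("%s\n", buf.data());  // always prints with '.' as decimal point
    }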
mmm a / src / compiler / node - properties . cc <nl> ppp b / src / compiler / node - properties . cc <nl> void NodeProperties : : ChangeOp ( Node * node , const Operator * new_op ) { <nl> Node * NodeProperties : : FindFrameStateBefore ( Node * node ) { <nl> Node * effect = NodeProperties : : GetEffectInput ( node ) ; <nl> while ( effect - > opcode ( ) ! = IrOpcode : : kCheckpoint ) { <nl> + if ( effect - > opcode ( ) = = IrOpcode : : kDead ) return effect ; <nl> DCHECK_EQ ( 1 , effect - > op ( ) - > EffectInputCount ( ) ) ; <nl> effect = NodeProperties : : GetEffectInput ( effect ) ; <nl> } <nl> new file mode 100644 <nl> index 00000000000 . . f0c696e14b2 <nl> mmm / dev / null <nl> ppp b / test / mjsunit / regress / regress - crbug - 617567 . js <nl> <nl> + / / Copyright 2016 the V8 project authors . All rights reserved . <nl> + / / Use of this source code is governed by a BSD - style license that can be <nl> + / / found in the LICENSE file . <nl> + <nl> + / / Flags : - - turbo - filter = * - - allow - natives - syntax <nl> + <nl> + var v1 = { } ; <nl> + function g ( ) { <nl> + v1 = [ ] ; <nl> + for ( var i = 0 ; i < 1 ; i + + ) { <nl> + v1 [ i ] ( ) ; <nl> + } <nl> + } <nl> + <nl> + var v2 = { } ; <nl> + var v3 = { } ; <nl> + function f ( ) { <nl> + v3 = v2 ; <nl> + g ( ) ; <nl> + } <nl> + <nl> + assertThrows ( g ) ; <nl> + % OptimizeFunctionOnNextCall ( f ) ; <nl> + assertThrows ( f ) ; <nl>
[turbofan] Make FindFrameStateBefore handle dead paths.
v8/v8
826627d9fd0e452a423948536076027c2b22a49b
2016-06-06T12:34:53Z
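The one-line guard matters because the walk assumes every node on the effect chain has exactly one effect input until a Checkpoint is reached; a Dead node breaks that invariant. A schematic, self-contained re-creation of the traversal follows, using stand-in types rather than V8's actual Node/NodeProperties classes.

    #include <cassert>
    #include <cstdio>

    // Illustrative stand-ins for v8's compiler graph.
    enum class Opcode { kCheckpoint, kDead, kOther };

    struct Node {
        Opcode opcode;
        Node* effect_input;  // single effect input, as the DCHECK requires
    };

    // Walk the effect chain backwards until a Checkpoint is found. On a
    // dead path there is no Checkpoint to find, so return the Dead node
    // itself instead of walking off the end of the chain.
    Node* FindFrameStateBefore(Node* node)
    {
        Node* effect = node->effect_input;
        while (effect->opcode != Opcode::kCheckpoint) {
            if (effect->opcode == Opcode::kDead) return effect;
            assert(effect->effect_input != nullptr);
            effect = effect->effect_input;
        }
        return effect;
    }

    int main()
    {
        Node dead{Opcode::kDead, nullptr};
        Node mid{Opcode::kOther, &dead};
        Node use{Opcode::kOther, &mid};
        // Without the kDead guard this walk would dereference the dead
        // node's null effect input; with it, the dead node is returned.
        std::printf("%s\n", FindFrameStateBefore(&use) == &dead ? "dead path" : "checkpoint");
    }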
mmm a / caffe2 / operators / quantized / int8_max_pool_op . h <nl> ppp b / caffe2 / operators / quantized / int8_max_pool_op . h <nl> <nl> # ifndef CAFFE2_OPERATORS_INT8_MAX_POOL_OP_H_ <nl> # define CAFFE2_OPERATORS_INT8_MAX_POOL_OP_H_ <nl> <nl> + # include < qnnpack . h > <nl> + <nl> # include " caffe2 / core / context . h " <nl> # include " caffe2 / core / operator . h " <nl> # include " caffe2 / core / tensor_int8 . h " <nl> # include " caffe2 / operators / conv_pool_op_base . h " <nl> - # include " caffe2 / operators / quantized / int8_simd . h " <nl> # include " caffe2 / operators / quantized / int8_utils . h " <nl> <nl> namespace caffe2 { <nl> <nl> namespace int8 { <nl> <nl> - namespace { <nl> - <nl> - / * <nl> - * Implementation based on TensorFlow Lite kernels : <nl> - * - Repo : https : / / github . com / tensorflow / tensorflow <nl> - * - Path : tensorflow / contrib / lite / kernels / internal / optimized / optimized_ops . h <nl> - * - Hash : d4ad9c73969c45d1a224ebfc43eb645b9860216b <nl> - * / <nl> - <nl> - / * Copyright 2017 The TensorFlow Authors . All Rights Reserved . <nl> - Licensed under the Apache License , Version 2 . 0 ( the " License " ) ; <nl> - you may not use this file except in compliance with the License . <nl> - You may obtain a copy of the License at <nl> - http : / / www . apache . org / licenses / LICENSE - 2 . 0 <nl> - Unless required by applicable law or agreed to in writing , software <nl> - distributed under the License is distributed on an " AS IS " BASIS , <nl> - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . <nl> - See the License for the specific language governing permissions and <nl> - limitations under the License . <nl> - = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = * / <nl> - <nl> - void Int8MaxPool ( <nl> - const uint8_t * input_data , <nl> - at : : IntList input_dims , <nl> - int stride_width , <nl> - int stride_height , <nl> - int pad_width , <nl> - int pad_height , <nl> - int filter_width , <nl> - int filter_height , <nl> - uint8_t * output_data , <nl> - at : : IntList output_dims , <nl> - uint8_t output_activation_min , <nl> - uint8_t output_activation_max ) { <nl> - const int batches = input_dims [ 0 ] ; <nl> - const int depth = input_dims [ 3 ] ; <nl> - const int input_height = input_dims [ 1 ] ; <nl> - const int input_width = input_dims [ 2 ] ; <nl> - const int output_height = output_dims [ 1 ] ; <nl> - const int output_width = output_dims [ 2 ] ; <nl> - for ( int batch = 0 ; batch < batches ; + + batch ) { <nl> - for ( int out_y = 0 ; out_y < output_height ; + + out_y ) { <nl> - for ( int out_x = 0 ; out_x < output_width ; + + out_x ) { <nl> - const int in_x_origin = ( out_x * stride_width ) - pad_width ; <nl> - const int in_y_origin = ( out_y * stride_height ) - pad_height ; <nl> - const int filter_x_start = std : : max ( 0 , - in_x_origin ) ; <nl> - const int filter_x_end = <nl> - std : : min ( filter_width , input_width - in_x_origin ) ; <nl> - const int filter_y_start = std : : max ( 0 , - in_y_origin ) ; <nl> - const int filter_y_end = <nl> - std : : min ( filter_height , input_height - in_y_origin ) ; <nl> - / / 2048 required by Inception v3 <nl> - static constexpr int kAccBufferMaxSize = 2048 ; <nl> - CHECK_LE ( depth , kAccBufferMaxSize ) ; <nl> - uint8_t acc [ kAccBufferMaxSize ] ; <nl> - memset ( acc , 0 , depth * sizeof ( acc [ 0 ] ) ) ; <nl> - <nl> - const uint8_t * input_ptr = <nl> - & 
input_data <nl> - [ in_x_origin * depth + in_y_origin * input_width * depth + <nl> - batch * input_height * input_width * depth ] ; <nl> - <nl> - for ( int fy = filter_y_start ; fy < filter_y_end ; fy + + ) { <nl> - const uint8_t * input_row_ptr = <nl> - & input_ptr [ fy * input_width * depth + filter_x_start * depth ] ; <nl> - <nl> - for ( int fx = filter_x_start ; fx < filter_x_end ; fx + + ) { <nl> - int channel = 0 ; <nl> - # ifdef INT8_NEON_SIMD <nl> - for ( ; channel < = depth - 16 ; channel + = 16 ) { <nl> - uint8x16_t acc_reg = vld1q_u8 ( acc + channel ) ; <nl> - uint8x16_t input_reg = vld1q_u8 ( input_row_ptr ) ; <nl> - input_row_ptr + = 16 ; <nl> - acc_reg = vmaxq_u8 ( acc_reg , input_reg ) ; <nl> - vst1q_u8 ( acc + channel , acc_reg ) ; <nl> - } <nl> - <nl> - for ( ; channel < = depth - 8 ; channel + = 8 ) { <nl> - uint8x8_t acc_reg = vld1_u8 ( acc + channel ) ; <nl> - uint8x8_t input_reg = vld1_u8 ( input_row_ptr ) ; <nl> - input_row_ptr + = 8 ; <nl> - acc_reg = vmax_u8 ( acc_reg , input_reg ) ; <nl> - vst1_u8 ( acc + channel , acc_reg ) ; <nl> - } <nl> - # endif <nl> - for ( ; channel < depth ; + + channel ) { <nl> - acc [ channel ] = std : : max ( acc [ channel ] , * input_row_ptr + + ) ; <nl> - } <nl> - } <nl> - } <nl> - uint8_t * output_ptr = <nl> - & output_data <nl> - [ out_x * depth + out_y * output_width * depth + <nl> - batch * output_height * output_width * depth ] ; <nl> - int channel = 0 ; <nl> - # ifdef INT8_NEON_SIMD <nl> - for ( ; channel < = depth - 16 ; channel + = 16 ) { <nl> - uint8x16_t a = vld1q_u8 ( acc + channel ) ; <nl> - a = vminq_u8 ( a , vdupq_n_u8 ( output_activation_max ) ) ; <nl> - a = vmaxq_u8 ( a , vdupq_n_u8 ( output_activation_min ) ) ; <nl> - vst1q_u8 ( output_ptr + channel , a ) ; <nl> - } <nl> - for ( ; channel < = depth - 8 ; channel + = 8 ) { <nl> - uint8x8_t a = vld1_u8 ( acc + channel ) ; <nl> - a = vmin_u8 ( a , vdup_n_u8 ( output_activation_max ) ) ; <nl> - a = vmax_u8 ( a , vdup_n_u8 ( output_activation_min ) ) ; <nl> - vst1_u8 ( output_ptr + channel , a ) ; <nl> - } <nl> - # endif <nl> - for ( ; channel < depth ; + + channel ) { <nl> - uint8_t a = acc [ channel ] ; <nl> - a = std : : max < uint8_t > ( a , output_activation_min ) ; <nl> - a = std : : min < uint8_t > ( a , output_activation_max ) ; <nl> - output_ptr [ channel ] = static_cast < uint8_t > ( a ) ; <nl> - } <nl> - } <nl> - } <nl> - } <nl> - } <nl> - <nl> - } / / namespace <nl> - <nl> template < Activation Ac > <nl> class Int8MaxPoolOp final : public ConvPoolOpBase < CPUContext > { <nl> public : <nl> Int8MaxPoolOp ( const OperatorDef & operator_def , Workspace * ws ) <nl> : ConvPoolOpBase < CPUContext > ( operator_def , ws ) { <nl> OPERATOR_NEEDS_FEATURE ( <nl> - this - > order_ = = StorageOrder : : NHWC , " Int8 only supports NCHW order . " ) ; <nl> + this - > order_ = = StorageOrder : : NHWC , " Int8 only supports NHWC order . " ) ; <nl> + } <nl> + <nl> + ~ Int8MaxPoolOp ( ) { <nl> + if ( this - > qnnpackOperator_ ! = nullptr ) { <nl> + qnnp_delete_operator ( this - > qnnpackOperator_ ) ; <nl> + this - > qnnpackOperator_ = nullptr ; <nl> + } <nl> } <nl> <nl> bool RunOnDeviceWithOrderNHWC ( ) override { <nl> class Int8MaxPoolOp final : public ConvPoolOpBase < CPUContext > { <nl> auto * Y = Outputs ( ) [ 0 ] - > template GetMutable < Int8TensorCPU > ( ) ; <nl> Y - > scale = X . scale ; <nl> Y - > zero_point = X . 
zero_point ; <nl> - const int32_t Y_offset = <nl> + const int32_t Y_zero_point = <nl> this - > template GetSingleArgument < int > ( " Y_zero_point " , 0 ) ; <nl> const float Y_scale = this - > template GetSingleArgument < float > ( " Y_scale " , 1 ) ; <nl> - CHECK_EQ ( Y_offset , X . zero_point ) ; <nl> + CHECK_EQ ( Y_zero_point , X . zero_point ) ; <nl> CHECK_EQ ( Y_scale , X . scale ) ; <nl> <nl> CHECK_EQ ( X . t . dim ( ) , 4 ) ; <nl> - const int height = X . t . dim32 ( 1 ) ; <nl> - const int width = X . t . dim32 ( 2 ) ; <nl> const int channels = X . t . dim32 ( 3 ) ; <nl> ConvPoolOpBase < CPUContext > : : SetOutputSize ( X . t , & ( Y - > t ) , channels ) ; <nl> <nl> - Int8MaxPool ( <nl> - X . t . template data < uint8_t > ( ) , <nl> - X . t . sizes ( ) , <nl> - stride_w ( ) , <nl> - stride_h ( ) , <nl> - pad_l ( ) , <nl> - pad_t ( ) , <nl> - kernel_w ( ) , <nl> - kernel_h ( ) , <nl> - Y - > t . template mutable_data < uint8_t > ( ) , <nl> - Y - > t . sizes ( ) , <nl> + initQNNPACK ( ) ; <nl> + <nl> + if ( this - > qnnpackOperator_ = = nullptr ) { <nl> + const qnnp_status createStatus = qnnp_create_max_pooling2d_nhwc_u8 ( <nl> + pad_t ( ) , pad_r ( ) , pad_b ( ) , pad_l ( ) , <nl> + kernel_h ( ) , kernel_w ( ) , <nl> + stride_h ( ) , stride_w ( ) , <nl> + 1 / * dilation height * / , 1 / * dilation width * / , <nl> + channels , <nl> activationLimits ( Y - > scale , Y - > zero_point , Ac ) . first , <nl> - activationLimits ( Y - > scale , Y - > zero_point , Ac ) . second ) ; <nl> + activationLimits ( Y - > scale , Y - > zero_point , Ac ) . second , <nl> + & this - > qnnpackOperator_ ) ; <nl> + CAFFE_ENFORCE ( <nl> + createStatus = = qnnp_status_success , <nl> + " failed to create QNNPACK Max Pooling operator " ) ; <nl> + CAFFE_ENFORCE ( this - > qnnpackOperator_ ! = nullptr ) ; <nl> + } <nl> + <nl> + const qnnp_status setupStatus = qnnp_setup_max_pooling2d_nhwc_u8 ( <nl> + this - > qnnpackOperator_ , <nl> + X . t . dim32 ( 0 ) , X . t . dim32 ( 1 ) , X . t . dim32 ( 2 ) , <nl> + X . t . template data < uint8_t > ( ) , channels , <nl> + Y - > t . template mutable_data < uint8_t > ( ) , channels , <nl> + nullptr / * thread pool * / ) ; <nl> + CAFFE_ENFORCE ( <nl> + setupStatus = = qnnp_status_success , <nl> + " failed to setup QNNPACK Max Pooling operator " ) ; <nl> + <nl> + # ifdef FBCODE_CAFFE2 <nl> + const qnnp_status runStatus = <nl> + qnnp_run_operator ( this - > qnnpackOperator_ , nullptr / * thread pool * / ) ; <nl> + # else <nl> + pthreadpool_t threadpool = <nl> + reinterpret_cast < pthreadpool_t > ( ws_ - > GetThreadPool ( ) ) ; <nl> + const qnnp_status runStatus = <nl> + qnnp_run_operator ( this - > qnnpackOperator_ , threadpool ) ; <nl> + # endif <nl> + CAFFE_ENFORCE ( <nl> + runStatus = = qnnp_status_success , <nl> + " failed to run QNNPACK Max Pooling operator " ) ; <nl> return true ; <nl> } <nl> + <nl> + private : <nl> + / / QNNPACK Max Pooling operator <nl> + qnnp_operator_t qnnpackOperator_ { nullptr } ; <nl> } ; <nl> <nl> } / / namespace int8 <nl> mmm a / third_party / QNNPACK <nl> ppp b / third_party / QNNPACK <nl> @ @ - 1 + 1 @ @ <nl> - Subproject commit ef05e87cef6b8e719989ce875b5e1c9fdb304c05 <nl> + Subproject commit 4705428ca588cc9317d20cc6bf9440d815c451bf <nl>
Switch Int8MaxPool operator to QNNPACK ()
pytorch/pytorch
bd3eb87258b2eefb6216043def8d65f5ee90f624
2018-12-06T23:14:28Z
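The rewrite follows QNNPACK's three-phase lifecycle visible in the diff: create the operator once with the static pooling parameters, setup per input shape, then run. Here is a hedged standalone sketch built only from the calls shown above; qnnp_initialize is assumed to be the entry point that Caffe2's initQNNPACK() wraps, and all sizes are invented for illustration.

    #include <qnnpack.h>

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    int main()
    {
        // Assumed QNNPACK init entry point (Caffe2 calls initQNNPACK()).
        if (qnnp_initialize() != qnnp_status_success) {
            std::fprintf(stderr, "failed to initialize QNNPACK\n");
            return 1;
        }

        const size_t batch = 1, in_h = 4, in_w = 4, channels = 8;
        const size_t out_h = 2, out_w = 2;  // (4 - 2) / 2 + 1 with 2x2 kernel, stride 2

        std::vector<uint8_t> input(batch * in_h * in_w * channels, 0);
        std::vector<uint8_t> output(batch * out_h * out_w * channels, 0);

        // Phase 1: create once with the shape-independent parameters.
        qnnp_operator_t op = nullptr;
        qnnp_status status = qnnp_create_max_pooling2d_nhwc_u8(
            0, 0, 0, 0,      /* padding: top, right, bottom, left */
            2, 2,            /* kernel height, width */
            2, 2,            /* stride height, width */
            1, 1,            /* dilation height, width */
            channels,
            0, 255,          /* output activation min / max */
            &op);
        if (status != qnnp_status_success) return 1;

        // Phase 2: setup binds shapes and pointers; only this step needs
        // to be repeated when the input tensor changes.
        status = qnnp_setup_max_pooling2d_nhwc_u8(
            op, batch, in_h, in_w,
            input.data(), channels,   /* input pixel stride */
            output.data(), channels,  /* output pixel stride */
            nullptr /* thread pool */);
        if (status != qnnp_status_success) return 1;

        // Phase 3: run (optionally on a pthreadpool).
        status = qnnp_run_operator(op, nullptr /* thread pool */);
        if (status != qnnp_status_success) return 1;

        qnnp_delete_operator(op);
        return 0;
    }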
new file mode 100644 <nl> index 0000000000 . . 461dc5847a <nl> mmm / dev / null <nl> ppp b / change / @ office - iss - react - native - win32 - 2020 - 02 - 27 - 05 - 32 - 05 - missing - license . json <nl> <nl> + { <nl> + " type " : " prerelease " , <nl> + " comment " : " Add missing license to react - native - win32 " , <nl> + " packageName " : " @ office - iss / react - native - win32 " , <nl> + " email " : " ngerlem @ microsoft . com " , <nl> + " commit " : " 56b9c03fba7c40dddad1e9b063f5fb26229dd8f0 " , <nl> + " dependentChangeType " : " patch " , <nl> + " date " : " 2020 - 02 - 27T13 : 32 : 05 . 222Z " <nl> + } <nl> \ No newline at end of file <nl> mmm a / packages / react - native - win32 / package . json <nl> ppp b / packages / react - native - win32 / package . json <nl> <nl> " name " : " @ office - iss / react - native - win32 " , <nl> " version " : " 0 . 61 . 0 - beta . 8 " , <nl> " description " : " Implementation of react native on top of Office ' s Win32 platform . " , <nl> + " license " : " MIT " , <nl> " main " : " . / Libraries / react - native / react - native - implementation . win32 . js " , <nl> " typings " : " . / Libraries / react - native / typings - main . d . ts " , <nl> " scripts " : { <nl>
Add missing license to react-native-win32 ()
microsoft/react-native-windows
778cd8c5ef232668ef627fe43ec6c56b5e30fcc0
2020-02-27T14:33:36Z
mmm a / DEPS <nl> ppp b / DEPS <nl> deps = { <nl> ' v8 / test / mozilla / data ' : <nl> Var ( ' chromium_url ' ) + ' / v8 / deps / third_party / mozilla - tests . git ' + ' @ ' + ' f6c578a10ea707b1a8ab0b88943fe5115ce2b9be ' , <nl> ' v8 / test / test262 / data ' : <nl> - Var ( ' chromium_url ' ) + ' / external / github . com / tc39 / test262 . git ' + ' @ ' + ' 3682ddd7e377388fe3715593cc1fb41a283cc23d ' , <nl> + Var ( ' chromium_url ' ) + ' / external / github . com / tc39 / test262 . git ' + ' @ ' + ' 31f1bb5a75347351b7b00c2463158932fe0eb6c6 ' , <nl> ' v8 / test / test262 / harness ' : <nl> Var ( ' chromium_url ' ) + ' / external / github . com / test262 - utils / test262 - harness - py . git ' + ' @ ' + ' 4555345a943d0c99a9461182705543fb171dda4b ' , <nl> ' v8 / third_party / qemu - linux - x64 ' : { <nl> mmm a / test / test262 / test262 . status <nl> ppp b / test / test262 / test262 . status <nl> <nl> # https : / / bugs . chromium . org / p / v8 / issues / detail ? id = 7472 <nl> ' intl402 / NumberFormat / currency - digits ' : [ FAIL ] , <nl> <nl> - # https : / / bugs . chromium . org / p / v8 / issues / detail ? id = 10069 <nl> - ' intl402 / DisplayNames / options - type - valid ' : [ FAIL ] , <nl> - ' intl402 / DisplayNames / prototype / resolvedOptions / option - type ' : [ FAIL ] , <nl> - <nl> # https : / / bugs . chromium . org / p / v8 / issues / detail ? id = 7831 <nl> ' language / statements / generators / generator - created - after - decl - inst ' : [ FAIL ] , <nl> ' language / expressions / generators / generator - created - after - decl - inst ' : [ FAIL ] , <nl> <nl> # These tests require exception handling support which is currently <nl> # blocked on landing https : / / chromium - review . googlesource . com / c / v8 / v8 / + / 1655655 <nl> ' built - ins / FinalizationGroup / FinalizationGroupCleanupIteratorPrototype / next - job - not - active - throws ' : [ FAIL ] , <nl> + ' built - ins / FinalizationGroup / prototype / cleanupSome / cleanup - throws - in - callback ' : [ FAIL ] , <nl> ' built - ins / FinalizationGroup / prototype / cleanupSome / poisoned - callback - throws ' : [ FAIL ] , <nl> # ' built - ins / FinalizationGroup / prototype / cleanupSome / poisoned - cleanup - callback - throws ' : [ FAIL ] , <nl> <nl>
Roll test262
v8/v8
92cc62e5b9dd91235ddbb8f9ab4d806a2408d08f
2019-12-24T08:46:02Z
mmm a / xbmc / Application . cpp <nl> ppp b / xbmc / Application . cpp <nl> bool CApplication : : PlayFile ( const CFileItem & item , bool bRestart ) <nl> # endif <nl> } <nl> m_bPlaybackStarting = false ; <nl> - if ( bResult ) <nl> - { <nl> - / / we must have started , otherwise player might send this later <nl> - if ( IsPlaying ( ) ) <nl> - OnPlayBackStarted ( ) ; <nl> - else <nl> - OnPlayBackEnded ( ) ; <nl> - } <nl> - else <nl> + <nl> + if ( ! bResult ) <nl> { <nl> / / we send this if it isn ' t playlistplayer that is doing this <nl> int next = g_playlistPlayer . GetNextSong ( ) ; <nl> float CApplication : : GetPercentage ( ) const <nl> { <nl> if ( IsPlaying ( ) & & m_pPlayer ) <nl> { <nl> - if ( IsPlayingAudio ( ) & & m_itemCurrentFile - > HasMusicInfoTag ( ) ) <nl> - { <nl> - const CMusicInfoTag & tag = * m_itemCurrentFile - > GetMusicInfoTag ( ) ; <nl> - if ( tag . GetDuration ( ) > 0 ) <nl> - return ( float ) ( GetTime ( ) / tag . GetDuration ( ) * 100 ) ; <nl> - } <nl> - <nl> if ( m_itemCurrentFile - > IsStack ( ) & & m_currentStack - > Size ( ) > 0 ) <nl> return ( float ) ( GetTime ( ) / GetTotalTime ( ) * 100 ) ; <nl> else <nl> mmm a / xbmc / CueDocument . cpp <nl> ppp b / xbmc / CueDocument . cpp <nl> bool CCueDocument : : Parse ( const CStdString & strFile ) <nl> { <nl> if ( ! ReadNextLine ( strLine ) ) <nl> break ; <nl> - if ( strLine . Left ( 7 ) = = " INDEX 0 " ) <nl> + if ( strLine . Left ( 8 ) = = " INDEX 01 " ) <nl> { <nl> if ( bCurrentFileChanged ) <nl> { <nl> mmm a / xbmc / cores / paplayer / PAPlayer . cpp <nl> ppp b / xbmc / cores / paplayer / PAPlayer . cpp <nl> inline bool PAPlayer : : ProcessStream ( StreamInfo * si , double & delay , double & buffe <nl> / * see if it is time yet to FF / RW or a direct seek * / <nl> if ( ! si - > m_playNextTriggered & & ( ( m_playbackSpeed ! = 1 & & si - > m_framesSent > = si - > m_seekNextAtFrame ) | | si - > m_seekFrame > - 1 ) ) <nl> { <nl> + int64_t time = ( int64_t ) 0 ; <nl> / * if its a direct seek * / <nl> if ( si - > m_seekFrame > - 1 ) <nl> { <nl> - si - > m_framesSent = si - > m_seekFrame ; <nl> + time = ( int64_t ) ( ( float ) si - > m_seekFrame / ( float ) si - > m_sampleRate * 1000 . 0f ) ; <nl> + si - > m_framesSent = ( int ) ( si - > m_seekFrame - ( ( float ) si - > m_startOffset * ( float ) si - > m_sampleRate ) / 1000 . 0f ) ; <nl> si - > m_seekFrame = - 1 ; <nl> } <nl> / * if its FF / RW * / <nl> inline bool PAPlayer : : ProcessStream ( StreamInfo * si , double & delay , double & buffe <nl> { <nl> si - > m_framesSent + = si - > m_sampleRate * ( m_playbackSpeed - 1 ) ; <nl> si - > m_seekNextAtFrame = si - > m_framesSent + si - > m_sampleRate / 2 ; <nl> + time = ( int64_t ) ( ( ( float ) si - > m_framesSent / ( float ) si - > m_sampleRate * 1000 . 0f ) + ( float ) si - > m_startOffset ) ; <nl> } <nl> <nl> - int64_t time = ( int64_t ) ( si - > m_startOffset + ( ( float ) si - > m_framesSent / ( float ) si - > m_sampleRate * 1000 . 0f ) ) ; <nl> - <nl> / * if we are seeking back before the start of the track start normal playback * / <nl> if ( time < si - > m_startOffset | | si - > m_framesSent < 0 ) <nl> { <nl> time = si - > m_startOffset ; <nl> - si - > m_framesSent = 0 ; <nl> + si - > m_framesSent = ( int ) ( si - > m_startOffset * si - > m_sampleRate / 1000 ) ; <nl> si - > m_seekNextAtFrame = 0 ; <nl> ToFFRW ( 1 ) ; <nl> } <nl> inline bool PAPlayer : : ProcessStream ( StreamInfo * si , double & delay , double & buffe <nl> int status = si - > m_decoder . 
GetStatus ( ) ; <nl> if ( status = = STATUS_ENDED | | <nl> status = = STATUS_NO_FILE | | <nl> - si - > m_decoder . ReadSamples ( PACKET_SIZE ) = = RET_ERROR ) <nl> + si - > m_decoder . ReadSamples ( PACKET_SIZE ) = = RET_ERROR | | <nl> + ( ( si - > m_endOffset ) & & ( si - > m_framesSent / si - > m_sampleRate > = ( si - > m_endOffset - si - > m_startOffset ) / 1000 ) ) ) <nl> { <nl> CLog : : Log ( LOGINFO , " PAPlayer : : ProcessStream - Stream Finished " ) ; <nl> return false ; <nl> int64_t PAPlayer : : GetTime ( ) <nl> if ( ! m_currentStream ) <nl> return 0 ; <nl> <nl> - double time = ( double ) m_currentStream - > m_framesSent / ( double ) m_currentStream - > m_sampleRate ; <nl> + double time = ( ( double ) m_currentStream - > m_framesSent / ( double ) m_currentStream - > m_sampleRate ) <nl> + / * - ( ( double ) m_currentStream - > m_startOffset / 1000 . 0 ) * / ; <nl> if ( m_currentStream - > m_stream ) <nl> time - = m_currentStream - > m_stream - > GetDelay ( ) ; <nl> <nl> void PAPlayer : : SeekTime ( int64_t iTime / * = 0 * / ) <nl> return ; <nl> <nl> int seekOffset = ( int ) ( iTime - GetTime ( ) ) ; <nl> - if ( m_currentStream - > m_startOffset ) <nl> - iTime + = m_currentStream - > m_startOffset ; <nl> + / * if ( m_currentStream - > m_startOffset ) <nl> + iTime + = m_currentStream - > m_startOffset ; * / <nl> <nl> if ( m_playbackSpeed ! = 1 ) <nl> ToFFRW ( 1 ) ; <nl> <nl> - m_currentStream - > m_seekFrame = ( int ) ( m_currentStream - > m_sampleRate * ( iTime / 1000 ) ) ; <nl> + m_currentStream - > m_seekFrame = ( int ) ( ( float ) m_currentStream - > m_sampleRate * ( ( float ) iTime + ( float ) m_currentStream - > m_startOffset ) / 1000 . 0f ) ; <nl> m_callback . OnPlayBackSeek ( ( int ) iTime , seekOffset ) ; <nl> } <nl> <nl>
Fix erroneous cuesheet handling, seekbar calc, eof calc and duplicate messaging on select.
xbmc/xbmc
dd02c0c6d7738d0f3c86417b3d99a9571e1aec10
2012-05-31T23:04:37Z
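The arithmetic being fixed is the mapping between track-relative milliseconds and file-relative frames for cue-sheet tracks that start at a non-zero offset into the file. A schematic restatement of the conversions from the diff follows, with invented names and sample values rather than PAPlayer's actual structures.

    #include <cstdint>
    #include <cstdio>

    struct StreamInfo {
        int sampleRate;        // frames per second
        int64_t startOffset;   // ms: where this cue track starts in the file
        int64_t endOffset;     // ms: where it ends (0 = play to end of file)
        int64_t framesSent;    // decoded frames, counted from startOffset
    };

    // Playback position reported to the UI, relative to the track.
    int64_t GetTimeMs(const StreamInfo& si)
    {
        return si.framesSent * 1000 / si.sampleRate;
    }

    // A seek target in track-relative ms maps to a file-relative frame,
    // mirroring m_seekFrame = sampleRate * (iTime + startOffset) / 1000.
    int64_t SeekFrame(const StreamInfo& si, int64_t timeMs)
    {
        return si.sampleRate * (timeMs + si.startOffset) / 1000;
    }

    // The track is finished once the frames sent cover the cue range,
    // even though the underlying file continues.
    bool TrackEnded(const StreamInfo& si)
    {
        return si.endOffset != 0 &&
               si.framesSent / si.sampleRate >= (si.endOffset - si.startOffset) / 1000;
    }

    int main()
    {
        // Second track of a cue sheet: starts 3 minutes into the file.
        StreamInfo si{44100, 180000, 240000, 0};
        si.framesSent = 44100LL * 30;  // 30 seconds decoded
        std::printf("pos=%lldms ended=%d seek(45s)->frame %lld\n",
                    (long long)GetTimeMs(si), (int)TrackEnded(si),
                    (long long)SeekFrame(si, 45000));
    }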
--- a/BUILD.gn
+++ b/BUILD.gn
 if (is_component_build) {
 v8_executable("d8") {
   sources = [
     "$target_gen_dir/d8-js.cc",
+    "src/async-hooks-wrapper.cc",
+    "src/async-hooks-wrapper.h",
     "src/d8-console.cc",
     "src/d8-console.h",
     "src/d8.cc",
new file mode 100644
index 00000000000..a33a6008093
--- /dev/null
+++ b/src/async-hooks-wrapper.cc
+// Copyright 2018 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/async-hooks-wrapper.h"
+#include "src/d8.h"
+
+namespace v8 {
+
+void AsyncHooksWrap::Enable() { enabled = true; }
+
+void AsyncHooksWrap::Disable() { enabled = false; }
+
+v8::Local<v8::Function> AsyncHooksWrap::init_function() const {
+  return init_function_.Get(isolate_);
+}
+void AsyncHooksWrap::set_init_function(v8::Local<v8::Function> value) {
+  init_function_.Reset(isolate_, value);
+}
+v8::Local<v8::Function> AsyncHooksWrap::before_function() const {
+  return before_function_.Get(isolate_);
+}
+void AsyncHooksWrap::set_before_function(v8::Local<v8::Function> value) {
+  before_function_.Reset(isolate_, value);
+}
+v8::Local<v8::Function> AsyncHooksWrap::after_function() const {
+  return after_function_.Get(isolate_);
+}
+void AsyncHooksWrap::set_after_function(v8::Local<v8::Function> value) {
+  after_function_.Reset(isolate_, value);
+}
+v8::Local<v8::Function> AsyncHooksWrap::promiseResolve_function() const {
+  return promiseResolve_function_.Get(isolate_);
+}
+void AsyncHooksWrap::set_promiseResolve_function(
+    v8::Local<v8::Function> value) {
+  promiseResolve_function_.Reset(isolate_, value);
+}
+
+static AsyncHooksWrap* UnwrapHook(
+    const v8::FunctionCallbackInfo<v8::Value>& args) {
+  Isolate* isolate = args.GetIsolate();
+  HandleScope scope(isolate);
+  Local<Object> hook = args.This();
+  Local<External> wrap = Local<External>::Cast(hook->GetInternalField(0));
+  void* ptr = wrap->Value();
+  return static_cast<AsyncHooksWrap*>(ptr);
+}
+
+static void EnableHook(const v8::FunctionCallbackInfo<v8::Value>& args) {
+  AsyncHooksWrap* wrap = UnwrapHook(args);
+  wrap->Enable();
+}
+
+static void DisableHook(const v8::FunctionCallbackInfo<v8::Value>& args) {
+  AsyncHooksWrap* wrap = UnwrapHook(args);
+  wrap->Disable();
+}
+
+async_id_t AsyncHooks::GetExecutionAsyncId() const {
+  return asyncContexts.top().execution_async_id;
+}
+
+async_id_t AsyncHooks::GetTriggerAsyncId() const {
+  return asyncContexts.top().trigger_async_id;
+}
+
+Local<Object> AsyncHooks::CreateHook(
+    const v8::FunctionCallbackInfo<v8::Value>& args) {
+  Isolate* isolate = args.GetIsolate();
+  EscapableHandleScope handle_scope(isolate);
+
+  Local<Context> currentContext = isolate->GetCurrentContext();
+
+  AsyncHooksWrap* wrap = new AsyncHooksWrap(isolate);
+
+  CHECK(args[0]->IsObject());
+
+  Local<Object> fn_obj = args[0].As<Object>();
+
+#define SET_HOOK_FN(name)                                                   \
+  Local<Value> name##_v =                                                   \
+      fn_obj                                                                \
+          ->Get(currentContext,                                             \
+                String::NewFromUtf8(isolate, #name, NewStringType::kNormal) \
+                    .ToLocalChecked())                                      \
+          .ToLocalChecked();                                                \
+  if (name##_v->IsFunction()) {                                             \
+    wrap->set_##name##_function(name##_v.As<Function>());                   \
+  }
+
+  SET_HOOK_FN(init);
+  SET_HOOK_FN(before);
+  SET_HOOK_FN(after);
+  SET_HOOK_FN(promiseResolve);
+#undef SET_HOOK_FN
+
+  async_wraps_.push_back(wrap);
+
+  Local<Object> obj = async_hooks_templ.Get(isolate)
+                          ->NewInstance(currentContext)
+                          .ToLocalChecked();
+  obj->SetInternalField(0, External::New(isolate, wrap));
+
+  return handle_scope.Escape(obj);
+}
+
+void AsyncHooks::ShellPromiseHook(PromiseHookType type, Local<Promise> promise,
+                                  Local<Value> parent) {
+  AsyncHooks* hooks = Shell::GetAsyncHooks();
+
+  HandleScope handle_scope(hooks->isolate_);
+
+  Local<Context> currentContext = hooks->isolate_->GetCurrentContext();
+
+  if (type == PromiseHookType::kInit) {
+    ++hooks->current_async_id;
+    Local<Integer> async_id =
+        Integer::New(hooks->isolate_, hooks->current_async_id);
+
+    promise->SetPrivate(currentContext,
+                        hooks->async_id_smb.Get(hooks->isolate_), async_id);
+    if (parent->IsPromise()) {
+      Local<Promise> parent_promise = parent.As<Promise>();
+      Local<Value> parent_async_id =
+          parent_promise
+              ->GetPrivate(hooks->isolate_->GetCurrentContext(),
+                           hooks->async_id_smb.Get(hooks->isolate_))
+              .ToLocalChecked();
+      promise->SetPrivate(currentContext,
+                          hooks->trigger_id_smb.Get(hooks->isolate_),
+                          parent_async_id);
+    } else {
+      CHECK(parent->IsUndefined());
+      Local<Integer> trigger_id = Integer::New(hooks->isolate_, 0);
+      promise->SetPrivate(currentContext,
+                          hooks->trigger_id_smb.Get(hooks->isolate_),
+                          trigger_id);
+    }
+  } else if (type == PromiseHookType::kBefore) {
+    AsyncContext ctx;
+    ctx.execution_async_id =
+        promise
+            ->GetPrivate(hooks->isolate_->GetCurrentContext(),
+                         hooks->async_id_smb.Get(hooks->isolate_))
+            .ToLocalChecked()
+            .As<Integer>()
+            ->Value();
+    ctx.trigger_async_id =
+        promise
+            ->GetPrivate(hooks->isolate_->GetCurrentContext(),
+                         hooks->trigger_id_smb.Get(hooks->isolate_))
+            .ToLocalChecked()
+            .As<Integer>()
+            ->Value();
+    hooks->asyncContexts.push(ctx);
+  } else if (type == PromiseHookType::kAfter) {
+    hooks->asyncContexts.pop();
+  }
+
+  for (AsyncHooksWrap* wrap : hooks->async_wraps_) {
+    PromiseHookDispatch(type, promise, parent, wrap, hooks);
+  }
+}
+
+void AsyncHooks::Initialize() {
+  HandleScope handle_scope(isolate_);
+
+  async_hook_ctor.Reset(isolate_, FunctionTemplate::New(isolate_));
+  async_hook_ctor.Get(isolate_)->SetClassName(
+      String::NewFromUtf8(isolate_, "AsyncHook", NewStringType::kNormal)
+          .ToLocalChecked());
+
+  async_hooks_templ.Reset(isolate_,
+                          async_hook_ctor.Get(isolate_)->InstanceTemplate());
+  async_hooks_templ.Get(isolate_)->SetInternalFieldCount(1);
+  async_hooks_templ.Get(isolate_)->Set(
+      String::NewFromUtf8(isolate_, "enable"),
+      FunctionTemplate::New(isolate_, EnableHook));
+  async_hooks_templ.Get(isolate_)->Set(
+      String::NewFromUtf8(isolate_, "disable"),
+      FunctionTemplate::New(isolate_, DisableHook));
+
+  async_id_smb.Reset(isolate_, Private::New(isolate_));
+  trigger_id_smb.Reset(isolate_, Private::New(isolate_));
+
+  isolate_->SetPromiseHook(ShellPromiseHook);
+}
+
+void AsyncHooks::Deinitialize() {
+  isolate_->SetPromiseHook(nullptr);
+  for (AsyncHooksWrap* wrap : async_wraps_) {
+    delete wrap;
+  }
+}
+
+void AsyncHooks::PromiseHookDispatch(PromiseHookType type,
+                                     Local<Promise> promise,
+                                     Local<Value> parent, AsyncHooksWrap* wrap,
+                                     AsyncHooks* hooks) {
+  if (!wrap->IsEnabled()) {
+    return;
+  }
+
+  HandleScope handle_scope(hooks->isolate_);
+
+  Local<Value> rcv = Undefined(hooks->isolate_);
+  Local<Value> async_id =
+      promise
+          ->GetPrivate(hooks->isolate_->GetCurrentContext(),
+                       hooks->async_id_smb.Get(hooks->isolate_))
+          .ToLocalChecked();
+  Local<Value> args[1] = {async_id};
+
+  // Sacrifice the brevity for readability and debugfulness
+  if (type == PromiseHookType::kInit) {
+    if (!wrap->init_function().IsEmpty()) {
+      Local<Value> initArgs[4] = {
+          async_id,
+          String::NewFromUtf8(hooks->isolate_, "PROMISE",
+                              NewStringType::kNormal)
+              .ToLocalChecked(),
+          promise
+              ->GetPrivate(hooks->isolate_->GetCurrentContext(),
+                           hooks->trigger_id_smb.Get(hooks->isolate_))
+              .ToLocalChecked(),
+          promise};
+      wrap->init_function()->Call(rcv, 4, initArgs);
+    }
+  } else if (type == PromiseHookType::kBefore) {
+    if (!wrap->before_function().IsEmpty()) {
+      wrap->before_function()->Call(rcv, 1, args);
+    }
+  } else if (type == PromiseHookType::kAfter) {
+    if (!wrap->after_function().IsEmpty()) {
+      wrap->after_function()->Call(rcv, 1, args);
+    }
+  } else if (type == PromiseHookType::kResolve) {
+    if (!wrap->promiseResolve_function().IsEmpty()) {
+      wrap->promiseResolve_function()->Call(rcv, 1, args);
+    }
+  }
+}
+
+}  // namespace v8
new file mode 100644
index 00000000000..29c8902c405
--- /dev/null
+++ b/src/async-hooks-wrapper.h
+// Copyright 2018 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_ASYNC_HOOKS_WRAPPER_H_
+#define V8_ASYNC_HOOKS_WRAPPER_H_
+
+#include <stack>
+
+#include "include/v8.h"
+#include "src/objects.h"
+
+namespace v8 {
+
+typedef double async_id_t;
+
+struct AsyncContext {
+  async_id_t execution_async_id;
+  async_id_t trigger_async_id;
+};
+
+class AsyncHooksWrap {
+ public:
+  explicit AsyncHooksWrap(Isolate* isolate) { isolate_ = isolate; }
+  void Enable();
+  void Disable();
+  bool IsEnabled() const { return enabled; }
+
+  inline v8::Local<v8::Function> init_function() const;
+  inline void set_init_function(v8::Local<v8::Function> value);
+  inline v8::Local<v8::Function> before_function() const;
+  inline void set_before_function(v8::Local<v8::Function> value);
+  inline v8::Local<v8::Function> after_function() const;
+  inline void set_after_function(v8::Local<v8::Function> value);
+  inline v8::Local<v8::Function> promiseResolve_function() const;
+  inline void set_promiseResolve_function(v8::Local<v8::Function> value);
+
+ private:
+  Isolate* isolate_;
+
+  Persistent<v8::Function> init_function_;
+  Persistent<v8::Function> before_function_;
+  Persistent<v8::Function> after_function_;
+  Persistent<v8::Function> promiseResolve_function_;
+
+  bool enabled;
+};
+
+class AsyncHooks {
+ public:
+  explicit AsyncHooks(Isolate* isolate) {
+    isolate_ = isolate;
+
+    AsyncContext ctx;
+    ctx.execution_async_id = 1;
+    ctx.trigger_async_id = 0;
+    asyncContexts.push(ctx);
+    current_async_id = 1;
+
+    Initialize();
+  }
+  ~AsyncHooks() { Deinitialize(); }
+
+  async_id_t GetExecutionAsyncId() const;
+  async_id_t GetTriggerAsyncId() const;
+
+  Local<Object> CreateHook(const v8::FunctionCallbackInfo<v8::Value>& args);
+
+ private:
+  std::vector<AsyncHooksWrap*> async_wraps_;
+  Isolate* isolate_;
+  Persistent<FunctionTemplate> async_hook_ctor;
+  Persistent<ObjectTemplate> async_hooks_templ;
+  Persistent<Private> async_id_smb;
+  Persistent<Private> trigger_id_smb;
+
+  void Initialize();
+  void Deinitialize();
+
+  static void ShellPromiseHook(PromiseHookType type, Local<Promise> promise,
+                               Local<Value> parent);
+  static void PromiseHookDispatch(PromiseHookType type, Local<Promise> promise,
+                                  Local<Value> parent, AsyncHooksWrap* wrap,
+                                  AsyncHooks* hooks);
+
+  std::stack<AsyncContext> asyncContexts;
+  async_id_t current_async_id;
+};
+
+}  // namespace v8
+
+#endif  // V8_ASYNC_HOOKS_WRAPPER_H_
--- a/src/d8.cc
+++ b/src/d8.cc
 base::LazyMutex Shell::workers_mutex_;
 bool Shell::allow_new_workers_ = true;
 std::vector<Worker*> Shell::workers_;
 std::vector<ExternalizedContents> Shell::externalized_contents_;
+AsyncHooks* Shell::async_hooks_wrapper_;
 base::LazyMutex Shell::isolate_status_lock_;
 std::map<v8::Isolate*, bool> Shell::isolate_status_;
 base::LazyMutex Shell::cached_code_mutex_;
 void Shell::RealmSharedSet(Local<String> property,
   data->realm_shared_.Reset(isolate, value);
 }
 
+// async_hooks.createHook() registers functions to be called for different
+// lifetime events of each async operation.
+void Shell::AsyncHooksCreateHook(
+    const v8::FunctionCallbackInfo<v8::Value>& args) {
+  Local<Object> wrap = async_hooks_wrapper_->CreateHook(args);
+  args.GetReturnValue().Set(wrap);
+}
+
+// async_hooks.executionAsyncId() returns the asyncId of the current execution
+// context.
+void Shell::AsyncHooksExecutionAsyncId(
+    const v8::FunctionCallbackInfo<v8::Value>& args) {
+  Isolate* isolate = args.GetIsolate();
+  HandleScope handle_scope(isolate);
+  args.GetReturnValue().Set(
+      v8::Number::New(isolate, async_hooks_wrapper_->GetExecutionAsyncId()));
+}
+
+void Shell::AsyncHooksTriggerAsyncId(
+    const v8::FunctionCallbackInfo<v8::Value>& args) {
+  Isolate* isolate = args.GetIsolate();
+  HandleScope handle_scope(isolate);
+  args.GetReturnValue().Set(
+      v8::Number::New(isolate, async_hooks_wrapper_->GetTriggerAsyncId()));
+}
+
 void WriteToFile(FILE* file, const v8::FunctionCallbackInfo<v8::Value>& args) {
   for (int i = 0; i < args.Length(); i++) {
     HandleScope handle_scope(args.GetIsolate());
 Local<ObjectTemplate> Shell::CreateGlobalTemplate(Isolate* isolate) {
           .ToLocalChecked(),
       os_templ);
 
+  if (i::FLAG_expose_async_hooks) {
+    Local<ObjectTemplate> async_hooks_templ = ObjectTemplate::New(isolate);
+    async_hooks_templ->Set(
+        String::NewFromUtf8(isolate, "createHook", NewStringType::kNormal)
+            .ToLocalChecked(),
+        FunctionTemplate::New(isolate, AsyncHooksCreateHook));
+    async_hooks_templ->Set(
+        String::NewFromUtf8(isolate, "executionAsyncId", NewStringType::kNormal)
+            .ToLocalChecked(),
+        FunctionTemplate::New(isolate, AsyncHooksExecutionAsyncId));
+    async_hooks_templ->Set(
+        String::NewFromUtf8(isolate, "triggerAsyncId", NewStringType::kNormal)
+            .ToLocalChecked(),
+        FunctionTemplate::New(isolate, AsyncHooksTriggerAsyncId));
+    global_template->Set(
+        String::NewFromUtf8(isolate, "async_hooks", NewStringType::kNormal)
+            .ToLocalChecked(),
+        async_hooks_templ);
+  }
+
   return global_template;
 }
 
 void Shell::Initialize(Isolate* isolate) {
       v8::Isolate::kMessageError | v8::Isolate::kMessageWarning |
       v8::Isolate::kMessageInfo | v8::Isolate::kMessageDebug |
       v8::Isolate::kMessageLog);
+
+  // TODO(mslekova): dispose properly
+  async_hooks_wrapper_ = new AsyncHooks(isolate);
 }
 
 
 void Shell::WriteLcovData(v8::Isolate* isolate, const char* file) {
 }
 
 void Shell::OnExit(v8::Isolate* isolate) {
+  delete async_hooks_wrapper_;  // This uses the isolate
+
   // Dump basic block profiling data.
   if (i::BasicBlockProfiler* profiler =
           reinterpret_cast<i::Isolate*>(isolate)->basic_block_profiler()) {
--- a/src/d8.h
+++ b/src/d8.h
 #include <vector>
 
 #include "src/allocation.h"
+#include "src/async-hooks-wrapper.h"
 #include "src/base/platform/time.h"
 #include "src/string-hasher.h"
 #include "src/utils.h"
 class Shell : public i::AllStatic {
                               Local<Value> value,
                               const PropertyCallbackInfo<void>& info);
 
+  static void AsyncHooksCreateHook(
+      const v8::FunctionCallbackInfo<v8::Value>& args);
+  static void AsyncHooksExecutionAsyncId(
+      const v8::FunctionCallbackInfo<v8::Value>& args);
+  static void AsyncHooksTriggerAsyncId(
+      const v8::FunctionCallbackInfo<v8::Value>& args);
+  static AsyncHooks* GetAsyncHooks() { return async_hooks_wrapper_; }
+
   static void Print(const v8::FunctionCallbackInfo<v8::Value>& args);
   static void PrintErr(const v8::FunctionCallbackInfo<v8::Value>& args);
   static void Write(const v8::FunctionCallbackInfo<v8::Value>& args);
 class Shell : public i::AllStatic {
   static std::vector<Worker*> workers_;
   static std::vector<ExternalizedContents> externalized_contents_;
 
+  static AsyncHooks* async_hooks_wrapper_;
+
   static void WriteIgnitionDispatchCountersFile(v8::Isolate* isolate);
   // Append LCOV coverage data to file.
   static void WriteLcovData(v8::Isolate* isolate, const char* file);
--- a/src/flag-definitions.h
+++ b/src/flag-definitions.h
 DEFINE_BOOL(enable_experimental_builtins, false,
             "enable new csa-based experimental builtins")
 DEFINE_BOOL(disallow_code_generation_from_strings, false,
             "disallow eval and friends")
+DEFINE_BOOL(expose_async_hooks, false, "expose async_hooks object")
 
 // builtins.cc
 DEFINE_BOOL(allow_unsafe_function_constructor, false,
--- a/test/mjsunit/allocation-site-info.js
+++ b/test/mjsunit/allocation-site-info.js
 assertKind(elements_kind.fast, obj);
 obj = newarraycase_list_smiobj(2);
 assertKind(elements_kind.fast, obj);
 
+// Perform a gc because without it the test below can experience an
+// allocation failure at an inconvenient point. Allocation mementos get
+// cleared on gc, and they can't deliver elements kind feedback when that
+// happens.
+gc();
+
 // Case: array constructor calls with out of date feedback.
 // The boilerplate should incorporate all feedback, but the input array
 // should be minimally transitioned based on immediate need.
new file mode 100644
index 00000000000..19ff9c00646
--- /dev/null
+++ b/test/mjsunit/async-hooks/api-methods.js
+// Copyright 2018 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-async-hooks
+
+// Check for correct API methods
+(function() {
+  assertTrue(async_hooks.hasOwnProperty('createHook'),
+             'Async hooks missing createHook method');
+  assertTrue(async_hooks.hasOwnProperty('executionAsyncId'),
+             'Async hooks missing executionAsyncId method');
+  assertTrue(async_hooks.hasOwnProperty('triggerAsyncId'),
+             'Async hooks missing triggerAsyncId method');
+
+  let ah = async_hooks.createHook({});
+  assertTrue(ah.hasOwnProperty('enable'), 'Async hooks missing enable method');
+  assertTrue(ah.hasOwnProperty('disable'),
+             'Async hooks missing disable method');
+})();
+
+// Check for correct enabling/disabling of async hooks
+(function() {
+  let storedPromise;
+  let ah = async_hooks.createHook({
+    init(asyncId, type, triggerAsyncId, resource) {
+      storedPromise = resource.promise || resource;
+    }
+  });
+  ah.enable();
+
+  let createdPromise = new Promise(function(resolve) {
+    resolve(42);
+  });
+  assertSame(storedPromise, createdPromise,
+             "Async hooks weren't enabled correctly");
+  ah.disable();
+  createdPromise = Promise.resolve(52);
+  assertNotSame(storedPromise, createdPromise,
+                "Async hooks weren't disabled correctly");
+  ah.enable();
+  createdPromise = Promise.resolve(62);
+  assertSame(storedPromise, createdPromise,
+             "Async hooks weren't enabled correctly");
+})();
new file mode 100644
index 00000000000..955355cf31f
--- /dev/null
+++ b/test/mjsunit/async-hooks/async-await-tree.js
+// Copyright 2018 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-async-hooks
+
+// Check for async/await asyncIds relation
+(function() {
+  let asyncIds = [], triggerIds = [];
+  let ah = async_hooks.createHook({
+    init(asyncId, type, triggerAsyncId, resource) {
+      if (type !== 'PROMISE') {
+        return;
+      }
+      asyncIds.push(asyncId);
+      triggerIds.push(triggerAsyncId);
+    },
+  });
+  ah.enable();
+
+  // Simplified version of Node.js util.promisify(setTimeout)
+  function sleep(callback, timeout) {
+    const promise = new Promise(function(resolve, reject) {
+      try {
+        setTimeout((err, ...values) => {
+          if (err) {
+            reject(err);
+          } else {
+            resolve(values[0]);
+          }
+        }, timeout);
+      } catch (err) {
+        reject(err);
+      }
+    });
+    return promise;
+  }
+
+  async function foo() {
+    await sleep(10);
+  }
+
+  foo().then(function() {
+    assertEquals(asyncIds.length, 6);
+    assertEquals(triggerIds.length, 6);
+    assertEquals(triggerIds[2], asyncIds[0]);
+    assertEquals(triggerIds[3], asyncIds[2]);
+    assertEquals(triggerIds[4], asyncIds[0]);
+    assertEquals(triggerIds[5], asyncIds[1]);
+  });
+})();
new file mode 100644
index 00000000000..8b346530eb1
--- /dev/null
+++ b/test/mjsunit/async-hooks/chained-promises.js
+// Copyright 2018 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-async-hooks
+
+// Check for chained promises asyncIds relation
+(function() {
+  let asyncIds = [], triggerIds = [];
+  let ah = async_hooks.createHook({
+    init(asyncId, type, triggerAsyncId, resource) {
+      asyncIds.push(asyncId);
+      triggerIds.push(triggerAsyncId);
+    },
+  });
+  ah.enable();
+  let createdPromise = new Promise(function(resolve) {
+    resolve(42);
+  }).then(function() {
+    assertEquals(asyncIds.length, 2, 'Exactly 2 promises should be inited');
+    assertEquals(triggerIds.length, 2, 'Exactly 2 promises should be inited');
+    assertEquals(triggerIds[1], asyncIds[0],
+        "Parent promise asyncId doesn't correspond to child triggerAsyncId");
+  });
+})();
new file mode 100644
index 00000000000..f63ecf0032d
--- /dev/null
+++ b/test/mjsunit/async-hooks/execution-order.js
+// Copyright 2018 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-async-hooks
+
+// Check for correct execution of available hooks and asyncIds
+(function() {
+  let inited = false, resolved = false, before = false, after = false;
+  let storedAsyncId;
+  let ah = async_hooks.createHook({
+    init(asyncId, type, triggerAsyncId, resource) {
+      if (type !== 'PROMISE') {
+        return;
+      }
+      inited = true;
+      storedAsyncId = asyncId;
+    },
+    promiseResolve(asyncId) {
+      assertEquals(asyncId, storedAsyncId, 'AsyncId mismatch in resolve hook');
+      resolved = true;
+    },
+    before(asyncId) {
+      assertEquals(asyncId, storedAsyncId, 'AsyncId mismatch in before hook');
+      before = true;
+    },
+    after(asyncId) {
+      assertEquals(asyncId, storedAsyncId, 'AsyncId mismatch in after hook');
+      after = true;
+    },
+  });
+  ah.enable();
+
+  new Promise(function(resolve) {
+    resolve(42);
+  }).then(function() {
+    assertTrue(inited, "Didn't call init hook");
+    assertTrue(resolved, "Didn't call resolve hook");
+    assertTrue(before, "Didn't call before hook before the callback");
+    assertFalse(after, "Called after hook before the callback");
+  });
+})();
new file mode 100644
index 00000000000..2eba6ba6c51
--- /dev/null
+++ b/test/mjsunit/async-hooks/promises-async-await.js
+// Copyright 2018 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-async-hooks
+
+// Check for executionAsyncId/triggerAsyncId when chained promises and
+// async/await are combined
+(function() {
+  let p;
+  let outerExecutionAsyncId = -1, outerTriggerAsyncId = -1;
+
+  function inIrrelevantContext(resolve) {
+    resolve(42);
+  }
+
+  function inContext1(foo) {
+    foo();
+  }
+
+  function inContext2(foo) {
+    foo();
+  }
+
+  outerExecutionAsyncId = async_hooks.executionAsyncId();
+  outerTriggerAsyncId = async_hooks.triggerAsyncId();
+
+  inContext1(() => {
+    p = new Promise(resolve => {
+      assertEquals(outerExecutionAsyncId, async_hooks.executionAsyncId());
+      assertEquals(outerTriggerAsyncId, async_hooks.triggerAsyncId());
+      inIrrelevantContext(resolve);
+    }).then(() => {
+      assertNotEquals(outerExecutionAsyncId, async_hooks.executionAsyncId());
+      assertNotEquals(outerTriggerAsyncId, async_hooks.triggerAsyncId());
+    });
+  });
+
+  inContext2(async () => {
+    assertEquals(outerExecutionAsyncId, async_hooks.executionAsyncId());
+    assertEquals(outerTriggerAsyncId, async_hooks.triggerAsyncId());
+    await p;
+    assertNotEquals(outerExecutionAsyncId, async_hooks.executionAsyncId());
+    assertNotEquals(outerTriggerAsyncId, async_hooks.triggerAsyncId());
+  });
+
+})();
new file mode 100644
index 00000000000..edc5a420acd
--- /dev/null
+++ b/test/mjsunit/es8/async-await-interleaved.js
+// Copyright 2018 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Check for correct interleaving of Promises and async/await
+(function() {
+  const iterations = 10;
+  let promiseCounter = iterations;
+  let awaitCounter = 0;
+
+  async function check(v) {
+    awaitCounter = v;
+    // The following checks ensure that "await" takes 3 ticks on the
+    // microtask queue. Note: this will change in the future
+    if (awaitCounter === 0) {
+      assertEquals(iterations, promiseCounter);
+    } else if (awaitCounter <= Math.floor(iterations / 3)) {
+      assertEquals(iterations - awaitCounter * 3, promiseCounter);
+    } else {
+      assertEquals(0, promiseCounter);
+    }
+  }
+
+  async function f() {
+    for (let i = 0; i < iterations; i++) {
+      await check(i);
+    }
+    return 0;
+  }
+
+  function countdown(v) {
+    promiseCounter = v;
+    if (v > 0) Promise.resolve(v - 1).then(countdown);
+  }
+
+  countdown(iterations);
+  f();
+})();
[async] Expose async hooks to d8
v8/v8
3c4d0316e436d62179dfc96dbf270cc809984bd2
2018-06-06T13:06:12Z
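For context, the commit above exposes a small subset of the Node.js async_hooks API on d8's global object behind --expose-async-hooks. Below is a minimal sketch of how the new surface is driven from script, assembled from the hook names and callback signatures used in the commit's own mjsunit tests; the print output and filename are illustrative only:

// Run with: d8 --expose-async-hooks example.js (filename hypothetical)
let hook = async_hooks.createHook({
  init(asyncId, type, triggerAsyncId, resource) {
    // For promises, type is 'PROMISE' and resource is the new promise.
    print(`init ${type} id=${asyncId} trigger=${triggerAsyncId}`);
  },
  before(asyncId) { print(`before ${asyncId}`); },
  after(asyncId) { print(`after ${asyncId}`); },
  promiseResolve(asyncId) { print(`resolve ${asyncId}`); },
});
hook.enable();  // events are delivered only while the hook is enabled

Promise.resolve(1).then(() => {
  // Inside a reaction, executionAsyncId() reports the promise's own id and
  // triggerAsyncId() the id of the promise that scheduled it.
  print(`execution=${async_hooks.executionAsyncId()}`);
  print(`trigger=${async_hooks.triggerAsyncId()}`);
});
// hook.disable() would stop delivery of any later lifetime events.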
--- a/jstests/noPassthrough/drop_indexes_aborts_in_progress_index_builds_simple_name.js
+++ b/jstests/noPassthrough/drop_indexes_aborts_in_progress_index_builds_simple_name.js
  * command will only abort in-progress index builds if the user specifies all of the indexes that a
  * single builder is building together, as we can only abort at the index builder granularity level.
  *
+ * This test also confirms that secondary reads are supported while index builds are in progress.
+ *
  * In this file, we test calling "dropIndexes" with a simple index name whose index build is
  * in-progress.
+ * @tags: [
+ *   requires_replication,
+ * ]
  */
 (function() {
 "use strict";
 
 load("jstests/noPassthrough/libs/index_build.js");
-
-const mongodOptions = {};
-const conn = MongoRunner.runMongod(mongodOptions);
+load('jstests/replsets/libs/secondary_reads_test.js');
 
 const dbName = "drop_indexes_aborts_in_progress_index_builds_simple_name";
+
+const secondaryReadsTest = new SecondaryReadsTest(dbName);
+
+let primaryDB = secondaryReadsTest.getPrimaryDB();
+const conn = primaryDB.getMongo();
+
+if (!IndexBuildTest.supportsTwoPhaseIndexBuild(conn)) {
+    jsTestLog('Two phase index builds not enabled, skipping test.');
+    secondaryReadsTest.stop();
+    return;
+}
+
 const collName = "test";
 
 TestData.dbName = dbName;
 IndexBuildTest.pauseIndexBuilds(testDB.getMongo());
 const awaitIndexBuild = IndexBuildTest.startIndexBuild(
     testDB.getMongo(), coll.getFullName(), {a: 1}, {}, [ErrorCodes.IndexBuildAborted]);
 IndexBuildTest.waitForIndexBuildToScanCollection(testDB, collName, "a_1");
+IndexBuildTest.waitForIndexBuildToStart(secondaryReadsTest.getSecondaryDB(), collName, "a_1");
+
+// Test secondary reads during oplog application.
+// Prevent a batch from completing on the secondary.
+const pauseAwait = secondaryReadsTest.pauseSecondaryBatchApplication();
+
+for (let i = 100; i < 200; i++) {
+    assert.commandWorked(testDB.getCollection(collName).insert({a: i}));
+}
+
+// Wait for the batch application to pause.
+pauseAwait();
+
+// Do a bunch of reads on the 'collName' collection on the secondary.
+// No errors should be encountered on the secondary.
+let readFn = function() {
+    for (let x = 0; x < TestData.nOps; x++) {
+        assert.commandWorked(db.runCommand({
+            find: TestData.collName,
+            filter: {a: x},
+        }));
+        // Sleep a bit to make these reader threads less CPU intensive.
+        sleep(60);
+    }
+};
+TestData.nOps = 10;
+const nReaders = 3;
+secondaryReadsTest.startSecondaryReaders(nReaders, readFn);
+
+// Disable the failpoint and let the batch complete.
+secondaryReadsTest.resumeSecondaryBatchApplication();
+secondaryReadsTest.stopReaders();
 
 const awaitDropIndex = startParallelShell(() => {
     const testDB = db.getSiblingDB(TestData.dbName);
 awaitDropIndex();
 
 assert.eq(1, testDB.getCollection(collName).getIndexes().length);
 
-MongoRunner.stopMongod(conn);
+secondaryReadsTest.stop();
 }());
SERVER-21307 add secondary reads to dropIndexes and index builds test
mongodb/mongo
5e57c0b0f7505035c37179d100fdd43ef2b6cc36
2020-02-27T20:20:49Z
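The core of what this test exercises, reduced to the command shape: dropIndexes with a simple index name can abort an in-progress build, but only because "a_1" names everything that one index builder is building. A hedged mongo-shell sketch (collection, database, and index names follow the test above):

// While the 'a_1' build is still in progress, this aborts the builder;
// it would fail if the builder were also building other indexes not listed.
assert.commandWorked(db.getSiblingDB(TestData.dbName).runCommand({
    dropIndexes: TestData.collName,  // "test"
    index: "a_1",                    // a simple index name, not a key pattern
}));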
--- a/XBMC.xcodeproj/project.pbxproj
+++ b/XBMC.xcodeproj/project.pbxproj
 		0E3036EC1760F68A00D93596 /* FavouritesDirectory.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 0E3036EA1760F68A00D93596 /* FavouritesDirectory.cpp */; };
 		0E3036ED1760F68A00D93596 /* FavouritesDirectory.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 0E3036EA1760F68A00D93596 /* FavouritesDirectory.cpp */; };
 		0E3036EE1760F68A00D93596 /* FavouritesDirectory.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 0E3036EA1760F68A00D93596 /* FavouritesDirectory.cpp */; };
+		180F6C8117CE9A5700127892 /* smc.c in Sources */ = {isa = PBXBuildFile; fileRef = 180F6C8017CE9A5700127892 /* smc.c */; };
+		180F6C8217CE9A5700127892 /* smc.c in Sources */ = {isa = PBXBuildFile; fileRef = 180F6C8017CE9A5700127892 /* smc.c */; };
+		180F6C8317CE9A5700127892 /* smc.c in Sources */ = {isa = PBXBuildFile; fileRef = 180F6C8017CE9A5700127892 /* smc.c */; };
 		183FDF8A11AF0B0500B81E9C /* PluginSource.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 183FDF8811AF0B0500B81E9C /* PluginSource.cpp */; };
 		18404DA61396C31B00863BBA /* SlingboxLib.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 18404DA51396C31B00863BBA /* SlingboxLib.a */; };
 		1840B74D13993D8A007C848B /* JSONVariantParser.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 1840B74B13993D8A007C848B /* JSONVariantParser.cpp */; };
 		0E30286C1759FCC200D93596 /* SettingsManager.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = SettingsManager.h; sourceTree = "<group>"; };
 		0E3036EA1760F68A00D93596 /* FavouritesDirectory.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = FavouritesDirectory.cpp; sourceTree = "<group>"; };
 		0E3036EB1760F68A00D93596 /* FavouritesDirectory.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = FavouritesDirectory.h; sourceTree = "<group>"; };
+		180F6C7F17CE9A5700127892 /* smc.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = smc.h; sourceTree = "<group>"; };
+		180F6C8017CE9A5700127892 /* smc.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; path = smc.c; sourceTree = "<group>"; };
 		18308CB41303370800AA309E /* stat_utf8.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = stat_utf8.h; sourceTree = "<group>"; };
 		18308CB51303370800AA309E /* stdio_utf8.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = stdio_utf8.h; sourceTree = "<group>"; };
 		183FDF8811AF0B0500B81E9C /* PluginSource.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = PluginSource.cpp; sourceTree = "<group>"; };
 				6E2FACD20E26E92800DF79EA /* Info.plist */,
 				F51CEEEE0F5C5D20004F4602 /* OSXGNUReplacements.c */,
 				F51CEEF00F5C5D28004F4602 /* OSXGNUReplacements.h */,
+				180F6C7F17CE9A5700127892 /* smc.h */,
+				180F6C8017CE9A5700127892 /* smc.c */,
 				E306D12C0DDF7B590052C2AD /* XBMCHelper.cpp */,
 				E306D12D0DDF7B590052C2AD /* XBMCHelper.h */,
 				820023D9171A28A300667D1C /* OSXTextInputResponder.h */,
 				F59EED7E17AD5174005BB7C6 /* ApplicationPlayer.cpp in Sources */,
 				DF29668017B2B04300DF10F9 /* SettingRequirement.cpp in Sources */,
 				DF28DF4D17B8379E0077F41A /* ProfilesOperations.cpp in Sources */,
+				180F6C8117CE9A5700127892 /* smc.c in Sources */,
 			);
 			runOnlyForDeploymentPostprocessing = 0;
 		};
 				F59EED8017AD5174005BB7C6 /* ApplicationPlayer.cpp in Sources */,
 				DF29668217B2B04300DF10F9 /* SettingRequirement.cpp in Sources */,
 				DF28DF4F17B8379E0077F41A /* ProfilesOperations.cpp in Sources */,
+				180F6C8317CE9A5700127892 /* smc.c in Sources */,
 			);
 			runOnlyForDeploymentPostprocessing = 0;
 		};
 				F59EED7F17AD5174005BB7C6 /* ApplicationPlayer.cpp in Sources */,
 				DF29668117B2B04300DF10F9 /* SettingRequirement.cpp in Sources */,
 				DF28DF4E17B8379E0077F41A /* ProfilesOperations.cpp in Sources */,
+				180F6C8217CE9A5700127892 /* smc.c in Sources */,
 			);
 			runOnlyForDeploymentPostprocessing = 0;
 		};
--- a/xbmc/GUIInfoManager.cpp
+++ b/xbmc/GUIInfoManager.cpp
 #include "utils/SeekHandler.h"
 #include "URL.h"
 #include "addons/Skin.h"
+#if defined(TARGET_DARWIN)
+#include "osx/smc.h"
+#endif
 
 // stuff for current song
 #include "music/MusicInfoLoader.h"
 string CGUIInfoManager::GetSystemHeatInfo(int info)
 
 CTemperature CGUIInfoManager::GetGPUTemperature()
 {
+  int value = 0;
+  char scale = 0;
+
+#if defined(TARGET_DARWIN)
+  value = SMCGetTemperature(SMC_KEY_GPU_TEMP);
+  return CTemperature::CreateFromCelsius(value);
+#else
   CStdString cmd = g_advancedSettings.m_gpuTempCmd;
-  int value = 0,
-      ret = 0;
-  char scale = 0;
+  int ret = 0;
   FILE* p = NULL;
 
   if (cmd.IsEmpty() || !(p = popen(cmd.c_str(), "r")))
 CTemperature CGUIInfoManager::GetGPUTemperature()
 
   if (ret != 2)
     return CTemperature();
+#endif
 
   if (scale == 'C' || scale == 'c')
     return CTemperature::CreateFromCelsius(value);
new file mode 100644
index 000000000000..9bee2d73a297
--- /dev/null
+++ b/xbmc/osx/smc.c
+/*
+ *  Apple System Management Control (SMC) Tool
+ *  Copyright (C) 2006 devnull
+ *
+ *  This program is free software; you can redistribute it and/or
+ *  modify it under the terms of the GNU General Public License
+ *  as published by the Free Software Foundation; either version 2
+ *  of the License, or (at your option) any later version.
+
+ *  This program is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ *  GNU General Public License for more details.
+
+ *  You should have received a copy of the GNU General Public License
+ *  along with this program; if not, write to the Free Software
+ *  Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+
+#include <stdio.h>
+#include <string.h>
+#include <IOKit/IOKitLib.h>
+
+#include "smc.h"
+
+static io_connect_t conn;
+
+UInt32 _strtoul(char* str, int size, int base)
+{
+  UInt32 total = 0;
+  int i;
+
+  for (i = 0; i < size; i++)
+  {
+    if (base == 16)
+      total += str[i] << (size - 1 - i) * 8;
+    else
+      total += (unsigned char)(str[i] << (size - 1 - i) * 8);
+  }
+  return total;
+}
+
+void _ultostr(char* str, UInt32 val)
+{
+  str[0] = '\0';
+  sprintf(str, "%c%c%c%c",
+          (unsigned int)val >> 24,
+          (unsigned int)val >> 16,
+          (unsigned int)val >> 8,
+          (unsigned int)val);
+}
+
+kern_return_t SMCOpen(void)
+{
+  kern_return_t result;
+  mach_port_t masterPort;
+  io_iterator_t iterator;
+  io_object_t device;
+
+  result = IOMasterPort(MACH_PORT_NULL, &masterPort);
+
+  CFMutableDictionaryRef matchingDictionary = IOServiceMatching("AppleSMC");
+  result = IOServiceGetMatchingServices(masterPort, matchingDictionary, &iterator);
+  if (result != kIOReturnSuccess)
+  {
+    printf("Error: IOServiceGetMatchingServices() = %08x\n", result);
+    return 1;
+  }
+
+  device = IOIteratorNext(iterator);
+  IOObjectRelease(iterator);
+  if (device == 0)
+  {
+    printf("Error: no SMC found\n");
+    return 1;
+  }
+
+  result = IOServiceOpen(device, mach_task_self(), 0, &conn);
+  IOObjectRelease(device);
+  if (result != kIOReturnSuccess)
+  {
+    printf("Error: IOServiceOpen() = %08x\n", result);
+    return 1;
+  }
+
+  return kIOReturnSuccess;
+}
+
+kern_return_t SMCClose()
+{
+  return IOServiceClose(conn);
+}
+
+
+kern_return_t SMCCall(int index, SMCKeyData_t* inputStructure, SMCKeyData_t* outputStructure)
+{
+  size_t structureInputSize;
+  size_t structureOutputSize;
+
+  structureInputSize = sizeof(SMCKeyData_t);
+  structureOutputSize = sizeof(SMCKeyData_t);
+
+#if MAC_OS_X_VERSION_10_5
+  return IOConnectCallStructMethod(conn, index,
+                                   // inputStructure
+                                   inputStructure, structureInputSize,
+                                   // ouputStructure
+                                   outputStructure, &structureOutputSize);
+#else
+  return IOConnectMethodStructureIStructureO(conn, index,
+                                             structureInputSize,   /* structureInputSize */
+                                             &structureOutputSize, /* structureOutputSize */
+                                             inputStructure,       /* inputStructure */
+                                             outputStructure);     /* ouputStructure */
+#endif
+
+}
+
+kern_return_t SMCReadKey(UInt32Char_t key, SMCVal_t* val)
+{
+  kern_return_t result;
+  SMCKeyData_t inputStructure;
+  SMCKeyData_t outputStructure;
+
+  memset(&inputStructure, 0, sizeof(SMCKeyData_t));
+  memset(&outputStructure, 0, sizeof(SMCKeyData_t));
+  memset(val, 0, sizeof(SMCVal_t));
+
+  inputStructure.key = _strtoul(key, 4, 16);
+  inputStructure.data8 = SMC_CMD_READ_KEYINFO;
+
+  result = SMCCall(KERNEL_INDEX_SMC, &inputStructure, &outputStructure);
+  if (result != kIOReturnSuccess)
+    return result;
+
+  val->dataSize = outputStructure.keyInfo.dataSize;
+  _ultostr(val->dataType, outputStructure.keyInfo.dataType);
+  inputStructure.keyInfo.dataSize = val->dataSize;
+  inputStructure.data8 = SMC_CMD_READ_BYTES;
+
+  result = SMCCall(KERNEL_INDEX_SMC, &inputStructure, &outputStructure);
+  if (result != kIOReturnSuccess)
+    return result;
+
+  memcpy(val->bytes, outputStructure.bytes, sizeof(outputStructure.bytes));
+
+  return kIOReturnSuccess;
+}
+
+double SMCGetTemperature(char* key)
+{
+  SMCVal_t val;
+  kern_return_t result;
+  SMCOpen();
+  result = SMCReadKey(key, &val);
+  SMCClose();
+  if (result == kIOReturnSuccess) {
+    // read succeeded - check returned value
+    if (val.dataSize > 0) {
+      if (strcmp(val.dataType, DATATYPE_SP78) == 0) {
+        // convert fp78 value to temperature
+        int intValue = (val.bytes[0] * 256 + val.bytes[1]) >> 2;
+        return intValue / 64.0;
+      }
+    }
+  }
+  // read failed
+  return 0.0;
+}
+
new file mode 100644
index 000000000000..1a3f01423d52
--- /dev/null
+++ b/xbmc/osx/smc.h
+/*
+ *  Apple System Management Control (SMC) Tool
+ *  Copyright (C) 2006 devnull
+ *
+ *  This program is free software; you can redistribute it and/or
+ *  modify it under the terms of the GNU General Public License
+ *  as published by the Free Software Foundation; either version 2
+ *  of the License, or (at your option) any later version.
+
+ *  This program is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ *  GNU General Public License for more details.
+
+ *  You should have received a copy of the GNU General Public License
+ *  along with this program; if not, write to the Free Software
+ *  Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __SMC_H__
+#define __SMC_H__
+#endif
+
+#define SMC_VERSION           "0.01"
+
+#define OP_NONE               0
+#define OP_LIST               1
+#define OP_READ               2
+#define OP_READ_FAN           3
+#define OP_WRITE              4
+
+#define KERNEL_INDEX_SMC      2
+
+#define SMC_CMD_READ_BYTES    5
+#define SMC_CMD_WRITE_BYTES   6
+#define SMC_CMD_READ_INDEX    8
+#define SMC_CMD_READ_KEYINFO  9
+#define SMC_CMD_READ_PLIMIT   11
+#define SMC_CMD_READ_VERS     12
+
+#define DATATYPE_FPE2         "fpe2"
+#define DATATYPE_UINT8        "ui8"
+#define DATATYPE_UINT16       "ui16"
+#define DATATYPE_UINT32       "ui32"
+#define DATATYPE_SP78         "sp78"
+
+// key values
+#define SMC_KEY_CPU_TEMP      "TC0D"
+#define SMC_KEY_GPU_TEMP      "TG0D"
+#define SMC_KEY_FAN0_RPM_MIN  "F0Mn"
+#define SMC_KEY_FAN1_RPM_MIN  "F1Mn"
+#define SMC_KEY_FAN0_RPM_CUR  "F0Ac"
+#define SMC_KEY_FAN1_RPM_CUR  "F1Ac"
+
+
+typedef struct {
+  char major;
+  char minor;
+  char build;
+  char reserved[1];
+  UInt16 release;
+} SMCKeyData_vers_t;
+
+typedef struct {
+  UInt16 version;
+  UInt16 length;
+  UInt32 cpuPLimit;
+  UInt32 gpuPLimit;
+  UInt32 memPLimit;
+} SMCKeyData_pLimitData_t;
+
+typedef struct {
+  UInt32 dataSize;
+  UInt32 dataType;
+  char dataAttributes;
+} SMCKeyData_keyInfo_t;
+
+typedef char SMCBytes_t[32];
+
+typedef struct {
+  UInt32 key;
+  SMCKeyData_vers_t vers;
+  SMCKeyData_pLimitData_t pLimitData;
+  SMCKeyData_keyInfo_t keyInfo;
+  char result;
+  char status;
+  char data8;
+  UInt32 data32;
+  SMCBytes_t bytes;
+} SMCKeyData_t;
+
+typedef char UInt32Char_t[5];
+
+typedef struct {
+  UInt32Char_t key;
+  UInt32 dataSize;
+  UInt32Char_t dataType;
+  SMCBytes_t bytes;
+} SMCVal_t;
+
+#ifdef __cplusplus
+extern "C"
+{
+#endif
+
+// prototypes
+double SMCGetTemperature(char* key);
+
+#ifdef __cplusplus
+}
+#endif
--- a/xbmc/utils/CPUInfo.cpp
+++ b/xbmc/utils/CPUInfo.cpp
 #if defined(TARGET_DARWIN)
 #include <sys/types.h>
 #include <sys/sysctl.h>
+#include "osx/smc.h"
 #ifdef __ppc__
 #include <mach-o/arch.h>
 #endif
 float CCPUInfo::getCPUFrequency()
 
 bool CCPUInfo::getTemperature(CTemperature& temperature)
 {
-  int value = 0,
-      ret = 0;
+  int value = 0;
   char scale = 0;
+
+#if defined(TARGET_DARWIN)
+  value = SMCGetTemperature(SMC_KEY_CPU_TEMP);
+  scale = 'c';
+#else
+  int ret = 0;
   FILE* p = NULL;
   CStdString cmd = g_advancedSettings.m_cpuTempCmd;
 
 bool CCPUInfo::getTemperature(CTemperature& temperature)
 
   if (ret != 2)
     return false;
+#endif
 
   if (scale == 'C' || scale == 'c')
     temperature = CTemperature::CreateFromCelsius(value);
[ osx ] talk to SMC to get CPU and GPU temp for System Info page
xbmc/xbmc
e831da6192a25e219754cbdb0672842fc262262e
2013-08-30T16:22:31Z
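A note on the conversion in `SMCGetTemperature` above: the SMC's `sp78` type is a signed fixed-point value with 8 fractional bits packed into two bytes, so `((bytes[0] * 256 + bytes[1]) >> 2) / 64.0` is simply the raw 16-bit value divided by 256. A standalone sketch of the same arithmetic, assuming invented sample bytes and no IOKit:

```cpp
#include <cstdint>
#include <cstdio>

// Decode an SMC "sp78" reading: signed fixed point, 8 fractional bits.
// For non-negative values this equals the commit's ((b0 * 256 + b1) >> 2) / 64.0.
static double DecodeSp78(uint8_t b0, uint8_t b1)
{
  const int16_t raw = static_cast<int16_t>((b0 << 8) | b1); // sign lives in bit 15
  return raw / 256.0;                                       // 2^8 fractional bits
}

int main()
{
  // Invented sample: 0x3A40 = 14912, and 14912 / 256 = 58.25 degrees Celsius.
  std::printf("%.2f C\n", DecodeSp78(0x3A, 0x40));
  return 0;
}
```

One caveat the sketch sidesteps: the committed code stores the bytes in a plain `char` array, so on targets where `char` is signed, a reading with the high bit set would need the explicit widening shown here.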
mmm a / example / speech - demo / README . md <nl> ppp b / example / speech - demo / README . md <nl> TRANSFORM scp : feat . scp <nl> scp : label . scp <nl> ` ` ` <nl> <nl> - Here the ` TRANSFORM ` is the transformation you want to apply to the features . By default we use ` NO_FEATURE_TRANSFORM ` . The ` scp : ` syntax is from Kaldi . The ` feat . scp ` is typically the file from ` data / sdm1 / train / feats . scp ` , and the ` label . scp ` is converted from the force - aligned labels located in ` exp / sdm1 / tri3a_ali ` . We use a script like below to prepare the feature files . Because the force - alignments are only generated on the training data , we simply split the training set into 90 / 10 parts , and use the 1 / 10 hold - out as the dev set ( validation set ) . <nl> - <nl> - ` ` ` bash <nl> - # ! / bin / bash <nl> - <nl> - # SDM - Signle Distant Microphone <nl> - micid = 1 # which mic from array should be used ? <nl> - mic = sdm $ micid <nl> - <nl> - # split the data : 90 % train 10 % cross - validation ( held - out ) , <nl> - dir = $ PWD / data / $ mic / train <nl> - [ ! - e $ { dir } _tr90 ] & & utils / subset_data_dir_tr_cv . sh $ dir $ { dir } _tr90 $ { dir } _cv10 <nl> - <nl> - # prepare listing data <nl> - dir = $ PWD / exp / $ mic / data - for - mxnet <nl> - mkdir - p $ dir <nl> - <nl> - # make post . scp , post . ark <nl> - ali - to - pdf exp / $ mic / tri3a_ali / final . mdl " ark : gunzip - c exp / $ mic / tri3a_ali / ali . * . gz | " \ <nl> - ark : - | ali - to - post ark : - ark , scp : $ dir / post . ark , $ dir / post . scp <nl> - <nl> - # generate dataset list , if the feature were unnomalized , make sure apply mean - variance normalization first ( e . g . apply - cmvn in kaldi ) <nl> - echo NO_FEATURE_TRANSFORM scp : $ PWD / data / $ mic / train_tr90 / feats . scp > $ dir / train . feats <nl> - echo scp : $ dir / post . scp > > $ dir / train . feats <nl> - <nl> - echo NO_FEATURE_TRANSFORM scp : $ PWD / data / $ mic / train_cv10 / feats . scp > $ dir / dev . feats <nl> - echo scp : $ dir / post . scp > > $ dir / dev . feats <nl> - ` ` ` <nl> + Here the ` TRANSFORM ` is the transformation you want to apply to the features . By default we use ` NO_FEATURE_TRANSFORM ` . The ` scp : ` syntax is from Kaldi . The ` feat . scp ` is typically the file from ` data / sdm1 / train / feats . scp ` , and the ` label . scp ` is converted from the force - aligned labels located in ` exp / sdm1 / tri3a_ali ` . Because the force - alignments are only generated on the training data , we split the training set into 90 / 10 parts , and use the 1 / 10 hold - out as the dev set ( validation set ) . The script [ run_ami . sh ] ( run_ami . sh ) will automatically do the splitting and format the file for MXNet . Please set the path in that script correctly before running . The [ run_ami . sh ] ( run_ami . sh ) script will actually run the full pipeline including training the acoustic model and decoding . So you can skip the following steps if that script runs successfully . <nl> <nl> # # # Run MXNet Acoustic Model Training <nl> <nl> The final frame accuracy was around 62 % . <nl> <nl> # # # Run decode on the trained acoustic model <nl> <nl> - 1 . Estimate senone priors by run ` python make_stats . py - - configfile = your - config . cfg | copy - feats ark : - ark : label_mean . ark ` ( edit necessary items like the path to the training dataset ) . It will generate the label counts in ` label_mean . ark ` . <nl> - 2 . Link to necessary Kaldi decode setup e . g . 
` local / ` and ` utils / ` and Run ` . / run_ami . sh - - model prefix model - - num_epoch num ` . <nl> + 1 . Estimate senone priors by running ` python make_stats . py - - configfile = your - config . cfg | copy - feats ark : - ark : label_mean . ark ` ( edit necessary items like the path to the training dataset ) . It will generate the label counts in ` label_mean . ark ` . <nl> + 2 . Link to necessary Kaldi decode setup e . g . ` local / ` and ` utils / ` and run ` . / run_ami . sh - - model prefix model - - num_epoch num ` . <nl> <nl> Here are the results on TIMIT and AMI test set ( using all default setup , 3 layer LSTM with projection layers ) : <nl> <nl> Here are the results on TIMIT and AMI test set ( using all default setup , 3 layer <nl> | AMI | 51 . 7 ( 42 . 2 ) | <nl> <nl> Note that for AMI 42 . 2 was evaluated non - overlapped speech . Kaldi - HMM baseline was 67 . 2 % and DNN was 57 . 5 % . <nl> - <nl> mmm a / example / speech - demo / make_stats . py <nl> ppp b / example / speech - demo / make_stats . py <nl> <nl> METHOD_TBPTT = ' truncated - bptt ' <nl> METHOD_SIMPLE = ' simple ' <nl> <nl> + <nl> def prepare_data ( args ) : <nl> batch_size = args . config . getint ( ' train ' , ' batch_size ' ) <nl> num_hidden = args . config . getint ( ' arch ' , ' num_hidden ' ) <nl> num_lstm_layer = args . config . getint ( ' arch ' , ' num_lstm_layer ' ) <nl> <nl> - init_c = [ ( ' l % d_init_c ' % l , ( batch_size , num_hidden ) ) for l in range ( num_lstm_layer ) ] <nl> - init_h = [ ( ' l % d_init_h ' % l , ( batch_size , num_hidden ) ) for l in range ( num_lstm_layer ) ] <nl> + init_c = [ ( ' l % d_init_c ' % l , ( batch_size , num_hidden ) ) for l in range ( num_lstm_layer ) ] <nl> + init_h = [ ( ' l % d_init_h ' % l , ( batch_size , num_hidden ) ) for l in range ( num_lstm_layer ) ] <nl> <nl> init_states = init_c + init_h <nl> <nl> def prepare_data ( args ) : <nl> " gpu_chunk " : 32768 , <nl> " lst_file " : file_test , <nl> " file_format " : file_format , <nl> - " separate_lines " : True , <nl> - " has_labels " : True <nl> + " separate_lines " : True , <nl> + " has_labels " : True <nl> } <nl> <nl> test_sets = DataReadStream ( test_data_args , feat_dim ) <nl> def prepare_data ( args ) : <nl> num_epoch = args . config . getint ( ' train ' , ' num_epoch ' ) <nl> model_name = get_checkpoint_path ( args ) <nl> logging . basicConfig ( level = logging . DEBUG , format = ' % ( asctime ) - 15s % ( message ) s ' ) <nl> - <nl> + <nl> # load the model <nl> label_mean = np . zeros ( ( label_dim , 1 ) , dtype = ' float32 ' ) <nl> data_test = TruncatedSentenceIter ( test_sets , batch_size , init_states , <nl> 20 , feat_dim = feat_dim , <nl> do_shuffling = False , pad_zeros = True , has_label = True ) <nl> - <nl> + <nl> for i , batch in enumerate ( data_test . labels ) : <nl> hist , edges = np . histogram ( batch . flat , bins = range ( 0 , label_dim + 1 ) ) <nl> label_mean + = hist . reshape ( label_dim , 1 ) <nl> def prepare_data ( args ) : <nl> <nl> <nl> args . config . write ( sys . stderr ) <nl> - <nl> mmm a / example / speech - demo / run_ami . sh <nl> ppp b / example / speech - demo / run_ami . sh <nl> <nl> <nl> # This script trains and evaluate LSTM models . There is no <nl> # discriminative training yet . <nl> - # In this recipe , CNTK directly read Kaldi features and labels , <nl> + # In this recipe , MXNet directly reads Kaldi features and labels , <nl> # which makes the whole pipline much simpler . 
<nl> <nl> set - e # Exit on non - zero return code from any command <nl> - set - o pipefail # Exit if any of the commands in the pipeline will <nl> + set - o pipefail # Exit if any of the commands in the pipeline will <nl> # return non - zero return code <nl> set - u # Fail on an undefined variable <nl> <nl> if [ $ stage - le 0 ] ; then <nl> <nl> for n in $ ( seq $ njdec ) ; do <nl> cat $ dir / rawpost / post . $ { n } . scp | | exit 1 ; <nl> - done > $ dir / post . scp <nl> + done > $ dir / post . scp <nl> fi <nl> <nl> if [ $ stage - le 1 ] ; then <nl> fi <nl> # generate label counts <nl> if [ $ stage - le 2 ] ; then <nl> $ cmd JOB = 1 : 1 $ dir / log / gen_label_mean . JOB . log \ <nl> - python make_stats . py - - config $ config - - data_train $ dir / train . feats \ | copy - feats ark : - ark : $ dir / label_mean . ark <nl> + python make_stats . py - - configfile $ config - - data_train $ dir / train . feats \ | copy - feats ark : - ark : $ dir / label_mean . ark <nl> echo NO_FEATURE_TRANSFORM ark : $ dir / label_mean . ark > $ dir / label_mean . feats <nl> fi <nl> <nl> <nl> # training , note that weight decay is for the whole batch ( 0 . 00001 * 20 ( minibatch ) * 40 ( batch_size ) ) <nl> if [ $ stage - le 3 ] ; then <nl> - python train_lstm_proj . py - - config $ config - - data_train $ dir / train . feats - - data_dev $ dir / dev . feats - - train_prefix $ PWD / $ expdir / $ prefix - - train_optimizer speechSGD - - train_learning_rate 1 - - train_context $ deviceNumber - - train_weight_decay 0 . 008 - - train_show_every 1000 <nl> + python train_lstm_proj . py - - configfile $ config - - data_train $ dir / train . feats - - data_dev $ dir / dev . feats - - train_prefix $ PWD / $ expdir / $ prefix - - train_optimizer speechSGD - - train_learning_rate 1 - - train_context $ deviceNumber - - train_weight_decay 0 . 008 - - train_show_every 1000 <nl> fi <nl> <nl> # decoding <nl> if [ $ stage - le 4 ] ; then <nl> $ graph_src $ dev_src $ expdir / decode_ $ { prefix } _ $ ( basename $ dev_src ) " $ mxnet_string " | | exit 1 ; <nl> <nl> fi <nl> - <nl>
Fix typos and incorrect option names in the speech - demo example
apache/incubator-mxnet
75e22954c9298525d020ecbb678752a63e37ea12
2016-05-10T02:31:10Z
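The decode prerequisite above, estimating senone priors, is just a label histogram: `make_stats.py` counts how often each senone id occurs in the force-aligned training labels (via `np.histogram` per batch). A minimal sketch of the same counting idea; the label ids and `label_dim` below are invented:

```cpp
#include <cstdio>
#include <vector>

// Accumulate unnormalized senone priors (label counts) over batches of
// frame-level labels, mirroring the np.histogram loop in make_stats.py.
int main()
{
  const int label_dim = 8; // invented; real systems have thousands of senones
  std::vector<long> counts(label_dim, 0);

  // Invented batches of force-aligned labels, one senone id per frame.
  const std::vector<std::vector<int>> batches = {{0, 1, 1, 3}, {3, 3, 7, 1}};
  for (const auto& batch : batches)
    for (int label : batch)
      if (label >= 0 && label < label_dim)
        ++counts[label];

  long total = 0;
  for (long c : counts) total += c;
  for (int i = 0; i < label_dim; ++i)
    std::printf("senone %d: count=%ld prior=%.3f\n", i, counts[i],
                total > 0 ? static_cast<double>(counts[i]) / total : 0.0);
  return 0;
}
```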
similarity index 92 % <nl> rename from src / bin / cli / karabiner . xcodeproj / project . pbxproj <nl> rename to src / bin / cli / karabiner_cli . xcodeproj / project . pbxproj <nl> mmm a / src / bin / cli / karabiner . xcodeproj / project . pbxproj <nl> ppp b / src / bin / cli / karabiner_cli . xcodeproj / project . pbxproj <nl> <nl> / * End PBXCopyFilesBuildPhase section * / <nl> <nl> / * Begin PBXFileReference section * / <nl> - 3452ECAA1E6BF17800A5144C / * karabiner * / = { isa = PBXFileReference ; explicitFileType = " compiled . mach - o . executable " ; includeInIndex = 0 ; path = karabiner ; sourceTree = BUILT_PRODUCTS_DIR ; } ; <nl> + 3452ECAA1E6BF17800A5144C / * karabiner_cli * / = { isa = PBXFileReference ; explicitFileType = " compiled . mach - o . executable " ; includeInIndex = 0 ; path = karabiner_cli ; sourceTree = BUILT_PRODUCTS_DIR ; } ; <nl> 3452ECAD1E6BF17800A5144C / * main . cpp * / = { isa = PBXFileReference ; lastKnownFileType = sourcecode . cpp . cpp ; path = main . cpp ; sourceTree = " < group > " ; } ; <nl> 3452ECB51E6BF4BC00A5144C / * configuration_monitor . hpp * / = { isa = PBXFileReference ; fileEncoding = 4 ; lastKnownFileType = sourcecode . cpp . h ; name = configuration_monitor . hpp ; path = . . / . . / . . / share / configuration_monitor . hpp ; sourceTree = " < group > " ; } ; <nl> 3452ECB61E6C5A1300A5144C / * constants . hpp * / = { isa = PBXFileReference ; fileEncoding = 4 ; lastKnownFileType = sourcecode . cpp . h ; name = constants . hpp ; path = . . / . . / . . / share / constants . hpp ; sourceTree = " < group > " ; } ; <nl> <nl> 3452ECA11E6BF17800A5144C = { <nl> isa = PBXGroup ; <nl> children = ( <nl> - 3452ECAC1E6BF17800A5144C / * karabiner * / , <nl> + 3452ECAC1E6BF17800A5144C / * karabiner_cli * / , <nl> 3452ECAB1E6BF17800A5144C / * Products * / , <nl> 3452ECBC1E6C5B9F00A5144C / * Frameworks * / , <nl> ) ; <nl> <nl> 3452ECAB1E6BF17800A5144C / * Products * / = { <nl> isa = PBXGroup ; <nl> children = ( <nl> - 3452ECAA1E6BF17800A5144C / * karabiner * / , <nl> + 3452ECAA1E6BF17800A5144C / * karabiner_cli * / , <nl> ) ; <nl> name = Products ; <nl> sourceTree = " < group > " ; <nl> } ; <nl> - 3452ECAC1E6BF17800A5144C / * karabiner * / = { <nl> + 3452ECAC1E6BF17800A5144C / * karabiner_cli * / = { <nl> isa = PBXGroup ; <nl> children = ( <nl> 3452ECAD1E6BF17800A5144C / * main . cpp * / , <nl> 3452ECB41E6BF4AF00A5144C / * share * / , <nl> ) ; <nl> - path = karabiner ; <nl> + path = karabiner_cli ; <nl> sourceTree = " < group > " ; <nl> } ; <nl> 3452ECB41E6BF4AF00A5144C / * share * / = { <nl> <nl> / * End PBXGroup section * / <nl> <nl> / * Begin PBXNativeTarget section * / <nl> - 3452ECA91E6BF17800A5144C / * karabiner * / = { <nl> + 3452ECA91E6BF17800A5144C / * karabiner_cli * / = { <nl> isa = PBXNativeTarget ; <nl> - buildConfigurationList = 3452ECB11E6BF17800A5144C / * Build configuration list for PBXNativeTarget " karabiner " * / ; <nl> + buildConfigurationList = 3452ECB11E6BF17800A5144C / * Build configuration list for PBXNativeTarget " karabiner_cli " * / ; <nl> buildPhases = ( <nl> 3452ECA61E6BF17800A5144C / * Sources * / , <nl> 3452ECA71E6BF17800A5144C / * Frameworks * / , <nl> <nl> ) ; <nl> dependencies = ( <nl> ) ; <nl> - name = karabiner ; <nl> - productName = karabiner ; <nl> - productReference = 3452ECAA1E6BF17800A5144C / * karabiner * / ; <nl> + name = karabiner_cli ; <nl> + productName = karabiner_cli ; <nl> + productReference = 3452ECAA1E6BF17800A5144C / * karabiner_cli * / ; <nl> productType = " com . apple . product - type . 
tool " ; <nl> } ; <nl> / * End PBXNativeTarget section * / <nl> <nl> } ; <nl> } ; <nl> } ; <nl> - buildConfigurationList = 3452ECA51E6BF17800A5144C / * Build configuration list for PBXProject " karabiner " * / ; <nl> + buildConfigurationList = 3452ECA51E6BF17800A5144C / * Build configuration list for PBXProject " karabiner_cli " * / ; <nl> compatibilityVersion = " Xcode 6 . 3 " ; <nl> developmentRegion = English ; <nl> hasScannedForEncodings = 0 ; <nl> <nl> projectDirPath = " " ; <nl> projectRoot = " " ; <nl> targets = ( <nl> - 3452ECA91E6BF17800A5144C / * karabiner * / , <nl> + 3452ECA91E6BF17800A5144C / * karabiner_cli * / , <nl> ) ; <nl> } ; <nl> / * End PBXProject section * / <nl> <nl> / * End XCBuildConfiguration section * / <nl> <nl> / * Begin XCConfigurationList section * / <nl> - 3452ECA51E6BF17800A5144C / * Build configuration list for PBXProject " karabiner " * / = { <nl> + 3452ECA51E6BF17800A5144C / * Build configuration list for PBXProject " karabiner_cli " * / = { <nl> isa = XCConfigurationList ; <nl> buildConfigurations = ( <nl> 3452ECB01E6BF17800A5144C / * Release * / , <nl> <nl> defaultConfigurationIsVisible = 0 ; <nl> defaultConfigurationName = Release ; <nl> } ; <nl> - 3452ECB11E6BF17800A5144C / * Build configuration list for PBXNativeTarget " karabiner " * / = { <nl> + 3452ECB11E6BF17800A5144C / * Build configuration list for PBXNativeTarget " karabiner_cli " * / = { <nl> isa = XCConfigurationList ; <nl> buildConfigurations = ( <nl> 3452ECB31E6BF17800A5144C / * Release * / , <nl> similarity index 68 % <nl> rename from src / bin / cli / karabiner . xcodeproj / project . xcworkspace / contents . xcworkspacedata <nl> rename to src / bin / cli / karabiner_cli . xcodeproj / project . xcworkspace / contents . xcworkspacedata <nl> mmm a / src / bin / cli / karabiner . xcodeproj / project . xcworkspace / contents . xcworkspacedata <nl> ppp b / src / bin / cli / karabiner_cli . xcodeproj / project . xcworkspace / contents . xcworkspacedata <nl> <nl> < Workspace <nl> version = " 1 . 0 " > <nl> < FileRef <nl> - location = " self : karabiner . xcodeproj " > <nl> + location = " self : karabiner_cli . xcodeproj " > <nl> < / FileRef > <nl> < / Workspace > <nl> similarity index 94 % <nl> rename from src / bin / cli / karabiner / main . cpp <nl> rename to src / bin / cli / karabiner_cli / main . cpp <nl> mmm a / src / bin / cli / karabiner / main . cpp <nl> ppp b / src / bin / cli / karabiner_cli / main . cpp <nl> class logger final { <nl> static spdlog : : logger & get_logger ( void ) { <nl> static std : : shared_ptr < spdlog : : logger > logger ; <nl> if ( ! logger ) { <nl> - logger = spdlog : : stdout_logger_mt ( " karabiner " , true ) ; <nl> + logger = spdlog : : stdout_logger_mt ( " karabiner_cli " , true ) ; <nl> logger - > set_pattern ( " [ % l ] % v " ) ; <nl> logger - > set_level ( spdlog : : level : : err ) ; <nl> } <nl> class logger final { <nl> int main ( int argc , char * argv [ ] ) { <nl> krbn : : thread_utility : : register_main_thread ( ) ; <nl> <nl> - cxxopts : : Options options ( " karabiner " , " A command line utility of Karabiner - Elements . " ) ; <nl> + cxxopts : : Options options ( " karabiner_cli " , " A command line utility of Karabiner - Elements . " ) ; <nl> <nl> options . add_options ( ) ( " select - profile " , " Select a profile by name . " , cxxopts : : value < std : : string > ( ) ) ; <nl> options . add_options ( ) ( " copy - current - profile - to - system - default - profile " , " Copy the current profile to system default profile . 
" ) ; <nl> int main ( int argc , char * argv [ ] ) { <nl> <nl> std : : cout < < options . help ( ) < < std : : endl ; <nl> std : : cout < < " Examples : " < < std : : endl ; <nl> - std : : cout < < " karabiner - - select - profile ' Default profile ' " < < std : : endl ; <nl> + std : : cout < < " karabiner_cli - - select - profile ' Default profile ' " < < std : : endl ; <nl> std : : cout < < std : : endl ; <nl> <nl> return 1 ; <nl>
rename cli / karabiner - > cli / karabiner_cli
pqrs-org/Karabiner-Elements
98a3dc67328d07962b922a03e1a8c6c0a71257dc
2017-03-05T15:30:56Z
mmm a / dbms / cmake / version . cmake <nl> ppp b / dbms / cmake / version . cmake <nl> set ( VERSION_REVISION 54426 ) <nl> set ( VERSION_MAJOR 19 ) <nl> set ( VERSION_MINOR 15 ) <nl> set ( VERSION_PATCH 1 ) <nl> - set ( VERSION_GITHASH 6f1a8c37abe6ee4e7ee74c0b5cb9c05a87417b61 ) <nl> - set ( VERSION_DESCRIBE v19 . 15 . 1 . 1 - prestable ) <nl> - set ( VERSION_STRING 19 . 15 . 1 . 1 ) <nl> + set ( VERSION_GITHASH 38f65a6a2120d2e76bcf71131068f41195149dfc ) <nl> + set ( VERSION_DESCRIBE v19 . 15 . 1 . 1398 - prestable ) <nl> + set ( VERSION_STRING 19 . 15 . 1 . 1398 ) <nl> # end of autochange <nl> <nl> set ( VERSION_EXTRA " " CACHE STRING " " ) <nl>
Auto version update to [ 19 . 15 . 1 . 1398 ] [ 54426 ]
ClickHouse/ClickHouse
ec86a9b9335e2008b1f8753aa826fde87075145b
2019-09-24T11:46:58Z
mmm a / script / lib / config . py <nl> ppp b / script / lib / config . py <nl> <nl> import sys <nl> <nl> BASE_URL = ' https : / / gh - contractor - zcbenz . s3 . amazonaws . com / libchromiumcontent ' <nl> - LIBCHROMIUMCONTENT_COMMIT = ' 2dfdf169b582e3f051e1fec3dd7df2bc179e1aa6 ' <nl> + LIBCHROMIUMCONTENT_COMMIT = ' bb95d5c7958c649bb346d59a13ee0d8f15464304 ' <nl> <nl> ARCH = { <nl> ' cygwin ' : ' 32bit ' , <nl>
Upgrade to Chrome 38 . 0 . 2125 . 122
electron/electron
b7816d85a133e67c3382b1be656fa1282f58018e
2014-11-15T03:20:47Z
mmm a / src / core / lib / iomgr / ev_epollex_linux . c <nl> ppp b / src / core / lib / iomgr / ev_epollex_linux . c <nl> const grpc_event_engine_vtable * grpc_init_epollex_linux ( <nl> return NULL ; <nl> } <nl> <nl> + # ifndef NDEBUG <nl> grpc_register_tracer ( & grpc_trace_pollable_refcount ) ; <nl> + # endif <nl> <nl> fd_global_init ( ) ; <nl> <nl>
Actually register tracer
grpc/grpc
14ee7c308d1ff5f7b502da3b631e034e54c4d64d
2017-10-05T02:34:09Z
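The guard matters because `grpc_trace_pollable_refcount` is itself declared under `#ifndef NDEBUG` in this file, so an unconditional registration would reference a symbol that release builds compile out. A generic sketch of the debug-only registration pattern; the names are placeholders, not grpc's actual API:

```cpp
#include <cstdio>

#ifndef NDEBUG
// Debug-only diagnostics flag, standing in for a tracer that only exists
// when NDEBUG is not defined.
static bool g_refcount_trace_enabled = false;

static void register_debug_tracer()
{
  g_refcount_trace_enabled = true; // placeholder for grpc_register_tracer(...)
  std::puts("refcount tracer registered");
}
#endif

int main()
{
#ifndef NDEBUG
  // Compiled out entirely under -DNDEBUG, so release builds never touch
  // the debug-only symbol above.
  register_debug_tracer();
#endif
  std::puts("event engine initialized");
  return 0;
}
```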
mmm a / Makefile <nl> ppp b / Makefile <nl> endif <nl> <nl> libz . a : <nl> - rm - rf zlib - 1 . 2 . 8 <nl> - curl - O http : / / zlib . net / zlib - 1 . 2 . 8 . tar . gz <nl> + curl - O - L http : / / zlib . net / zlib - 1 . 2 . 8 . tar . gz <nl> tar xvzf zlib - 1 . 2 . 8 . tar . gz <nl> cd zlib - 1 . 2 . 8 & & CFLAGS = ' - fPIC ' . / configure - - static & & make <nl> cp zlib - 1 . 2 . 8 / libz . a . <nl> <nl> libbz2 . a : <nl> - rm - rf bzip2 - 1 . 0 . 6 <nl> - curl - O http : / / www . bzip . org / 1 . 0 . 6 / bzip2 - 1 . 0 . 6 . tar . gz <nl> + curl - O - L http : / / www . bzip . org / 1 . 0 . 6 / bzip2 - 1 . 0 . 6 . tar . gz <nl> tar xvzf bzip2 - 1 . 0 . 6 . tar . gz <nl> cd bzip2 - 1 . 0 . 6 & & make CFLAGS = ' - fPIC - O2 - g - D_FILE_OFFSET_BITS = 64 ' <nl> cp bzip2 - 1 . 0 . 6 / libbz2 . a . <nl> <nl> libsnappy . a : <nl> - - rm - rf snappy - 1 . 1 . 1 <nl> - curl - O https : / / snappy . googlecode . com / files / snappy - 1 . 1 . 1 . tar . gz <nl> - tar xvzf snappy - 1 . 1 . 1 . tar . gz <nl> - cd snappy - 1 . 1 . 1 & & . / configure - - with - pic - - enable - static <nl> - cd snappy - 1 . 1 . 1 & & make <nl> - cp snappy - 1 . 1 . 1 / . libs / libsnappy . a . <nl> + - rm - rf snappy - 1 . 1 . 3 <nl> + curl - O - L https : / / github . com / google / snappy / releases / download / 1 . 1 . 3 / snappy - 1 . 1 . 3 . tar . gz <nl> + tar xvzf snappy - 1 . 1 . 3 . tar . gz <nl> + cd snappy - 1 . 1 . 3 & & . / configure - - with - pic - - enable - static <nl> + cd snappy - 1 . 1 . 3 & & make <nl> + cp snappy - 1 . 1 . 3 / . libs / libsnappy . a . <nl> <nl> liblz4 . a : <nl> - rm - rf lz4 - r127 <nl> - curl - O https : / / codeload . github . com / Cyan4973 / lz4 / tar . gz / r127 <nl> + curl - O - L https : / / codeload . github . com / Cyan4973 / lz4 / tar . gz / r127 <nl> mv r127 lz4 - r127 . tar . gz <nl> tar xvzf lz4 - r127 . tar . gz <nl> cd lz4 - r127 / lib & & make CFLAGS = ' - fPIC ' all <nl> java_static_libobjects = $ ( patsubst % , jls / % , $ ( LIBOBJECTS ) ) <nl> CLEAN_FILES + = jls <nl> <nl> JAVA_STATIC_FLAGS = - DZLIB - DBZIP2 - DSNAPPY - DLZ4 <nl> - JAVA_STATIC_INCLUDES = - I . / zlib - 1 . 2 . 8 - I . / bzip2 - 1 . 0 . 6 - I . / snappy - 1 . 1 . 1 - I . / lz4 - r127 / lib <nl> + JAVA_STATIC_INCLUDES = - I . / zlib - 1 . 2 . 8 - I . / bzip2 - 1 . 0 . 6 - I . / snappy - 1 . 1 . 3 - I . / lz4 - r127 / lib <nl> <nl> $ ( java_static_libobjects ) : jls / % . o : % . cc libz . a libbz2 . a libsnappy . a liblz4 . a <nl> $ ( AM_V_CC ) mkdir - p $ ( @ D ) & & $ ( CXX ) $ ( CXXFLAGS ) $ ( JAVA_STATIC_FLAGS ) $ ( JAVA_STATIC_INCLUDES ) - fPIC - c $ < - o $ @ $ ( COVERAGEFLAGS ) <nl>
Update the download location of Snappy ( )
facebook/rocksdb
c8513cde0cb007d7a198fa61b8d245a71988281d
2016-08-29T03:24:08Z
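The recurring change here is `-L`, which tells curl to follow HTTP redirects; GitHub's release downloads in particular answer with a 3xx, so without `-L` the Makefile would save the redirect response instead of the tarball. The libcurl equivalent is `CURLOPT_FOLLOWLOCATION`; a minimal sketch with a placeholder URL (the response body goes to stdout here, which a real build script would redirect to a file):

```cpp
#include <cstdio>
#include <curl/curl.h>

int main()
{
  curl_global_init(CURL_GLOBAL_DEFAULT);
  CURL* curl = curl_easy_init();
  if (curl == nullptr) return 1;

  // Placeholder URL; any 3xx redirect is followed transparently.
  curl_easy_setopt(curl, CURLOPT_URL, "https://example.com/archive.tar.gz");
  curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1L); // curl's -L flag
  curl_easy_setopt(curl, CURLOPT_MAXREDIRS, 10L);     // guard against loops

  const CURLcode rc = curl_easy_perform(curl);
  if (rc != CURLE_OK)
    std::fprintf(stderr, "download failed: %s\n", curl_easy_strerror(rc));

  curl_easy_cleanup(curl);
  curl_global_cleanup();
  return rc == CURLE_OK ? 0 : 1;
}
```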
mmm a / docs / tools / requirements . txt <nl> ppp b / docs / tools / requirements . txt <nl> sphinxcontrib - websupport = = 1 . 0 . 1 <nl> tornado = = 5 . 1 <nl> typing = = 3 . 6 . 2 <nl> Unidecode = = 1 . 0 . 23 <nl> - urllib3 = = 1 . 22 <nl> + urllib3 = = 1 . 23 <nl>
update urllib3 dependency
ClickHouse/ClickHouse
d09210f9be5cb9543fb3aefbba1275d000fab0a1
2018-12-12T17:08:24Z
mmm a / src / objective - c / GRPCClient / private / GRPCChannel . m <nl> ppp b / src / objective - c / GRPCClient / private / GRPCChannel . m <nl> - ( nullable instancetype ) initWithUnmanagedChannel : ( nullable grpc_channel * ) unman <nl> configuration : ( GRPCChannelConfiguration * ) configuration { <nl> if ( ( self = [ super init ] ) ) { <nl> _unmanagedChannel = unmanagedChannel ; <nl> - _configuration = configuration ; <nl> + _configuration = [ configuration copy ] ; <nl> _channelRef = [ [ GRPCChannelRef alloc ] initWithDestroyDelay : kChannelDestroyDelay <nl> destroyChannelCallback : ^ { <nl> [ self destroyChannel ] ; <nl>
copy configuration
grpc/grpc
5e3e744d448c8cd1271cae7e8d40d3bdeaff762f
2018-10-20T01:32:05Z
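The one-word fix stores a copy of the configuration rather than the caller's reference, so a caller mutating its configuration object afterwards cannot silently change what the channel was built with. A C++ analogue of the same defensive-copy idea; the types below are invented, not gRPC's:

```cpp
#include <cstdio>
#include <string>

struct ChannelConfiguration {
  std::string host;
  int timeout_ms = 0;
};

// Holds its own copy of the configuration: the by-value member is the
// C++ equivalent of Objective-C's [configuration copy].
class Channel {
 public:
  explicit Channel(const ChannelConfiguration& config) : config_(config) {}
  const ChannelConfiguration& config() const { return config_; }

 private:
  ChannelConfiguration config_;
};

int main()
{
  ChannelConfiguration cfg{"example.org", 250};
  Channel channel(cfg);
  cfg.timeout_ms = 9999; // caller keeps editing its own object
  std::printf("channel still sees timeout=%d ms\n", channel.config().timeout_ms);
  return 0;
}
```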
mmm a / tests / integration / test_row_policy / test . py <nl> ppp b / tests / integration / test_row_policy / test . py <nl> def test_join ( ) : <nl> <nl> <nl> def test_cannot_trick_row_policy_with_keyword_with ( ) : <nl> - assert instance . query ( " WITH 0 AS a SELECT * FROM mydb . filtered_table1 " ) = = " 1 \ t0 \ n1 \ t1 \ n " <nl> - assert instance . query ( " WITH 0 AS a SELECT a , b FROM mydb . filtered_table1 " ) = = " 1 \ t0 \ n1 \ t1 \ n " <nl> - assert instance . query ( " WITH 0 AS a SELECT a FROM mydb . filtered_table1 " ) = = " 1 \ n1 \ n " <nl> + assert instance . query ( " WITH 0 AS a SELECT * FROM mydb . filtered_table1 " ) = = " 0 \ t0 \ n0 \ t1 \ n " <nl> + assert instance . query ( " WITH 0 AS a SELECT a , b FROM mydb . filtered_table1 " ) = = " 0 \ t0 \ n0 \ t1 \ n " <nl> + assert instance . query ( " WITH 0 AS a SELECT a FROM mydb . filtered_table1 " ) = = " 0 \ n0 \ n " <nl> assert instance . query ( " WITH 0 AS a SELECT b FROM mydb . filtered_table1 " ) = = " 0 \ n1 \ n " <nl> <nl> <nl>
Update test . py
ClickHouse/ClickHouse
672c952d39786fa1a97865e2df01baec8bd762ba
2020-04-20T14:44:21Z
mmm a / xbmc / GUISettings . cpp <nl> ppp b / xbmc / GUISettings . cpp <nl> void CGUISettings : : Initialize ( ) <nl> audiomode . insert ( make_pair ( 420 , AUDIO_HDMI ) ) ; <nl> AddInt ( ao , " audiooutput . mode " , 337 , AUDIO_ANALOG , audiomode , SPIN_CONTROL_TEXT ) ; <nl> <nl> - / * hide this from apple users until CoreAudio has been updated to support this * / <nl> - # ifndef __APPLE__ <nl> map < int , int > channelLayout ; <nl> for ( int layout = 0 ; layout < PCM_MAX_LAYOUT ; + + layout ) <nl> channelLayout . insert ( make_pair ( 34101 + layout , layout ) ) ; <nl> AddInt ( ao , " audiooutput . channellayout " , 34100 , PCM_LAYOUT_2_0 , channelLayout , SPIN_CONTROL_TEXT ) ; <nl> AddBool ( ao , " audiooutput . dontnormalizelevels " , 346 , true ) ; <nl> - # endif <nl> <nl> AddBool ( ao , " audiooutput . ac3passthrough " , 364 , true ) ; <nl> AddBool ( ao , " audiooutput . dtspassthrough " , 254 , true ) ; <nl> void CGUISettings : : Initialize ( ) <nl> <nl> # ifdef __APPLE__ <nl> AddString ( ao , " audiooutput . audiodevice " , 545 , " Default " , SPIN_CONTROL_TEXT ) ; <nl> - / / AddString ( ao , " audiooutput . passthroughdevice " , 546 , " S / PDIF " , BUTTON_CONTROL_INPUT ) ; <nl> - AddBool ( ao , " audiooutput . downmixmultichannel " , 548 , true ) ; <nl> # elif defined ( _LINUX ) <nl> AddSeparator ( ao , " audiooutput . sep1 " ) ; <nl> AddString ( ao , " audiooutput . audiodevice " , 545 , " default " , SPIN_CONTROL_TEXT ) ; <nl> void CGUISettings : : Initialize ( ) <nl> AddString ( ao , " audiooutput . passthroughdevice " , 546 , " iec958 " , SPIN_CONTROL_TEXT ) ; <nl> AddString ( ao , " audiooutput . custompassthrough " , 1301 , " " , EDIT_CONTROL_INPUT ) ; <nl> AddSeparator ( ao , " audiooutput . sep3 " ) ; <nl> - / / AddBool ( ao , " audiooutput . downmixmultichannel " , 548 , true ) ; <nl> # elif defined ( _WIN32 ) <nl> AddString ( ao , " audiooutput . audiodevice " , 545 , " Default " , SPIN_CONTROL_TEXT ) ; <nl> - / / AddBool ( ao , " audiooutput . downmixmultichannel " , 548 , true ) ; <nl> # endif <nl> <nl> CSettingsCategory * in = AddCategory ( 4 , " input " , 14094 ) ; <nl> mmm a / xbmc / cores / AudioRenderers / CoreAudioRenderer . cpp <nl> ppp b / xbmc / cores / AudioRenderers / CoreAudioRenderer . cpp <nl> <nl> # include " utils / log . h " <nl> # include " utils / TimeUtils . h " <nl> <nl> + / / based on Win32WASAPI , with default 5 channel layout changed from 4 . 1 to 5 . 
0 <nl> + const enum PCMChannels default_channel_layout [ ] [ 8 ] = <nl> + { <nl> + { PCM_FRONT_CENTER } , <nl> + { PCM_FRONT_LEFT , PCM_FRONT_RIGHT } , <nl> + { PCM_FRONT_LEFT , PCM_FRONT_RIGHT , PCM_LOW_FREQUENCY } , <nl> + { PCM_FRONT_LEFT , PCM_FRONT_RIGHT , PCM_BACK_LEFT , PCM_BACK_RIGHT } , <nl> + { PCM_FRONT_LEFT , PCM_FRONT_RIGHT , PCM_FRONT_CENTER , PCM_BACK_LEFT , PCM_BACK_RIGHT } , <nl> + { PCM_FRONT_LEFT , PCM_FRONT_RIGHT , PCM_FRONT_CENTER , PCM_LOW_FREQUENCY , PCM_BACK_LEFT , PCM_BACK_RIGHT } , <nl> + { PCM_FRONT_LEFT , PCM_FRONT_RIGHT , PCM_FRONT_CENTER , PCM_LOW_FREQUENCY , PCM_BACK_CENTER , PCM_BACK_LEFT , PCM_BACK_RIGHT } , <nl> + { PCM_FRONT_LEFT , PCM_FRONT_RIGHT , PCM_FRONT_CENTER , PCM_LOW_FREQUENCY , PCM_BACK_LEFT , PCM_BACK_RIGHT , PCM_SIDE_LEFT , PCM_SIDE_RIGHT } <nl> + } ; <nl> + <nl> + / / default order of output channels , based on Win32WASAPI <nl> + const enum PCMChannels channel_order [ ] = { PCM_FRONT_LEFT , PCM_FRONT_RIGHT , PCM_FRONT_CENTER , PCM_LOW_FREQUENCY , PCM_BACK_LEFT , PCM_BACK_RIGHT , PCM_FRONT_LEFT_OF_CENTER , PCM_FRONT_RIGHT_OF_CENTER , PCM_BACK_CENTER , PCM_SIDE_LEFT , PCM_SIDE_RIGHT } ; <nl> + <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / CAtomicAllocator : Wrapper class for lf_heap . <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> bool CCoreAudioRenderer : : Initialize ( IAudioCallback * pCallback , const CStdString & <nl> else <nl> { <nl> / / Standard PCM data <nl> - configured = InitializePCM ( iChannels , uiSamplesPerSec , uiBitsPerSample ) ; <nl> + configured = InitializePCM ( iChannels , uiSamplesPerSec , uiBitsPerSample , channelMap , bIsMusic ) ; <nl> / / TODO : wait for audio device startup <nl> Sleep ( 100 ) ; <nl> } <nl> bool CCoreAudioRenderer : : Initialize ( IAudioCallback * pCallback , const CStdString & <nl> CLog : : Log ( LOGDEBUG , " CoreAudioRenderer : : Initialize : Renderer Configuration - Chunk Len : % u , Max Cache : % lu ( % 0 . 0fms ) . " , m_ChunkLen , m_MaxCacheLen , 1000 . 0 * ( float ) m_MaxCacheLen / ( float ) m_AvgBytesPerSec ) ; <nl> CLog : : Log ( LOGINFO , " CoreAudioRenderer : : Initialize : Successfully configured audio output . " ) ; <nl> <nl> + / / Make space for remap processing <nl> + / / AddPackets will not accept more data than m_MaxCacheLen , so a fixed size buffer should be okay . <nl> + / / Do we need to catch memory allocation errors ? 
<nl> + m_RemapBuffer = new char [ m_MaxCacheLen ] ; <nl> + CLog : : Log ( LOGDEBUG , " CoreAudioRenderer : : Initialize : Allocated % lu bytes for channel remapping " , m_MaxCacheLen ) ; <nl> <nl> return true ; <nl> } <nl> bool CCoreAudioRenderer : : Deinitialize ( ) <nl> Stop ( ) ; <nl> / / Reset our state <nl> m_ChunkLen = 0 ; <nl> + delete [ ] m_RemapBuffer ; <nl> + m_RemapBuffer = NULL ; <nl> + CLog : : Log ( LOGDEBUG , " CoreAudioRenderer : : Deinitialize : deleted remapping buffer " ) ; <nl> + <nl> m_MaxCacheLen = 0 ; <nl> m_AvgBytesPerSec = 0 ; <nl> if ( m_Passthrough ) <nl> unsigned int CCoreAudioRenderer : : AddPackets ( const void * data , DWORD len ) <nl> unsigned int cacheSpace = GetSpace ( ) ; <nl> if ( len > cacheSpace ) <nl> return 0 ; / / Wait until we can accept all of it <nl> - <nl> - size_t bytesUsed = m_pCache - > AddData ( ( void * ) data , len ) ; <nl> + <nl> + / / Call channel remapping routine if available available and required <nl> + if ( m_remap . CanRemap ( ) & & ! m_Passthrough ) <nl> + m_remap . Remap ( ( void * ) data , ( void * ) m_RemapBuffer , len / m_BytesPerFrame ) ; <nl> + else <nl> + memcpy ( m_RemapBuffer , data , len ) ; <nl> + size_t bytesUsed = m_pCache - > AddData ( ( void * ) m_RemapBuffer , len ) ; <nl> <nl> # ifdef _DEBUG <nl> / / Update tracking variable <nl> OSStatus CCoreAudioRenderer : : DirectRenderCallback ( AudioDeviceID inDevice , const <nl> / / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> / / Audio Device Initialization Methods <nl> / / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - bool CCoreAudioRenderer : : InitializePCM ( UInt32 channels , UInt32 samplesPerSecond , UInt32 bitsPerSample ) <nl> + bool CCoreAudioRenderer : : InitializePCM ( UInt32 channels , UInt32 samplesPerSecond , UInt32 bitsPerSample , enum PCMChannels * channelMap , bool bIsMusic ) <nl> { <nl> + / / Set up audio channel remaping <nl> + if ( ! channelMap ) <nl> + { <nl> + channelMap = ( PCMChannels * ) default_channel_layout [ channels - 1 ] ; <nl> + CLog : : Log ( LOGDEBUG , " CoreAudioRenderer : : InitializePCM : no channel map available for source , using % u channel default map " , channels ) ; <nl> + } <nl> + else <nl> + CLog : : Log ( LOGDEBUG , " CoreAudioRenderer : : InitializePCM : using supplied channel map for audio source " , channels ) ; <nl> + <nl> + PCMChannels * outLayout = m_remap . SetInputFormat ( channels , channelMap , bitsPerSample / 8 ) ; <nl> + <nl> + / / not used yet : count channels in layout selected in GUI ( copied from Win32WASAPI ) <nl> + / / this could be used to create the AudioUnit with only the downmixed number of channels , but this is not currently done : <nl> + / / - AudioUnit handles hardware with too few channels without problems ( so e . g . downmixing to a hardware 4 . 0 setup should work ) <nl> + / / - keeping the channel number the same gives the option of simply leaving out remap in AddPackets where there is a problem . <nl> + int layoutChannels = 0 ; <nl> + for ( PCMChannels * channel = outLayout ; * channel ! 
= PCM_INVALID ; channel + + ) <nl> + + + layoutChannels ; <nl> + <nl> / / Set the input stream format for the AudioUnit ( this is what is being sent to us ) <nl> AudioStreamBasicDescription inputFormat ; <nl> inputFormat . mFormatID = kAudioFormatLinearPCM ; / / Data encoding format <nl> bool CCoreAudioRenderer : : InitializePCM ( UInt32 channels , UInt32 samplesPerSecond , <nl> inputFormat . mReserved = 0 ; <nl> if ( ! m_AudioUnit . SetInputFormat ( & inputFormat ) ) <nl> return false ; <nl> - <nl> - / / TODO : Handle channel mapping <nl> + <nl> + / / Set output format for remap , using default speaker order for now . <nl> + / / The number of channels in the audio device is still based on the channels in the source ( 6 for 5 . 1 etc . ) even with downmixing active <nl> + / / We should probably get the speaker position / order information from the OSX audio settings . <nl> + m_remap . SetOutputFormat ( channels , ( PCMChannels * ) channel_order , false ) ; <nl> + if ( m_remap . CanRemap ( ) ) <nl> + CLog : : Log ( LOGDEBUG , " CoreAudioRenderer : : InitializePCM : Successfully created a remapping matrix . " ) ; <nl> <nl> m_BytesPerFrame = inputFormat . mBytesPerFrame ; <nl> m_AvgBytesPerSec = inputFormat . mSampleRate * inputFormat . mBytesPerFrame ; / / 1 sample per channel per frame <nl> bool CCoreAudioRenderer : : InitializePCMEncoded ( UInt32 sampleRate ) <nl> / / Set the Sample Rate as defined by the spec . <nl> m_AudioDevice . SetNominalSampleRate ( ( float ) sampleRate ) ; <nl> <nl> - if ( ! InitializePCM ( 2 , sampleRate , 16 ) ) <nl> + if ( ! InitializePCM ( 2 , sampleRate , 16 , NULL , false ) ) <nl> return false ; <nl> <nl> m_EnableVolumeControl = false ; / / Prevent attempts to change the output volume . It is not possible with encoded audio <nl> mmm a / xbmc / cores / AudioRenderers / CoreAudioRenderer . h <nl> ppp b / xbmc / cores / AudioRenderers / CoreAudioRenderer . h <nl> class CCoreAudioRenderer : public IAudioRenderer <nl> static OSStatus RenderCallback ( void * inRefCon , AudioUnitRenderActionFlags * ioActionFlags , const AudioTimeStamp * inTimeStamp , UInt32 inBusNumber , UInt32 inNumberFrames , AudioBufferList * ioData ) ; <nl> static OSStatus DirectRenderCallback ( AudioDeviceID inDevice , const AudioTimeStamp * inNow , const AudioBufferList * inInputData , const AudioTimeStamp * inInputTime , AudioBufferList * outOutputData , const AudioTimeStamp * inOutputTime , void * inClientData ) ; <nl> bool InitializeEncoded ( AudioDeviceID outputDevice , UInt32 sampleRate ) ; <nl> - bool InitializePCM ( UInt32 channels , UInt32 samplesPerSecond , UInt32 bitsPerSample ) ; <nl> + bool InitializePCM ( UInt32 channels , UInt32 samplesPerSecond , UInt32 bitsPerSample , enum PCMChannels * channelMap , bool bIsMusic ) ; <nl> bool InitializePCMEncoded ( UInt32 sampleRate ) ; <nl> <nl> bool m_Pause ; <nl> class CCoreAudioRenderer : public IAudioRenderer <nl> <nl> long m_CurrentVolume ; / / Courtesy of the jerk that made GetCurrentVolume a const . . . <nl> unsigned int m_ChunkLen ; / / Minimum amount of data accepted by AddPackets <nl> + char * m_RemapBuffer ; / / Temporary buffer for channel remapping <nl> CSliceQueue * m_pCache ; <nl> size_t m_MaxCacheLen ; / / Maximum number of bytes to be cached by the renderer . <nl> <nl> mmm a / xbmc / cores / dvdplayer / DVDCodecs / Audio / DVDAudioCodecFFmpeg . cpp <nl> ppp b / xbmc / cores / dvdplayer / DVDCodecs / Audio / DVDAudioCodecFFmpeg . 
cpp <nl> bool CDVDAudioCodecFFmpeg : : Open ( CDVDStreamInfo & hints , CDVDCodecOptions & options <nl> if ( m_pCodecContext - > bits_per_coded_sample = = 0 ) <nl> m_pCodecContext - > bits_per_coded_sample = 16 ; <nl> <nl> - / * for now , only set the requested layout for non - apple architecture * / <nl> - # ifdef __APPLE__ <nl> - / * if we need to downmix , do it in ffmpeg as codecs are smarter then we can ever be * / <nl> - / * wmapro does not support this * / <nl> - if ( hints . codec ! = CODEC_ID_WMAPRO & & g_guiSettings . GetBool ( " audiooutput . downmixmultichannel " ) ) <nl> - { <nl> - m_pCodecContext - > request_channel_layout = CH_LAYOUT_STEREO ; <nl> - / / below is required or center channel is missing with VC1 content under OSX . <nl> - m_pCodecContext - > request_channels = 2 ; <nl> - } <nl> - # endif <nl> - <nl> if ( hints . extradata & & hints . extrasize > 0 ) <nl> { <nl> m_pCodecContext - > extradata_size = hints . extrasize ; <nl> mmm a / xbmc / cores / dvdplayer / DVDCodecs / Audio / DVDAudioCodecLibDts . cpp <nl> ppp b / xbmc / cores / dvdplayer / DVDCodecs / Audio / DVDAudioCodecLibDts . cpp <nl> void CDVDAudioCodecLibDts : : SetupChannels ( int flags ) <nl> m_iOutputChannels = m_iSourceChannels ; <nl> m_iOutputFlags = m_iSourceFlags ; <nl> <nl> - / / If we can ' t support multichannel output downmix <nl> - if ( g_guiSettings . GetBool ( " audiooutput . downmixmultichannel " ) ) <nl> - { <nl> - m_iOutputChannels = 2 ; <nl> - m_pChannelMap = channelMaps [ 1 ] ; <nl> - m_iOutputFlags = DTS_STEREO ; <nl> - } <nl> - <nl> / * adjust level should always be set , to keep samples in proper range * / <nl> / * after any downmixing has been done * / <nl> m_iOutputFlags | = DTS_ADJUST_LEVEL ; <nl> mmm a / xbmc / cores / dvdplayer / DVDCodecs / Audio / DVDAudioCodecLibFaad . cpp <nl> ppp b / xbmc / cores / dvdplayer / DVDCodecs / Audio / DVDAudioCodecLibFaad . cpp <nl> bool CDVDAudioCodecLibFaad : : OpenDecoder ( ) <nl> <nl> / / modify some stuff here <nl> pConfiguration - > outputFormat = FAAD_FMT_16BIT ; / / already default <nl> - # ifdef __APPLE__ <nl> - pConfiguration - > downMatrix = g_guiSettings . GetBool ( " audiooutput . downmixmultichannel " ) ? 1 : 0 ; <nl> - # else <nl> pConfiguration - > downMatrix = 0 ; <nl> - # endif <nl> <nl> m_dll . faacDecSetConfiguration ( m_pHandle , pConfiguration ) ; <nl> <nl> mmm a / xbmc / cores / dvdplayer / DVDCodecs / Audio / DVDAudioCodecLiba52 . cpp <nl> ppp b / xbmc / cores / dvdplayer / DVDCodecs / Audio / DVDAudioCodecLiba52 . cpp <nl> void CDVDAudioCodecLiba52 : : SetupChannels ( int flags ) <nl> m_iOutputChannels = m_iSourceChannels ; <nl> m_iOutputFlags = m_iSourceFlags ; <nl> <nl> - / / If we can ' t support multichannel output downmix <nl> - if ( g_guiSettings . GetBool ( " audiooutput . downmixmultichannel " ) ) <nl> - { <nl> - m_iOutputChannels = 2 ; <nl> - m_pChannelMap = channelMaps [ 1 ] ; <nl> - m_iOutputFlags = A52_STEREO ; <nl> - if ( m_iSourceChannels > 2 ) <nl> - m_Gain = pow ( 2 . 0f , g_advancedSettings . m_ac3Gain / 6 . 0f ) ; / / Hack for downmix attenuation <nl> - } <nl> - <nl> / * adjust level should always be set , to keep samples in proper range * / <nl> / * after any downmixing has been done * / <nl> m_iOutputFlags | = A52_ADJUST_LEVEL ; <nl>
[ OSX ] downmix analog audio to the user - selected format and remove code using audiooutput . downmixmultichannel , refs trac . Big thanks to Nils
xbmc/xbmc
be84ff50f0eed9291a9d399c7a05c08cff4e7e23
2010-06-28T23:24:33Z
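The core of the change: `AddPackets` now routes every PCM packet through a remap step (source layout to output speaker order) into the preallocated `m_RemapBuffer` before caching it. A hedged sketch of per-frame remapping for interleaved 16-bit PCM; the layouts and map below are invented, and XBMC's real `CPCMRemap` also applies downmix gains rather than just reordering:

```cpp
#include <cstdint>
#include <cstdio>
#include <vector>

// Reorder interleaved 16-bit PCM from a source layout to an output layout.
// map[i] names the source channel that feeds output channel i.
static void RemapFrames(const int16_t* in, int16_t* out, size_t frames,
                        const std::vector<int>& map, int in_channels)
{
  const int out_channels = static_cast<int>(map.size());
  for (size_t f = 0; f < frames; ++f)
    for (int c = 0; c < out_channels; ++c)
      out[f * out_channels + c] = in[f * in_channels + map[c]];
}

int main()
{
  // One invented 3.0 frame ordered (L, R, C), remapped to (C, L, R).
  const int16_t in[3] = {1000, -1000, 500};
  int16_t out[3] = {};
  RemapFrames(in, out, /*frames=*/1, /*map=*/{2, 0, 1}, /*in_channels=*/3);
  std::printf("%d %d %d\n", out[0], out[1], out[2]); // prints: 500 1000 -1000
  return 0;
}
```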
mmm a / modules / stitching / include / opencv2 / stitching / detail / matchers . hpp <nl> ppp b / modules / stitching / include / opencv2 / stitching / detail / matchers . hpp <nl> <nl> <nl> # include " opencv2 / opencv_modules . hpp " <nl> <nl> - # ifdef HAVE_OPENCV_NONFREE <nl> - # include " opencv2 / nonfree / cuda . hpp " <nl> + # ifdef HAVE_OPENCV_XFEATURES2D <nl> + # include " opencv2 / xfeatures2d / cuda . hpp " <nl> # endif <nl> <nl> namespace cv { <nl> class CV_EXPORTS OrbFeaturesFinder : public FeaturesFinder <nl> } ; <nl> <nl> <nl> - # ifdef HAVE_OPENCV_NONFREE <nl> + # ifdef HAVE_OPENCV_XFEATURES2D <nl> class CV_EXPORTS SurfFeaturesFinderGpu : public FeaturesFinder <nl> { <nl> public : <nl> mmm a / modules / stitching / src / matchers . cpp <nl> ppp b / modules / stitching / src / matchers . cpp <nl> using namespace cv ; <nl> using namespace cv : : detail ; <nl> using namespace cv : : cuda ; <nl> <nl> - # ifdef HAVE_OPENCV_NONFREE <nl> - # include " opencv2 / nonfree . hpp " <nl> + # ifdef HAVE_OPENCV_XFEATURES2D <nl> + # include " opencv2 / xfeatures2d . hpp " <nl> <nl> - static bool makeUseOfNonfree = initModule_nonfree ( ) ; <nl> + static bool makeUseOfNonfree = initModule_xfeatures2d ( ) ; <nl> # endif <nl> <nl> namespace { <nl> void OrbFeaturesFinder : : find ( InputArray image , ImageFeatures & features ) <nl> } <nl> } <nl> <nl> - # ifdef HAVE_OPENCV_NONFREE <nl> + # ifdef HAVE_OPENCV_XFEATURES2D <nl> SurfFeaturesFinderGpu : : SurfFeaturesFinderGpu ( double hess_thresh , int num_octaves , int num_layers , <nl> int num_octaves_descr , int num_layers_descr ) <nl> { <nl> mmm a / modules / stitching / src / precomp . hpp <nl> ppp b / modules / stitching / src / precomp . hpp <nl> <nl> # include " opencv2 / cuda . hpp " <nl> # endif <nl> <nl> - # ifdef HAVE_OPENCV_NONFREE <nl> - # include " opencv2 / nonfree / cuda . hpp " <nl> + # ifdef HAVE_OPENCV_XFEATURES2D <nl> + # include " opencv2 / xfeatures2d / cuda . hpp " <nl> # endif <nl> <nl> # include " . . / . . / imgproc / src / gcgraph . hpp " <nl> mmm a / modules / stitching / src / stitcher . cpp <nl> ppp b / modules / stitching / src / stitcher . cpp <nl> Stitcher Stitcher : : createDefault ( bool try_use_gpu ) <nl> # ifdef HAVE_OPENCV_CUDA <nl> if ( try_use_gpu & & cuda : : getCudaEnabledDeviceCount ( ) > 0 ) <nl> { <nl> - # ifdef HAVE_OPENCV_NONFREE <nl> + # ifdef HAVE_OPENCV_XFEATURES2D <nl> stitcher . setFeaturesFinder ( makePtr < detail : : SurfFeaturesFinderGpu > ( ) ) ; <nl> # else <nl> stitcher . setFeaturesFinder ( makePtr < detail : : OrbFeaturesFinder > ( ) ) ; <nl> Stitcher Stitcher : : createDefault ( bool try_use_gpu ) <nl> else <nl> # endif <nl> { <nl> - # ifdef HAVE_OPENCV_NONFREE <nl> + # ifdef HAVE_OPENCV_XFEATURES2D <nl> stitcher . setFeaturesFinder ( makePtr < detail : : SurfFeaturesFinder > ( ) ) ; <nl> # else <nl> stitcher . setFeaturesFinder ( makePtr < detail : : OrbFeaturesFinder > ( ) ) ; <nl> mmm a / modules / stitching / test / test_matchers . cpp <nl> ppp b / modules / stitching / test / test_matchers . cpp <nl> <nl> # include " test_precomp . hpp " <nl> # include " opencv2 / opencv_modules . hpp " <nl> <nl> - # ifdef HAVE_OPENCV_NONFREE <nl> + # ifdef HAVE_OPENCV_XFEATURES2D <nl> <nl> using namespace cv ; <nl> using namespace std ; <nl> mmm a / modules / world / src / precomp . hpp <nl> ppp b / modules / world / src / precomp . hpp <nl> <nl> # ifdef HAVE_OPENCV_FEATURES2D <nl> # include " opencv2 / features2d . 
hpp " <nl> # endif <nl> - # ifdef HAVE_OPENCV_NONFREE <nl> - # include " opencv2 / nonfree . hpp " <nl> + # ifdef HAVE_OPENCV_XFEATURES2D <nl> + # include " opencv2 / xfeatures2d / nonfree . hpp " <nl> # endif <nl> <nl> # include " opencv2 / world . hpp " <nl> mmm a / modules / world / src / world_init . cpp <nl> ppp b / modules / world / src / world_init . cpp <nl> bool cv : : initAll ( ) <nl> # ifdef HAVE_OPENCV_FEATURES2D <nl> & & initModule_features2d ( ) <nl> # endif <nl> - # ifdef HAVE_OPENCV_NONFREE <nl> - & & initModule_nonfree ( ) <nl> + # ifdef HAVE_OPENCV_XFEATURES2D <nl> + & & initModule_xfeatures2d ( ) <nl> # endif <nl> ; <nl> } <nl> mmm a / samples / cpp / stitching_detailed . cpp <nl> ppp b / samples / cpp / stitching_detailed . cpp <nl> int main ( int argc , char * argv [ ] ) <nl> Ptr < FeaturesFinder > finder ; <nl> if ( features_type = = " surf " ) <nl> { <nl> - # ifdef HAVE_OPENCV_NONFREE <nl> + # ifdef HAVE_OPENCV_XFEATURES2D <nl> if ( try_cuda & & cuda : : getCudaEnabledDeviceCount ( ) > 0 ) <nl> finder = makePtr < SurfFeaturesFinderGpu > ( ) ; <nl> else <nl> mmm a / samples / gpu / CMakeLists . txt <nl> ppp b / samples / gpu / CMakeLists . txt <nl> if ( BUILD_EXAMPLES AND OCV_DEPENDENCIES_FOUND ) <nl> " $ { OpenCV_SOURCE_DIR } / modules / gpu / src / nvidia / core " <nl> ) <nl> <nl> - if ( HAVE_opencv_nonfree ) <nl> - ocv_include_directories ( " $ { OpenCV_SOURCE_DIR } / modules / nonfree / include " ) <nl> + if ( HAVE_opencv_xfeatures2d ) <nl> + ocv_include_directories ( " $ { OpenCV_SOURCE_DIR } / modules / xfeatures2d / include " ) <nl> endif ( ) <nl> <nl> if ( HAVE_opencv_cudacodec ) <nl> if ( BUILD_EXAMPLES AND OCV_DEPENDENCIES_FOUND ) <nl> ocv_target_link_libraries ( $ { the_target } $ { CUDA_CUDA_LIBRARY } ) <nl> endif ( ) <nl> <nl> - if ( HAVE_opencv_nonfree ) <nl> - ocv_target_link_libraries ( $ { the_target } opencv_nonfree ) <nl> + if ( HAVE_opencv_xfeatures2d ) <nl> + ocv_target_link_libraries ( $ { the_target } opencv_xfeatures2d ) <nl> endif ( ) <nl> if ( HAVE_opencv_cudacodec ) <nl> ocv_target_link_libraries ( $ { the_target } opencv_cudacodec ) <nl> mmm a / samples / gpu / performance / CMakeLists . txt <nl> ppp b / samples / gpu / performance / CMakeLists . txt <nl> set ( the_target " example_gpu_performance " ) <nl> file ( GLOB sources " performance / * . cpp " ) <nl> file ( GLOB headers " performance / * . h " ) <nl> <nl> - if ( HAVE_opencv_nonfree ) <nl> - ocv_include_directories ( " $ { OpenCV_SOURCE_DIR } / modules / nonfree / include " ) <nl> + if ( HAVE_opencv_xfeatures2d ) <nl> + ocv_include_directories ( " $ { OpenCV_SOURCE_DIR } / modules / xfeatures2d / include " ) <nl> endif ( ) <nl> <nl> add_executable ( $ { the_target } $ { sources } $ { headers } ) <nl> ocv_target_link_libraries ( $ { the_target } $ { OPENCV_LINKER_LIBS } $ { OPENCV_CUDA_SAMPLES_REQUIRED_DEPS } ) <nl> <nl> - if ( HAVE_opencv_nonfree ) <nl> - ocv_target_link_libraries ( $ { the_target } opencv_nonfree ) <nl> + if ( HAVE_opencv_xfeatures2d ) <nl> + ocv_target_link_libraries ( $ { the_target } opencv_xfeatures2d ) <nl> endif ( ) <nl> <nl> set_target_properties ( $ { the_target } PROPERTIES <nl> mmm a / samples / gpu / performance / tests . cpp <nl> ppp b / samples / gpu / performance / tests . cpp <nl> <nl> <nl> # include " opencv2 / opencv_modules . hpp " <nl> <nl> - # ifdef HAVE_OPENCV_NONFREE <nl> - # include " opencv2 / nonfree / cuda . hpp " <nl> - # include " opencv2 / nonfree / nonfree . 
hpp " <nl> + # ifdef HAVE_OPENCV_XFEATURES2D <nl> + # include " opencv2 / xfeatures2d / cuda . hpp " <nl> + # include " opencv2 / xfeatures2d / nonfree . hpp " <nl> # endif <nl> <nl> using namespace std ; <nl> TEST ( meanShift ) <nl> } <nl> } <nl> <nl> - # ifdef HAVE_OPENCV_NONFREE <nl> + # ifdef HAVE_OPENCV_XFEATURES2D <nl> <nl> TEST ( SURF ) <nl> { <nl> mmm a / samples / gpu / surf_keypoint_matcher . cpp <nl> ppp b / samples / gpu / surf_keypoint_matcher . cpp <nl> <nl> <nl> # include " opencv2 / opencv_modules . hpp " <nl> <nl> - # ifdef HAVE_OPENCV_NONFREE <nl> + # ifdef HAVE_OPENCV_XFEATURES2D <nl> <nl> # include " opencv2 / core / core . hpp " <nl> # include " opencv2 / features2d / features2d . hpp " <nl> # include " opencv2 / highgui / highgui . hpp " <nl> # include " opencv2 / cudafeatures2d . hpp " <nl> - # include " opencv2 / nonfree / cuda . hpp " <nl> + # include " opencv2 / xfeatures2d / cuda . hpp " <nl> <nl> using namespace std ; <nl> using namespace cv ; <nl>
Merge pull request from StevenPuttemans : fix_nonfree_includes
opencv/opencv
b5d073ad3faa96b5ad16a5a6ea71e08e7d55cb2a
2014-08-26T16:03:53Z
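Functionally the diff is a rename of the feature-test macro, but it shows the pattern the stitcher relies on: pick the SURF finder when the optional module was compiled in, otherwise fall back to ORB. A generic sketch of that compile-time fallback; `HAVE_XFEATURES2D` and the finder classes are stand-ins, not OpenCV's:

```cpp
#include <cstdio>
#include <memory>

struct FeaturesFinder {
  virtual ~FeaturesFinder() = default;
  virtual const char* name() const = 0;
};

struct OrbFinder : FeaturesFinder {
  const char* name() const override { return "ORB"; }
};

#ifdef HAVE_XFEATURES2D // stand-in for HAVE_OPENCV_XFEATURES2D
struct SurfFinder : FeaturesFinder {
  const char* name() const override { return "SURF"; }
};
#endif

// Mirrors the shape of Stitcher::createDefault: prefer the optional
// module's finder, fall back to the always-available one.
static std::unique_ptr<FeaturesFinder> makeDefaultFinder()
{
#ifdef HAVE_XFEATURES2D
  return std::make_unique<SurfFinder>();
#else
  return std::make_unique<OrbFinder>();
#endif
}

int main()
{
  std::printf("using %s features\n", makeDefaultFinder()->name());
  return 0;
}
```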
mmm a / tensorflow / compiler / xla / service / gpu / gpu_compiler . cc <nl> ppp b / tensorflow / compiler / xla / service / gpu / gpu_compiler . cc <nl> limitations under the License . <nl> # include " tensorflow / core / platform / subprocess . h " <nl> # include " tensorflow / core / platform / tracing . h " <nl> # include " tensorflow / core / profiler / lib / traceme . h " <nl> + # include " tensorflow / core / util / env_var . h " <nl> <nl> namespace xla { <nl> namespace gpu { <nl> Status GpuCompiler : : PrepareHloModuleForIrEmitting ( HloModule * hlo_module ) { <nl> return pipeline . Run ( hlo_module ) . status ( ) ; <nl> } <nl> <nl> + / / TODO ( cheshire ) : Duplication with gpu_conv_algorithm picker , figure out a <nl> + / / right way to share this . <nl> + static bool RequireDeterminism ( ) { <nl> + bool deterministic_ops = false ; <nl> + TF_CHECK_OK ( tensorflow : : ReadBoolFromEnvVar ( " TF_DETERMINISTIC_OPS " , <nl> + / * default_val = * / false , <nl> + & deterministic_ops ) ) ; <nl> + return deterministic_ops ; <nl> + } <nl> + <nl> Status GpuCompiler : : OptimizeHloPostLayoutAssignment ( <nl> HloModule * hlo_module , se : : StreamExecutor * stream_exec , <nl> se : : DeviceMemoryAllocator * device_allocator ) { <nl> Status GpuCompiler : : OptimizeHloPostLayoutAssignment ( <nl> options . set_is_layout_sensitive ( true ) ; <nl> pipeline . AddPass < HloPassFix < AlgebraicSimplifier > > ( options ) ; <nl> <nl> - if ( hlo_module - > config ( ) . debug_options ( ) . xla_gpu_deterministic_reductions ( ) ) { <nl> + if ( RequireDeterminism ( ) | | <nl> + hlo_module - > config ( ) . debug_options ( ) . xla_gpu_deterministic_reductions ( ) ) { <nl> pipeline . AddPass < HloPassFix < GpuTreeReductionRewriter > > ( ) ; <nl> } <nl> <nl> mmm a / tensorflow / compiler / xla / service / gpu / nvptx_compiler . cc <nl> ppp b / tensorflow / compiler / xla / service / gpu / nvptx_compiler . cc <nl> limitations under the License . <nl> # include " tensorflow / core / platform / cuda_libdevice_path . h " <nl> # include " tensorflow / core / platform / tracing . h " <nl> # include " tensorflow / core / profiler / lib / traceme . h " <nl> - # include " tensorflow / core / util / env_var . h " <nl> # include " tensorflow / stream_executor / cuda / cuda_diagnostics . h " <nl> # include " tensorflow / stream_executor / gpu / asm_compiler . h " <nl> <nl> Status NVPTXCompiler : : OptimizeHloConvolutionCanonicalization ( <nl> return Status : : OK ( ) ; <nl> } <nl> <nl> - / / TODO ( cheshire ) : Duplication with gpu_conv_algorithm picker , figure out a <nl> - / / right way to share this . <nl> - static bool RequireDeterminism ( ) { <nl> - bool deterministic_ops = false ; <nl> - TF_CHECK_OK ( tensorflow : : ReadBoolFromEnvVar ( " TF_DETERMINISTIC_OPS " , <nl> - / * default_val = * / false , <nl> - & deterministic_ops ) ) ; <nl> - return deterministic_ops ; <nl> - } <nl> - <nl> Status NVPTXCompiler : : OptimizeHloPostLayoutAssignment ( <nl> HloModule * hlo_module , se : : StreamExecutor * stream_exec , <nl> se : : DeviceMemoryAllocator * device_allocator ) { <nl>
Bring in commit diff from 8b7a3db0b6e09415b5640be4986fb4d7c6e5209a
tensorflow/tensorflow
3104a4c016104b760d861858f2cbe18ae50dcafd
2020-02-20T17:27:19Z
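`RequireDeterminism` consults the `TF_DETERMINISTIC_OPS` environment variable through `tensorflow::ReadBoolFromEnvVar`. A standalone approximation with `std::getenv`; the accepted spellings below are simplified assumptions, as TensorFlow's actual parser is more permissive:

```cpp
#include <cstdio>
#include <cstdlib>
#include <cstring>

// Rough stand-in for tensorflow::ReadBoolFromEnvVar: "1" or "true" means
// true (case-sensitive here for brevity), unset means the default.
static bool ReadBoolFromEnv(const char* name, bool default_val)
{
  const char* value = std::getenv(name);
  if (value == nullptr) return default_val;
  return std::strcmp(value, "1") == 0 || std::strcmp(value, "true") == 0;
}

int main()
{
  const bool deterministic =
      ReadBoolFromEnv("TF_DETERMINISTIC_OPS", /*default_val=*/false);
  std::printf("deterministic reductions: %s\n", deterministic ? "on" : "off");
  return 0;
}
```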
mmm a / imgui . cpp <nl> ppp b / imgui . cpp <nl> bool ImGui : : CollapsingHeader ( const char * label , const char * str_id , bool display <nl> <nl> ImGuiState & g = * GImGui ; <nl> const ImGuiStyle & style = g . Style ; <nl> + const ImVec2 padding = display_frame ? style . FramePadding : ImVec2 ( style . FramePadding . x , 0 . 0f ) ; <nl> <nl> IM_ASSERT ( str_id ! = NULL | | label ! = NULL ) ; <nl> if ( str_id = = NULL ) <nl> bool ImGui : : CollapsingHeader ( const char * label , const char * str_id , bool display <nl> label = str_id ; <nl> const ImGuiID id = window - > GetID ( str_id ) ; <nl> <nl> - / / Framed header expand a little outside the default padding <nl> const ImVec2 label_size = CalcTextSize ( label , NULL , true ) ; <nl> - const float text_base_offset_y = display_frame ? ImMax ( 0 . 0f , window - > DC . CurrentLineTextBaseOffset - style . FramePadding . y ) : window - > DC . CurrentLineTextBaseOffset ; / / Acquire before ItemAdd ( ) <nl> - const float frame_height = ImMax ( ImMin ( window - > DC . CurrentLineHeight , g . FontSize + g . Style . FramePadding . y * 2 ) , label_size . y + ( display_frame ? style . FramePadding . y * 2 : 0 . 0f ) ) ; / / We vertically grow up to current line height up the typical widget height . <nl> + const float text_base_offset_y = ImMax ( 0 . 0f , window - > DC . CurrentLineTextBaseOffset - padding . y ) ; <nl> + const float frame_height = ImMax ( ImMin ( window - > DC . CurrentLineHeight , g . FontSize + g . Style . FramePadding . y * 2 ) , label_size . y + padding . y * 2 ) ; / / We vertically grow up to current line height up the typical widget height . <nl> ImRect frame_bb = ImRect ( window - > DC . CursorPos , ImVec2 ( window - > Pos . x + GetContentRegionMax ( ) . x , window - > DC . CursorPos . y + frame_height ) ) ; <nl> if ( display_frame ) <nl> { <nl> + / / Framed header expand a little outside the default padding <nl> frame_bb . Min . x - = ( float ) ( int ) ( window - > WindowPadding . x * 0 . 5f ) - 1 ; <nl> frame_bb . Max . x + = ( float ) ( int ) ( window - > WindowPadding . x * 0 . 5f ) - 1 ; <nl> } <nl> <nl> - const float collapser_width = g . FontSize + style . FramePadding . x * 2 ; <nl> - const float text_width = collapser_width + ( label_size . x > 0 . 0f ? label_size . x + style . FramePadding . x * 2 : 0 . 0f ) ; / / Include collapser <nl> + const float collapser_width = g . FontSize + padding . x * 2 ; <nl> + const float text_width = collapser_width + ( label_size . x > 0 . 0f ? label_size . x + padding . x * 2 : 0 . 0f ) ; / / Include collapser <nl> const ImVec2 layout_size = ImVec2 ( text_width , frame_height ) ; <nl> ItemSize ( layout_size , text_base_offset_y ) ; <nl> <nl> - const ImRect interact_bb = display_frame ? frame_bb : ImRect ( frame_bb . Min . x , frame_bb . Min . y , frame_bb . Min . x + text_width + style . ItemSpacing . x * 2 , frame_bb . Max . y ) ; / / Arbitrary allowing to click past 2 worth of ItemSpacing <nl> + / / For regular tree nodes , we arbitrarily allow clicking past 2 worth of ItemSpacing <nl> + / / ( Ideally we ' d want to add a flag for the user to specify if we want the hit test to be done up to the right side of the content or not ) <nl> + const ImRect interact_bb = display_frame ? frame_bb : ImRect ( frame_bb . Min . x , frame_bb . Min . y , frame_bb . Min . x + text_width + style . ItemSpacing . x * 2 , frame_bb . Max . y ) ; <nl> bool opened = TreeNodeBehaviorIsOpened ( id , ( default_open ? ImGuiTreeNodeFlags_DefaultOpen : 0 ) | ( display_frame ? 
ImGuiTreeNodeFlags_NoAutoExpandOnLog : 0 ) ) ; <nl> if ( ! ItemAdd ( interact_bb , & id ) ) <nl> return opened ; <nl> bool ImGui : : CollapsingHeader ( const char * label , const char * str_id , bool display <nl> { <nl> / / Framed type <nl> RenderFrame ( frame_bb . Min , frame_bb . Max , col , true , style . FrameRounding ) ; <nl> - RenderCollapseTriangle ( frame_bb . Min + style . FramePadding + ImVec2 ( 0 . 0f , text_base_offset_y ) , opened , 1 . 0f , true ) ; <nl> - const ImVec2 text_pos = frame_bb . Min + style . FramePadding + ImVec2 ( collapser_width , text_base_offset_y ) ; <nl> + RenderCollapseTriangle ( frame_bb . Min + padding + ImVec2 ( 0 . 0f , text_base_offset_y ) , opened , 1 . 0f , true ) ; <nl> + const ImVec2 text_pos = frame_bb . Min + padding + ImVec2 ( collapser_width , text_base_offset_y ) ; <nl> if ( g . LogEnabled ) <nl> { <nl> / / NB : ' # # ' is normally used to hide text ( as a library - wide feature ) , so we need to specify the text range to make sure the # # aren ' t stripped out here . <nl> bool ImGui : : CollapsingHeader ( const char * label , const char * str_id , bool display <nl> if ( hovered ) <nl> RenderFrame ( frame_bb . Min , frame_bb . Max , col , false ) ; <nl> <nl> - RenderCollapseTriangle ( frame_bb . Min + ImVec2 ( style . FramePadding . x , g . FontSize * 0 . 15f + text_base_offset_y ) , opened , 0 . 70f , false ) ; <nl> + RenderCollapseTriangle ( frame_bb . Min + ImVec2 ( padding . x , g . FontSize * 0 . 15f + text_base_offset_y ) , opened , 0 . 70f , false ) ; <nl> if ( g . LogEnabled ) <nl> LogRenderedText ( frame_bb . Min + ImVec2 ( collapser_width , text_base_offset_y ) , " > " ) ; <nl> RenderText ( frame_bb . Min + ImVec2 ( collapser_width , text_base_offset_y ) , label ) ; <nl>
TreeNode / CollapsingHeader : some minor cleanup of that horrible function .
ocornut/imgui
d1b4159b51d944be5258d48ad8dce6fdfe4f4d80
2015-11-29T22:25:13Z
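The height computation in this cleanup clamps the header between the typical widget height and what the label needs: frame_height = max(min(CurrentLineHeight, FontSize + FramePadding.y * 2), label_size.y + padding.y * 2). Worked through with invented pixel metrics:

```cpp
#include <algorithm>
#include <cstdio>

int main()
{
  // Invented metrics, in pixels.
  const float font_size = 16.0f;
  const float frame_padding_y = 4.0f;
  const float current_line_height = 28.0f; // line already grown by neighbors
  const float label_height = 16.0f;
  const bool display_frame = true;
  const float padding_y = display_frame ? frame_padding_y : 0.0f;

  // Same clamp as CollapsingHeader: grow up to the typical widget height,
  // but never below the label plus its padding.
  const float widget_height = font_size + frame_padding_y * 2.0f; // 24
  const float frame_height =
      std::max(std::min(current_line_height, widget_height),
               label_height + padding_y * 2.0f);
  std::printf("frame_height = %.1f\n", frame_height); // 24.0
  return 0;
}
```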
mmm a / src / hydrogen - instructions . cc <nl> ppp b / src / hydrogen - instructions . cc <nl> HCheckMaps * HCheckMaps : : New ( HValue * value , <nl> } <nl> <nl> <nl> + HCheckMaps * HCheckMaps : : NewWithTransitions ( HValue * value , <nl> + Handle < Map > map , <nl> + Zone * zone , <nl> + CompilationInfo * info ) { <nl> + HCheckMaps * check_map = new ( zone ) HCheckMaps ( value , zone , value ) ; <nl> + check_map - > map_set_ . Add ( map , zone ) ; <nl> + <nl> + / / Since transitioned elements maps of the initial map don ' t fail the map <nl> + / / check , the CheckMaps instruction doesn ' t need to depend on ElementsKinds . <nl> + check_map - > ClearGVNFlag ( kDependsOnElementsKind ) ; <nl> + <nl> + ElementsKind kind = map - > elements_kind ( ) ; <nl> + bool packed = IsFastPackedElementsKind ( kind ) ; <nl> + while ( CanTransitionToMoreGeneralFastElementsKind ( kind , packed ) ) { <nl> + kind = GetNextMoreGeneralFastElementsKind ( kind , packed ) ; <nl> + Map * transitioned_map = <nl> + map - > LookupElementsTransitionMap ( kind ) ; <nl> + if ( transitioned_map ) { <nl> + check_map - > map_set_ . Add ( Handle < Map > ( transitioned_map ) , zone ) ; <nl> + } <nl> + } ; <nl> + <nl> + if ( map - > CanOmitMapChecks ( ) & & <nl> + value - > IsConstant ( ) & & <nl> + HConstant : : cast ( value ) - > InstanceOf ( map ) ) { <nl> + check_map - > omit ( info ) ; <nl> + } <nl> + <nl> + check_map - > map_set_ . Sort ( ) ; <nl> + return check_map ; <nl> + } <nl> + <nl> + <nl> void HCheckMaps : : FinalizeUniqueValueId ( ) { <nl> if ( ! map_unique_ids_ . is_empty ( ) ) return ; <nl> Zone * zone = block ( ) - > zone ( ) ; <nl> mmm a / src / hydrogen - instructions . h <nl> ppp b / src / hydrogen - instructions . h <nl> class HCheckMaps : public HTemplateInstruction < 2 > { <nl> } <nl> <nl> static HCheckMaps * NewWithTransitions ( HValue * value , Handle < Map > map , <nl> - Zone * zone ) { <nl> - HCheckMaps * check_map = new ( zone ) HCheckMaps ( value , zone , value ) ; <nl> - check_map - > map_set_ . Add ( map , zone ) ; <nl> - <nl> - / / Since transitioned elements maps of the initial map don ' t fail the map <nl> - / / check , the CheckMaps instruction doesn ' t need to depend on ElementsKinds . <nl> - check_map - > ClearGVNFlag ( kDependsOnElementsKind ) ; <nl> - <nl> - ElementsKind kind = map - > elements_kind ( ) ; <nl> - bool packed = IsFastPackedElementsKind ( kind ) ; <nl> - while ( CanTransitionToMoreGeneralFastElementsKind ( kind , packed ) ) { <nl> - kind = GetNextMoreGeneralFastElementsKind ( kind , packed ) ; <nl> - Map * transitioned_map = <nl> - map - > LookupElementsTransitionMap ( kind ) ; <nl> - if ( transitioned_map ) { <nl> - check_map - > map_set_ . Add ( Handle < Map > ( transitioned_map ) , zone ) ; <nl> - } <nl> - } ; <nl> - check_map - > map_set_ . Sort ( ) ; <nl> - return check_map ; <nl> - } <nl> + Zone * zone , CompilationInfo * info ) ; <nl> <nl> bool CanOmitMapChecks ( ) { return omit_ ; } <nl> <nl> mmm a / src / hydrogen . cc <nl> ppp b / src / hydrogen . 
cc <nl> void HOptimizedGraphBuilder : : AddCheckMap ( HValue * object , Handle < Map > map ) { <nl> void HOptimizedGraphBuilder : : AddCheckMapsWithTransitions ( HValue * object , <nl> Handle < Map > map ) { <nl> BuildCheckHeapObject ( object ) ; <nl> - AddInstruction ( HCheckMaps : : NewWithTransitions ( object , map , zone ( ) ) ) ; <nl> + AddInstruction ( HCheckMaps : : NewWithTransitions ( <nl> + object , map , zone ( ) , top_info ( ) ) ) ; <nl> } <nl> <nl> <nl> mmm a / test / mjsunit / omit - constant - mapcheck . js <nl> ppp b / test / mjsunit / omit - constant - mapcheck . js <nl> assertEquals ( 2 , load2 ( ) ) ; <nl> g2 . b = 10 ; <nl> g2 . a = 5 ; <nl> assertEquals ( 5 , load2 ( ) ) ; <nl> + <nl> + var g3 = { a : 2 , b : 9 , c : 1 } <nl> + <nl> + function store ( v ) { <nl> + g3 . a = v ; <nl> + return g3 . a ; <nl> + } <nl> + <nl> + assertEquals ( 5 , store ( 5 ) ) ; <nl> + assertEquals ( 8 , store ( 8 ) ) ; <nl> + % OptimizeFunctionOnNextCall ( store ) ; <nl> + assertEquals ( 10 , store ( 10 ) ) ; <nl> + delete g3 . c ; <nl> + store ( 7 ) ; <nl> + assertEquals ( { a : 7 , b : 9 } , g3 ) ; <nl>
Also eliminate map checks with transitions .
v8/v8
7e08f81e6d218fac0032f27f8e01c885d82e9146
2013-07-23T10:01:06Z
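The relocated NewWithTransitions walks the fast elements-kind lattice and adds every more general transitioned map to the accepted set, which is why a store that only generalizes the elements kind no longer fails the map check. A self-contained sketch of that walk, using a deliberately simplified stand-in lattice rather than V8's real ElementsKind helpers:

#include <iostream>
#include <vector>

// Hypothetical three-step lattice: SMI elements can generalize to DOUBLE,
// and DOUBLE to OBJECT. V8's real lattice also distinguishes packed/holey.
enum ElementsKind { FAST_SMI, FAST_DOUBLE, FAST_OBJECT, LAST_FAST = FAST_OBJECT };

bool CanTransitionToMoreGeneral(ElementsKind kind) { return kind < LAST_FAST; }
ElementsKind NextMoreGeneral(ElementsKind kind) { return ElementsKind(kind + 1); }

// Analogous to the loop in HCheckMaps::NewWithTransitions: collect the map's
// own kind plus every kind reachable by a generalizing transition.
std::vector<ElementsKind> CollectAcceptedKinds(ElementsKind kind)
{
    std::vector<ElementsKind> accepted{kind};
    while (CanTransitionToMoreGeneral(kind))
    {
        kind = NextMoreGeneral(kind);
        accepted.push_back(kind); // like map_set_.Add(transitioned_map)
    }
    return accepted;
}

int main()
{
    for (ElementsKind k : CollectAcceptedKinds(FAST_SMI))
        std::cout << int(k) << '\n'; // 0 1 2: every kind the check accepts
}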
mmm a / Telegram / Resources / tl / api . tl <nl> ppp b / Telegram / Resources / tl / api . tl <nl> peerBlocked # e8fd8014 peer_id : Peer date : int = PeerBlocked ; <nl> stats . messageStats # 8999f295 views_graph : StatsGraph = stats . MessageStats ; <nl> <nl> groupCallDiscarded # 7780bcb4 id : long access_hash : long duration : int = GroupCall ; <nl> - groupCall # 55903081 flags : # id : long access_hash : long participants_count : int params : flags . 0 ? DataJSON version : int = GroupCall ; <nl> + groupCall # 55903081 flags : # join_muted : flags . 1 ? true can_change_join_muted : flags . 2 ? true id : long access_hash : long participants_count : int params : flags . 0 ? DataJSON version : int = GroupCall ; <nl> <nl> inputGroupCall # d8aa840f id : long access_hash : long = InputGroupCall ; <nl> <nl> - groupCallParticipant # 89a0d26c flags : # muted : flags . 0 ? true left : flags . 1 ? true can_self_unmute : flags . 2 ? true user_id : int date : int source : int = GroupCallParticipant ; <nl> + groupCallParticipant # 56b087c9 flags : # muted : flags . 0 ? true left : flags . 1 ? true can_self_unmute : flags . 2 ? true user_id : int date : int active_date : flags . 3 ? int source : int = GroupCallParticipant ; <nl> <nl> - phone . groupCall # 564c9fd8 call : GroupCall sources : Vector < int > participants : Vector < GroupCallParticipant > users : Vector < User > = phone . GroupCall ; <nl> + phone . groupCall # 985c2087 call : GroupCall sources : Vector < int > participants : Vector < GroupCallParticipant > participants_next_offset : string users : Vector < User > = phone . GroupCall ; <nl> <nl> - phone . groupParticipants # 3cdb7991 count : int participants : Vector < GroupCallParticipant > users : Vector < User > version : int = phone . GroupParticipants ; <nl> + phone . groupParticipants # 9cfeb92d count : int participants : Vector < GroupCallParticipant > next_offset : string users : Vector < User > version : int = phone . GroupParticipants ; <nl> <nl> mmmfunctionsmmm <nl> <nl> phone . saveCallDebug # 277add7e peer : InputPhoneCall debug : DataJSON = Bool ; <nl> phone . sendSignalingData # ff7a9383 peer : InputPhoneCall data : bytes = Bool ; <nl> phone . createGroupCall # e428fa02 channel : InputChannel random_id : int = Updates ; <nl> phone . joinGroupCall # 5f9c8e62 flags : # muted : flags . 0 ? true call : InputGroupCall params : DataJSON = Updates ; <nl> - phone . leaveGroupCall # 60e98e5f call : InputGroupCall = Updates ; <nl> + phone . leaveGroupCall # 500377f9 call : InputGroupCall source : int = Updates ; <nl> phone . editGroupCallMember # 63146ae4 flags : # muted : flags . 0 ? true call : InputGroupCall user_id : InputUser = Updates ; <nl> phone . inviteToGroupCall # 7b393160 call : InputGroupCall users : Vector < InputUser > = Updates ; <nl> phone . discardGroupCall # 7a777135 call : InputGroupCall = Updates ; <nl> + phone . toggleGroupCallSettings # 74bbb43d flags : # call : InputGroupCall join_muted : flags . 0 ? Bool = Updates ; <nl> phone . getGroupCall # c7cb017 call : InputGroupCall = phone . GroupCall ; <nl> - phone . getGroupParticipants # de41d3b2 call : InputGroupCall max_date : int limit : int = phone . GroupParticipants ; <nl> + phone . getGroupParticipants # ae1910a4 call : InputGroupCall offset : string limit : int = phone . GroupParticipants ; <nl> phone . checkGroupCall # b74a7bea call : InputGroupCall source : int = Bool ; <nl> <nl> langpack . 
getLangPack # f2f2330a lang_pack : string lang_code : string = LangPackDifference ; <nl> mmm a / Telegram / SourceFiles / calls / calls_group_call . cpp <nl> ppp b / Telegram / SourceFiles / calls / calls_group_call . cpp <nl> void GroupCall : : finish ( FinishType type ) { <nl> | | state = = State : : Failed ) { <nl> return ; <nl> } <nl> - if ( ! joined ( ) ) { <nl> + if ( ! _mySsrc ) { <nl> setState ( finalState ) ; <nl> return ; <nl> } <nl> <nl> setState ( hangupState ) ; <nl> _api . request ( MTPphone_LeaveGroupCall ( <nl> - inputCall ( ) <nl> + inputCall ( ) , <nl> + MTP_int ( _mySsrc ) <nl> ) ) . done ( [ = ] ( const MTPUpdates & result ) { <nl> / / Here ' this ' could be destroyed by updates , so we set Ended after <nl> / / updates being handled , but in a guarded way . <nl> mmm a / Telegram / SourceFiles / calls / calls_group_members . cpp <nl> ppp b / Telegram / SourceFiles / calls / calls_group_members . cpp <nl> int GroupMembers : : desiredHeight ( ) const { <nl> } <nl> return 0 ; <nl> } ( ) ; <nl> - desired + = qMax ( count , _list - > fullRowsCount ( ) ) <nl> + desired + = std : : max ( count , _list - > fullRowsCount ( ) ) <nl> * st : : groupCallMembersList . item . height ; <nl> - return qMax ( height ( ) , desired ) ; <nl> + return std : : max ( height ( ) , desired ) ; <nl> } <nl> <nl> void GroupMembers : : setupHeader ( not_null < GroupCall * > call ) { <nl> mmm a / Telegram / SourceFiles / data / data_group_call . cpp <nl> ppp b / Telegram / SourceFiles / data / data_group_call . cpp <nl> void GroupCall : : requestParticipants ( ) { <nl> return ; <nl> } else if ( _participants . size ( ) > = _fullCount & & _allReceived ) { <nl> return ; <nl> + } else if ( _allReceived ) { <nl> + reload ( ) ; <nl> + return ; <nl> } <nl> - const auto requestFromDate = ( _allReceived | | _participants . empty ( ) ) <nl> - ? TimeId ( 0 ) <nl> - : _participants . back ( ) . date ; <nl> auto & api = _channel - > session ( ) . api ( ) ; <nl> _participantsRequestId = api . request ( MTPphone_GetGroupParticipants ( <nl> input ( ) , <nl> - MTP_int ( requestFromDate ) , <nl> + MTP_string ( _nextOffset ) , <nl> MTP_int ( kRequestPerPage ) <nl> ) ) . done ( [ = ] ( const MTPphone_GroupParticipants & result ) { <nl> result . match ( [ & ] ( const MTPDphone_groupParticipants & data ) { <nl> + _nextOffset = qs ( data . vnext_offset ( ) ) ; <nl> _channel - > owner ( ) . processUsers ( data . vusers ( ) ) ; <nl> applyParticipantsSlice ( data . vparticipants ( ) . v ) ; <nl> _fullCount = data . vcount ( ) . v ; <nl> mmm a / Telegram / SourceFiles / data / data_group_call . h <nl> ppp b / Telegram / SourceFiles / data / data_group_call . h <nl> class GroupCall final { <nl> <nl> std : : vector < Participant > _participants ; <nl> base : : flat_set < uint32 > _sources ; <nl> + QString _nextOffset ; <nl> int _fullCount = 0 ; <nl> int _duration = 0 ; <nl> bool _finished = false ; <nl>
Update API scheme .
telegramdesktop/tdesktop
858ee0e8c4825a8fc8fe0c349c22cb0f0f245b53
2020-12-01T06:45:22Z
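This schema change drops date-based paging (the old max_date parameter of phone.getGroupParticipants) in favor of an opaque next_offset string that the client stores and sends back verbatim, as data_group_call.cpp now does with _nextOffset. A hedged sketch of that client-side loop; Page, the fetch callback, and the empty-page termination rule are illustrative stand-ins, not the real MTProto types:

#include <functional>
#include <string>
#include <vector>

struct Page { std::vector<int> participants; std::string nextOffset; };

// fetch stands in for one MTPphone_GetGroupParticipants request.
std::vector<int> fetchAll(const std::function<Page(const std::string &, int)> &fetch)
{
    std::vector<int> all;
    std::string offset;        // empty cursor = start from the beginning
    const int kPerPage = 30;
    while (true)
    {
        Page page = fetch(offset, kPerPage);
        all.insert(all.end(), page.participants.begin(), page.participants.end());
        if (page.participants.empty())
            break;             // an empty page signals the end here
        offset = page.nextOffset; // opaque server cursor, never parsed
    }
    return all;
}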
mmm a / CODEOWNERS <nl> ppp b / CODEOWNERS <nl> <nl> / tensorflow / contrib / tpu / @ frankchn @ saeta @ jhseu @ sourabhbajaj <nl> / tensorflow / contrib / training / @ joel - shor @ ebrevdo <nl> / tensorflow / contrib / util / @ sherrym <nl> + <nl> + / third_party / systemlibs / @ perfinion <nl>
CODEOWNERS : add myself for third_party / systemlibs /
tensorflow/tensorflow
e8e7afa48d518faaecb688a16f1c7397d8c47a70
2018-08-31T18:08:53Z
mmm a / src / init . cpp <nl> ppp b / src / init . cpp <nl> bool AppInit2 ( ) <nl> <nl> if ( mapArgs . count ( " - loadblock " ) ) <nl> { <nl> + uiInterface . InitMessage ( _ ( " Importing blocks . . . " ) ) ; <nl> BOOST_FOREACH ( string strFile , mapMultiArgs [ " - loadblock " ] ) <nl> { <nl> FILE * file = fopen ( strFile . c_str ( ) , " rb " ) ; <nl>
add splashscreen message when importing blocks via - loadblock
bitcoin/bitcoin
2d914f89fee789defe1a1c692485b06105ff0ab3
2012-08-17T12:21:17Z
mmm a / modules / perception / map / hdmap / BUILD <nl> ppp b / modules / perception / map / hdmap / BUILD <nl> cc_library ( <nl> hdrs = [ " hdmap_input . h " ] , <nl> deps = [ <nl> " / / modules / common / math : geometry " , <nl> - " / / modules / common / proto : geometry_proto " , <nl> " / / modules / map / hdmap " , <nl> - " / / modules / map / proto : map_proto " , <nl> " / / modules / perception / base : base_type " , <nl> " / / modules / perception / base : blob " , <nl> " / / modules / perception / base : common " , <nl> cc_library ( <nl> " / / modules / perception / base : syncedmem " , <nl> " / / modules / perception / common / geometry : common " , <nl> " / / modules / perception / lib / config_manager " , <nl> - " / / modules / perception / proto : perception_config_schema_proto " , <nl> ] , <nl> ) <nl> <nl> cc_library ( <nl> # " / / modules / perception / base : point_cloud " , <nl> # " / / modules / perception / common / geometry : common " , <nl> # " / / modules / perception / lib / config_manager " , <nl> - # " / / modules / perception / proto : perception_config_schema_proto " , <nl> # ] , <nl> # ) <nl> <nl>
Build : make modules / perception / map / . . . pass
ApolloAuto/apollo
ee27f84409a30f51f55ba1af67b965eb8788b4a4
2020-06-08T12:31:35Z
mmm a / tensorflow / tools / api / golden / v1 / tensorflow . pbtxt <nl> ppp b / tensorflow / tools / api / golden / v1 / tensorflow . pbtxt <nl> tf_module { <nl> name : " python_io " <nl> mtype : " < type \ ' module \ ' > " <nl> } <nl> + member { <nl> + name : " pywrap_tensorflow " <nl> + mtype : " < type \ ' module \ ' > " <nl> + } <nl> member { <nl> name : " qint16 " <nl> mtype : " < class \ ' tensorflow . python . framework . dtypes . DType \ ' > " <nl>
add the tensorflow . pbtxt file that the tool changed manually
tensorflow/tensorflow
21d69d94f7d82f0f413a43ce74fea3b2f464a8b3
2019-11-14T02:09:08Z
mmm a / format . h <nl> ppp b / format . h <nl> class IntFormatSpec : public SpecT { <nl> T value_ ; <nl> <nl> public : <nl> - IntFormatSpec ( T value , const SpecT & spec = SpecT ( ) ) <nl> - : SpecT ( spec ) , value_ ( value ) { } <nl> + IntFormatSpec ( T val , const SpecT & spec = SpecT ( ) ) <nl> + : SpecT ( spec ) , value_ ( val ) { } <nl> <nl> T value ( ) const { return value_ ; } <nl> } ; <nl> void BasicWriter < Char > : : write_double ( <nl> if ( value ! = value ) { <nl> / / Format NaN ourselves because sprintf ' s output is not consistent <nl> / / across platforms . <nl> - std : : size_t size = 4 ; <nl> + std : : size_t nan_size = 4 ; <nl> const char * nan = upper ? " NAN " : " nan " ; <nl> if ( ! sign ) { <nl> - - - size ; <nl> + - - nan_size ; <nl> + + nan ; <nl> } <nl> - CharPtr out = write_str ( nan , size , spec ) ; <nl> + CharPtr out = write_str ( nan , nan_size , spec ) ; <nl> if ( sign ) <nl> * out = sign ; <nl> return ; <nl> void BasicWriter < Char > : : write_double ( <nl> if ( internal : : isinfinity ( value ) ) { <nl> / / Format infinity ourselves because sprintf ' s output is not consistent <nl> / / across platforms . <nl> - std : : size_t size = 4 ; <nl> + std : : size_t inf_size = 4 ; <nl> const char * inf = upper ? " INF " : " inf " ; <nl> if ( ! sign ) { <nl> - - - size ; <nl> + - - inf_size ; <nl> + + inf ; <nl> } <nl> - CharPtr out = write_str ( inf , size , spec ) ; <nl> + CharPtr out = write_str ( inf , inf_size , spec ) ; <nl> if ( sign ) <nl> * out = sign ; <nl> return ; <nl> void BasicWriter < Char > : : write_double ( <nl> / / Format using snprintf . <nl> Char fill = static_cast < Char > ( spec . fill ( ) ) ; <nl> for ( ; ; ) { <nl> - std : : size_t size = buffer_ . capacity ( ) - offset ; <nl> + std : : size_t buffer_size = buffer_ . capacity ( ) - offset ; <nl> # if _MSC_VER <nl> / / MSVC ' s vsnprintf_s doesn ' t work with zero size , so reserve <nl> / / space for at least one extra character to make the size non - zero . <nl> / / Note that the buffer ' s capacity will increase by more than 1 . <nl> - if ( size = = 0 ) { <nl> + if ( buffer_size = = 0 ) { <nl> buffer_ . reserve ( offset + 1 ) ; <nl> - size = buffer_ . capacity ( ) - offset ; <nl> + buffer_size = buffer_ . capacity ( ) - offset ; <nl> } <nl> # endif <nl> Char * start = & buffer_ [ offset ] ; <nl> int n = internal : : CharTraits < Char > : : format_float ( <nl> - start , size , format , width_for_sprintf , spec . precision ( ) , value ) ; <nl> + start , buffer_size , format , width_for_sprintf , spec . precision ( ) , value ) ; <nl> if ( n > = 0 & & offset + n < buffer_ . capacity ( ) ) { <nl> if ( sign ) { <nl> if ( ( spec . align ( ) ! = ALIGN_RIGHT & & spec . align ( ) ! = ALIGN_DEFAULT ) | | <nl> void BasicWriter < Char > : : write_double ( <nl> } <nl> if ( spec . align ( ) = = ALIGN_CENTER & & <nl> spec . width ( ) > static_cast < unsigned > ( n ) ) { <nl> - unsigned width = spec . width ( ) ; <nl> + width = spec . width ( ) ; <nl> CharPtr p = grow_buffer ( width ) ; <nl> std : : copy ( p , p + n , p + ( width - n ) / 2 ) ; <nl> fill_padding ( p , spec . width ( ) , n , fill ) ; <nl>
Fix more - Wshadow warnings
fmtlib/fmt
2523f3b939f45fe342c62fd9edcc8fed253c83a7
2014-12-10T15:24:05Z
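Every hunk in this commit is the same mechanical fix: rename an inner local so it no longer shadows a parameter, member, or outer variable. A tiny standalone illustration of what -Wshadow flags and how the rename silences it (not taken from the library):

#include <cstddef>

static std::size_t buffer_capacity = 64;

std::size_t remaining(std::size_t offset)
{
    // Before: "std::size_t buffer_capacity = 128;" here would shadow the
    // file-scope variable and trigger -Wshadow, just like the inner 'size'
    // locals in write_double above.
    // After: the local gets a distinct name, mirroring the renames to
    // nan_size / inf_size / buffer_size.
    std::size_t local_capacity = 128;
    return buffer_capacity + local_capacity - offset;
}

// Compile with: g++ -Wshadow -c example.cpp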
mmm a / tests / test_sanity . py <nl> ppp b / tests / test_sanity . py <nl> def test_binaryen_version ( self ) : <nl> self . check_working ( [ EMCC , path_from_root ( ' tests ' , ' hello_world . c ' ) ] , ' error parsing binaryen version ( wasm - opt version foo ) . Please check your binaryen installation ' ) <nl> <nl> make_fake_wasm_opt ( self . in_dir ( ' fake ' , ' bin ' , ' wasm - opt ' ) , ' 70 ' ) <nl> - self . check_working ( [ EMCC , path_from_root ( ' tests ' , ' hello_world . c ' ) ] , ' unexpected binaryen version : 70 ( expected 90 ) ' ) <nl> + self . check_working ( [ EMCC , path_from_root ( ' tests ' , ' hello_world . c ' ) ] , ' unexpected binaryen version : 70 ( expected ' ) <nl> mmm a / tools / shared . py <nl> ppp b / tools / shared . py <nl> <nl> LINUX = sys . platform . startswith ( ' linux ' ) <nl> DEBUG = int ( os . environ . get ( ' EMCC_DEBUG ' , ' 0 ' ) ) <nl> EXPECTED_NODE_VERSION = ( 4 , 1 , 1 ) <nl> - EXPECTED_BINARYEN_VERSION = 90 <nl> + EXPECTED_BINARYEN_VERSION = 91 <nl> <nl> <nl> # can add % ( asctime ) s to see timestamps <nl>
Bump binaryen version requirement ( )
emscripten-core/emscripten
99d6f4f34625dde97902684981e8d081e3904734
2020-04-21T00:52:54Z
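The test now asserts only on the stable prefix of the message ("unexpected binaryen version : 70 ( expected"), so future version bumps touch a single constant instead of two files. A small sketch of the same shape, written in C++ for consistency with the other examples here; the names are illustrative:

#include <cstdio>

const int EXPECTED_VERSION = 91; // the only line a future bump must touch

bool check_version(int found)
{
    if (found != EXPECTED_VERSION)
    {
        // A robust test matches "unexpected version: 70 (expected" and
        // stops before the number, exactly as test_sanity.py now does.
        std::fprintf(stderr, "unexpected version: %d (expected %d)\n",
                     found, EXPECTED_VERSION);
        return false;
    }
    return true;
}

int main() { return check_version(70) ? 0 : 1; }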
mmm a / dbms / src / Common / ErrorCodes . cpp <nl> ppp b / dbms / src / Common / ErrorCodes . cpp <nl> namespace ErrorCodes <nl> extern const int S3_ERROR = 499 ; <nl> extern const int CANNOT_CREATE_DICTIONARY_FROM_METADATA = 500 ; <nl> extern const int CANNOT_CREATE_DATABASE = 501 ; <nl> + extern const int CANNOT_SIGQUEUE = 502 ; <nl> <nl> extern const int KEEPER_EXCEPTION = 999 ; <nl> extern const int POCO_EXCEPTION = 1000 ; <nl> new file mode 100644 <nl> index 00000000000 . . 463897f2c08 <nl> mmm / dev / null <nl> ppp b / dbms / src / Common / PipeFDs . cpp <nl> <nl> + # include < Common / PipeFDs . h > <nl> + # include < Common / Exception . h > <nl> + # include < Common / formatReadable . h > <nl> + <nl> + # include < common / logger_useful . h > <nl> + <nl> + # include < unistd . h > <nl> + # include < fcntl . h > <nl> + # include < string > <nl> + # include < algorithm > <nl> + <nl> + <nl> + namespace DB <nl> + { <nl> + <nl> + namespace ErrorCodes <nl> + { <nl> + extern const int CANNOT_PIPE ; <nl> + extern const int CANNOT_FCNTL ; <nl> + extern const int LOGICAL_ERROR ; <nl> + } <nl> + <nl> + void LazyPipeFDs : : open ( ) <nl> + { <nl> + for ( int & fd : fds_rw ) <nl> + if ( fd > = 0 ) <nl> + throw Exception ( " Pipe is already opened " , ErrorCodes : : LOGICAL_ERROR ) ; <nl> + <nl> + # ifndef __APPLE__ <nl> + if ( 0 ! = pipe2 ( fds_rw , O_CLOEXEC ) ) <nl> + throwFromErrno ( " Cannot create pipe " , ErrorCodes : : CANNOT_PIPE ) ; <nl> + # else <nl> + if ( 0 ! = pipe ( fds_rw ) ) <nl> + throwFromErrno ( " Cannot create pipe " , ErrorCodes : : CANNOT_PIPE ) ; <nl> + if ( 0 ! = fcntl ( fds_rw [ 0 ] , F_SETFD , FD_CLOEXEC ) ) <nl> + throwFromErrno ( " Cannot setup auto - close on exec for read end of pipe " , ErrorCodes : : CANNOT_FCNTL ) ; <nl> + if ( 0 ! = fcntl ( fds_rw [ 1 ] , F_SETFD , FD_CLOEXEC ) ) <nl> + throwFromErrno ( " Cannot setup auto - close on exec for write end of pipe " , ErrorCodes : : CANNOT_FCNTL ) ; <nl> + # endif <nl> + } <nl> + <nl> + void LazyPipeFDs : : close ( ) <nl> + { <nl> + for ( int & fd : fds_rw ) <nl> + { <nl> + if ( fd < 0 ) <nl> + continue ; <nl> + if ( 0 ! = : : close ( fd ) ) <nl> + throwFromErrno ( " Cannot close pipe " , ErrorCodes : : CANNOT_PIPE ) ; <nl> + fd = - 1 ; <nl> + } <nl> + } <nl> + <nl> + PipeFDs : : PipeFDs ( ) <nl> + { <nl> + open ( ) ; <nl> + } <nl> + <nl> + LazyPipeFDs : : ~ LazyPipeFDs ( ) <nl> + { <nl> + try <nl> + { <nl> + close ( ) ; <nl> + } <nl> + catch ( . . . ) <nl> + { <nl> + tryLogCurrentException ( __PRETTY_FUNCTION__ ) ; <nl> + } <nl> + } <nl> + <nl> + <nl> + void LazyPipeFDs : : setNonBlocking ( ) <nl> + { <nl> + int flags = fcntl ( fds_rw [ 1 ] , F_GETFL , 0 ) ; <nl> + if ( - 1 = = flags ) <nl> + throwFromErrno ( " Cannot get file status flags of pipe " , ErrorCodes : : CANNOT_FCNTL ) ; <nl> + if ( - 1 = = fcntl ( fds_rw [ 1 ] , F_SETFL , flags | O_NONBLOCK ) ) <nl> + throwFromErrno ( " Cannot set non - blocking mode of pipe " , ErrorCodes : : CANNOT_FCNTL ) ; <nl> + } <nl> + <nl> + void LazyPipeFDs : : tryIncreaseSize ( int desired_size ) <nl> + { <nl> + # if defined ( OS_LINUX ) <nl> + Poco : : Logger * log = & Poco : : Logger : : get ( " Pipe " ) ; <nl> + <nl> + / * * Increase pipe size to avoid slowdown during fine - grained trace collection . <nl> + * / <nl> + int pipe_size = fcntl ( fds_rw [ 1 ] , F_GETPIPE_SZ ) ; <nl> + if ( - 1 = = pipe_size ) <nl> + { <nl> + if ( errno = = EINVAL ) <nl> + { <nl> + LOG_INFO ( log , " Cannot get pipe capacity , " < < errnoToString ( ErrorCodes : : CANNOT_FCNTL ) < < " . 
Very old Linux kernels have no support for this fcntl . " ) ; <nl> + / / / It will work nevertheless . <nl> + } <nl> + else <nl> + throwFromErrno ( " Cannot get pipe capacity " , ErrorCodes : : CANNOT_FCNTL ) ; <nl> + } <nl> + else <nl> + { <nl> + for ( errno = 0 ; errno ! = EPERM & & pipe_size < desired_size ; pipe_size * = 2 ) <nl> + if ( - 1 = = fcntl ( fds_rw [ 1 ] , F_SETPIPE_SZ , pipe_size * 2 ) & & errno ! = EPERM ) <nl> + throwFromErrno ( " Cannot increase pipe capacity to " + std : : to_string ( pipe_size * 2 ) , ErrorCodes : : CANNOT_FCNTL ) ; <nl> + <nl> + LOG_TRACE ( log , " Pipe capacity is " < < formatReadableSizeWithBinarySuffix ( std : : min ( pipe_size , desired_size ) ) ) ; <nl> + } <nl> + # endif <nl> + } <nl> + <nl> + } <nl> new file mode 100644 <nl> index 00000000000 . . fe76740da70 <nl> mmm / dev / null <nl> ppp b / dbms / src / Common / PipeFDs . h <nl> <nl> + # pragma once <nl> + <nl> + # include < cstddef > <nl> + <nl> + <nl> + namespace DB <nl> + { <nl> + <nl> + / * * Struct containing a pipe with lazy initialization . <nl> + * Use ` open ` and ` close ` methods to manipulate pipe and ` fds_rw ` field to access <nl> + * pipe ' s file descriptors . <nl> + * / <nl> + struct LazyPipeFDs <nl> + { <nl> + int fds_rw [ 2 ] = { - 1 , - 1 } ; <nl> + <nl> + void open ( ) ; <nl> + void close ( ) ; <nl> + <nl> + void setNonBlocking ( ) ; <nl> + void tryIncreaseSize ( int desired_size ) ; <nl> + <nl> + ~ LazyPipeFDs ( ) ; <nl> + } ; <nl> + <nl> + <nl> + / * * Struct which opens new pipe on creation and closes it on destruction . <nl> + * Use ` fds_rw ` field to access pipe ' s file descriptors . <nl> + * / <nl> + struct PipeFDs : public LazyPipeFDs <nl> + { <nl> + PipeFDs ( ) ; <nl> + } ; <nl> + <nl> + } <nl> mmm a / dbms / src / Common / QueryProfiler . cpp <nl> ppp b / dbms / src / Common / QueryProfiler . cpp <nl> <nl> # include " QueryProfiler . h " <nl> <nl> # include < random > <nl> - # include < common / Pipe . h > <nl> # include < common / phdr_cache . h > <nl> # include < common / config_common . h > <nl> - # include < Common / StackTrace . h > <nl> # include < common / StringRef . h > <nl> # include < common / logger_useful . h > <nl> + # include < Common / PipeFDs . h > <nl> + # include < Common / StackTrace . h > <nl> # include < Common / CurrentThread . h > <nl> # include < Common / Exception . h > <nl> # include < Common / thread_local_rng . h > <nl> namespace ProfileEvents <nl> namespace DB <nl> { <nl> <nl> - extern LazyPipe trace_pipe ; <nl> + extern LazyPipeFDs trace_pipe ; <nl> <nl> namespace <nl> { <nl> mmm a / dbms / src / Common / ShellCommand . cpp <nl> ppp b / dbms / src / Common / ShellCommand . cpp <nl> <nl> # include < dlfcn . h > <nl> # include < Common / Exception . h > <nl> # include < Common / ShellCommand . h > <nl> + # include < Common / PipeFDs . h > <nl> # include < common / logger_useful . h > <nl> # include < IO / WriteHelpers . h > <nl> # include < port / unistd . h > <nl> # include < csignal > <nl> - # include < common / Pipe . h > <nl> <nl> namespace <nl> { <nl> std : : unique_ptr < ShellCommand > ShellCommand : : executeImpl ( const char * filename , c <nl> if ( ! 
real_vfork ) <nl> throwFromErrno ( " Cannot find symbol vfork in myself " , ErrorCodes : : CANNOT_DLSYM ) ; <nl> <nl> - Pipe pipe_stdin ; <nl> - Pipe pipe_stdout ; <nl> - Pipe pipe_stderr ; <nl> + PipeFDs pipe_stdin ; <nl> + PipeFDs pipe_stdout ; <nl> + PipeFDs pipe_stderr ; <nl> <nl> pid_t pid = reinterpret_cast < pid_t ( * ) ( ) > ( real_vfork ) ( ) ; <nl> <nl> mmm a / dbms / src / Common / TaskStatsInfoGetter . h <nl> ppp b / dbms / src / Common / TaskStatsInfoGetter . h <nl> class TaskStatsInfoGetter : private boost : : noncopyable <nl> / / / Whether the current process has permissions ( sudo or cap_net_admin capabilties ) to get taskstats info <nl> static bool checkPermissions ( ) ; <nl> <nl> - # if defined ( __linux__ ) <nl> + # if defined ( OS_LINUX ) <nl> private : <nl> int netlink_socket_fd = - 1 ; <nl> UInt16 taskstats_family_id = 0 ; <nl> mmm a / dbms / src / Common / TraceCollector . cpp <nl> ppp b / dbms / src / Common / TraceCollector . cpp <nl> <nl> <nl> # include < Core / Field . h > <nl> # include < Poco / Logger . h > <nl> - # include < common / Pipe . h > <nl> + # include < Common / PipeFDs . h > <nl> # include < Common / StackTrace . h > <nl> # include < common / logger_useful . h > <nl> # include < IO / ReadHelpers . h > <nl> <nl> namespace DB <nl> { <nl> <nl> - LazyPipe trace_pipe ; <nl> + LazyPipeFDs trace_pipe ; <nl> <nl> namespace ErrorCodes <nl> { <nl> extern const int NULL_POINTER_DEREFERENCE ; <nl> extern const int THREAD_IS_NOT_JOINABLE ; <nl> - extern const int CANNOT_FCNTL ; <nl> } <nl> <nl> TraceCollector : : TraceCollector ( std : : shared_ptr < TraceLog > & trace_log_ ) <nl> TraceCollector : : TraceCollector ( std : : shared_ptr < TraceLog > & trace_log_ ) <nl> / * * Turn write end of pipe to non - blocking mode to avoid deadlocks <nl> * when QueryProfiler is invoked under locks and TraceCollector cannot pull data from pipe . <nl> * / <nl> - int flags = fcntl ( trace_pipe . fds_rw [ 1 ] , F_GETFL , 0 ) ; <nl> - if ( - 1 = = flags ) <nl> - throwFromErrno ( " Cannot get file status flags of pipe " , ErrorCodes : : CANNOT_FCNTL ) ; <nl> - if ( - 1 = = fcntl ( trace_pipe . fds_rw [ 1 ] , F_SETFL , flags | O_NONBLOCK ) ) <nl> - throwFromErrno ( " Cannot set non - blocking mode of pipe " , ErrorCodes : : CANNOT_FCNTL ) ; <nl> - <nl> - # if defined ( OS_LINUX ) <nl> - / * * Increase pipe size to avoid slowdown during fine - grained trace collection . <nl> - * / <nl> - int pipe_size = fcntl ( trace_pipe . fds_rw [ 1 ] , F_GETPIPE_SZ ) ; <nl> - if ( - 1 = = pipe_size ) <nl> - { <nl> - if ( errno = = EINVAL ) <nl> - { <nl> - LOG_INFO ( log , " Cannot get pipe capacity , " < < errnoToString ( ErrorCodes : : CANNOT_FCNTL ) < < " . Very old Linux kernels have no support for this fcntl . " ) ; <nl> - / / / It will work nevertheless . <nl> - } <nl> - else <nl> - throwFromErrno ( " Cannot get pipe capacity " , ErrorCodes : : CANNOT_FCNTL ) ; <nl> - } <nl> - else <nl> - { <nl> - constexpr int max_pipe_capacity_to_set = 1048576 ; <nl> - for ( errno = 0 ; errno ! = EPERM & & pipe_size < max_pipe_capacity_to_set ; pipe_size * = 2 ) <nl> - if ( - 1 = = fcntl ( trace_pipe . fds_rw [ 1 ] , F_SETPIPE_SZ , pipe_size * 2 ) & & errno ! = EPERM ) <nl> - throwFromErrno ( " Cannot increase pipe capacity to " + toString ( pipe_size * 2 ) , ErrorCodes : : CANNOT_FCNTL ) ; <nl> - <nl> - LOG_TRACE ( log , " Pipe capacity is " < < formatReadableSizeWithBinarySuffix ( std : : min ( pipe_size , max_pipe_capacity_to_set ) ) ) ; <nl> - } <nl> - # endif <nl> + trace_pipe . 
setNonBlocking ( ) ; <nl> + trace_pipe . tryIncreaseSize ( 1 < < 20 ) ; <nl> <nl> thread = ThreadFromGlobalPool ( & TraceCollector : : run , this ) ; <nl> } <nl> new file mode 100644 <nl> index 00000000000 . . 97149fa8712 <nl> mmm / dev / null <nl> ppp b / dbms / src / Storages / System / StorageSystemStackTrace . cpp <nl> <nl> + # include < signal . h > <nl> + <nl> + # include < mutex > <nl> + # include < condition_variable > <nl> + # include < filesystem > <nl> + <nl> + # include < ext / scope_guard . h > <nl> + <nl> + # include < Storages / System / StorageSystemStackTrace . h > <nl> + # include < DataTypes / DataTypeString . h > <nl> + # include < DataTypes / DataTypesNumber . h > <nl> + # include < DataTypes / DataTypeArray . h > <nl> + # include < DataStreams / OneBlockInputStream . h > <nl> + # include < IO / ReadHelpers . h > <nl> + <nl> + <nl> + namespace DB <nl> + { <nl> + <nl> + namespace ErrorCodes <nl> + { <nl> + extern const int CANNOT_SIGQUEUE ; <nl> + } <nl> + <nl> + <nl> + NamesAndTypesList StorageSystemStackTrace : : getNamesAndTypes ( ) <nl> + { <nl> + return <nl> + { <nl> + { " thread_number " , std : : make_shared < DataTypeUInt32 > ( ) } , <nl> + { " query_id " , std : : make_shared < DataTypeString > ( ) } , <nl> + { " trace " , std : : make_shared < DataTypeArray > ( std : : make_shared < DataTypeUInt64 > ( ) ) } <nl> + } ; <nl> + } <nl> + <nl> + namespace <nl> + { <nl> + struct State <nl> + { <nl> + std : : mutex mutex ; <nl> + std : : condition_variable condvar ; <nl> + <nl> + size_t total_threads ; <nl> + size_t threads_processed ; <nl> + std : : exception_ptr exception ; <nl> + MutableColumns * columns_to_fill ; <nl> + <nl> + State ( ) { reset ( ) ; } <nl> + <nl> + void reset ( MutableColumns * columns_to_fill_ = nullptr ) <nl> + { <nl> + total_threads = 0 ; <nl> + threads_processed = 0 ; <nl> + exception = std : : exception_ptr ( ) ; <nl> + columns_to_fill = columns_to_fill_ ; <nl> + } <nl> + <nl> + operator bool ( ) <nl> + { <nl> + return columns_to_fill ! = nullptr ; <nl> + } <nl> + } ; <nl> + <nl> + State state ; <nl> + <nl> + void callback ( const siginfo_t & , const StackTrace & stack_trace , UInt32 thread_number ) <nl> + { <nl> + std : : lock_guard lock ( state . mutex ) ; <nl> + <nl> + std : : cerr < < thread_number < < " ! \ n " ; <nl> + <nl> + if ( ! state ) <nl> + return ; <nl> + <nl> + try <nl> + { <nl> + size_t stack_trace_size = stack_trace . getSize ( ) ; <nl> + size_t stack_trace_offset = stack_trace . getOffset ( ) ; <nl> + <nl> + Array arr ; <nl> + arr . reserve ( stack_trace_size - stack_trace_offset ) ; <nl> + for ( size_t i = stack_trace_offset ; i < stack_trace_size ; + + i ) <nl> + arr . emplace_back ( reinterpret_cast < intptr_t > ( stack_trace . getFrames ( ) [ i ] ) ) ; <nl> + <nl> + std : : cerr < < thread_number < < " ! ! \ n " ; <nl> + <nl> + state . columns_to_fill - > at ( 0 ) - > insert ( thread_number ) ; <nl> + state . columns_to_fill - > at ( 1 ) - > insertDefault ( ) ; <nl> + state . columns_to_fill - > at ( 2 ) - > insert ( arr ) ; <nl> + <nl> + std : : cerr < < thread_number < < " ! ! ! \ n " ; <nl> + <nl> + + + state . threads_processed ; <nl> + <nl> + std : : cerr < < state . threads_processed < < " , " < < state . total_threads < < " ! ! ! ! \ n " ; <nl> + if ( state . threads_processed > = state . total_threads ) <nl> + state . condvar . notify_one ( ) ; <nl> + } <nl> + catch ( . . . ) <nl> + { <nl> + state . reset ( ) ; <nl> + state . exception = std : : current_exception ( ) ; <nl> + state . condvar . 
notify_one ( ) ; <nl> + } <nl> + } <nl> + } <nl> + <nl> + void StorageSystemStackTrace : : fillData ( MutableColumns & res_columns , const Context & , const SelectQueryInfo & ) const <nl> + { <nl> + std : : unique_lock lock ( state . mutex ) ; <nl> + <nl> + state . reset ( & res_columns ) ; <nl> + SCOPE_EXIT ( { state . reset ( ) ; } ) ; <nl> + <nl> + std : : cerr < < state . columns_to_fill - > size ( ) < < " \ n " ; <nl> + <nl> + / / / Send a signal to every thread <nl> + std : : filesystem : : directory_iterator end ; <nl> + for ( std : : filesystem : : directory_iterator it ( " / proc / self / task " ) ; it ! = end ; + + it ) <nl> + { <nl> + sigval sig_value ; <nl> + sig_value . sival_ptr = reinterpret_cast < void * > ( & callback ) ; <nl> + pid_t tid = parse < pid_t > ( it - > path ( ) . filename ( ) ) ; <nl> + if ( 0 = = : : sigqueue ( tid , SIGTSTP , sig_value ) ) <nl> + { <nl> + + + state . total_threads ; <nl> + } <nl> + else <nl> + { <nl> + / / / The thread may have been already finished . <nl> + if ( ESRCH ! = errno ) <nl> + throwFromErrno ( " Cannot send signal with sigqueue " , ErrorCodes : : CANNOT_SIGQUEUE ) ; <nl> + } <nl> + } <nl> + <nl> + std : : cerr < < state . threads_processed < < " , " < < state . total_threads < < " sent \ n " ; <nl> + <nl> + / / / Timeout one second for the case the signal pipe will be full and messages will be dropped . <nl> + state . condvar . wait_for ( lock , std : : chrono : : seconds ( 1 ) , [ ] { return state . threads_processed > = state . total_threads | | state . exception ; } ) ; <nl> + if ( state . exception ) <nl> + std : : rethrow_exception ( state . exception ) ; <nl> + } <nl> + <nl> + } <nl> + <nl> new file mode 100644 <nl> index 00000000000 . . a402f56b420 <nl> mmm / dev / null <nl> ppp b / dbms / src / Storages / System / StorageSystemStackTrace . h <nl> <nl> + # pragma once <nl> + <nl> + # include < ext / shared_ptr_helper . h > <nl> + # include < Storages / System / IStorageSystemOneBlock . h > <nl> + <nl> + <nl> + namespace DB <nl> + { <nl> + <nl> + class Context ; <nl> + <nl> + <nl> + / / / Allows to introspect stack trace of all server threads . <nl> + / / / It acts like an embedded debugger . <nl> + class StorageSystemStackTrace : public ext : : shared_ptr_helper < StorageSystemStackTrace > , public IStorageSystemOneBlock < StorageSystemStackTrace > <nl> + { <nl> + friend struct ext : : shared_ptr_helper < StorageSystemStackTrace > ; <nl> + public : <nl> + String getName ( ) const override { return " SystemStackTrace " ; } <nl> + <nl> + static NamesAndTypesList getNamesAndTypes ( ) ; <nl> + <nl> + protected : <nl> + using IStorageSystemOneBlock : : IStorageSystemOneBlock ; <nl> + <nl> + void fillData ( MutableColumns & res_columns , const Context & context , const SelectQueryInfo & query_info ) const override ; <nl> + } ; <nl> + <nl> + } <nl> + <nl> mmm a / dbms / src / Storages / System / attachSystemTables . cpp <nl> ppp b / dbms / src / Storages / System / attachSystemTables . cpp <nl> <nl> # include < Storages / System / StorageSystemContributors . h > <nl> # include < Storages / System / StorageSystemDisks . h > <nl> # include < Storages / System / StorageSystemStoragePolicies . h > <nl> + # include < Storages / System / StorageSystemStackTrace . h > <nl> <nl> <nl> namespace DB <nl> void attachSystemTablesLocal ( IDatabase & system_database ) <nl> system_database . attachTable ( " collations " , StorageSystemCollations : : create ( " collations " ) ) ; <nl> system_database . 
attachTable ( " table_engines " , StorageSystemTableEngines : : create ( " table_engines " ) ) ; <nl> system_database . attachTable ( " contributors " , StorageSystemContributors : : create ( " contributors " ) ) ; <nl> + system_database . attachTable ( " stack_trace " , StorageSystemStackTrace : : create ( " stack_trace " ) ) ; <nl> } <nl> <nl> void attachSystemTablesServer ( IDatabase & system_database , bool has_zookeeper ) <nl> mmm a / libs / libcommon / CMakeLists . txt <nl> ppp b / libs / libcommon / CMakeLists . txt <nl> add_library ( common <nl> src / getThreadNumber . cpp <nl> src / sleep . cpp <nl> src / argsToConfig . cpp <nl> - src / Pipe . cpp <nl> src / phdr_cache . cpp <nl> <nl> include / common / SimpleCache . h <nl> add_library ( common <nl> include / common / setTerminalEcho . h <nl> include / common / find_symbols . h <nl> include / common / constexpr_helpers . h <nl> - include / common / Pipe . h <nl> include / common / getThreadNumber . h <nl> include / common / sleep . h <nl> include / common / SimpleCache . h <nl> deleted file mode 100644 <nl> index 0137c3d97af . . 00000000000 <nl> mmm a / libs / libcommon / include / common / Pipe . h <nl> ppp / dev / null <nl> <nl> - # pragma once <nl> - <nl> - # include < unistd . h > <nl> - # include < fcntl . h > <nl> - # include < stdexcept > <nl> - <nl> - / * * <nl> - * Struct containing a pipe with lazy initialization . <nl> - * Use ` open ` and ` close ` methods to manipulate pipe and ` fds_rw ` field to access <nl> - * pipe ' s file descriptors . <nl> - * / <nl> - struct LazyPipe <nl> - { <nl> - int fds_rw [ 2 ] = { - 1 , - 1 } ; <nl> - <nl> - LazyPipe ( ) = default ; <nl> - <nl> - void open ( ) ; <nl> - <nl> - void close ( ) ; <nl> - <nl> - virtual ~ LazyPipe ( ) = default ; <nl> - } ; <nl> - <nl> - / * * <nl> - * Struct which opens new pipe on creation and closes it on destruction . <nl> - * Use ` fds_rw ` field to access pipe ' s file descriptors . <nl> - * / <nl> - struct Pipe : public LazyPipe <nl> - { <nl> - Pipe ( ) ; <nl> - <nl> - ~ Pipe ( ) ; <nl> - } ; <nl> deleted file mode 100644 <nl> index 83268b76ea6 . . 00000000000 <nl> mmm a / libs / libcommon / src / Pipe . cpp <nl> ppp / dev / null <nl> <nl> - # include " common / Pipe . h " <nl> - <nl> - void LazyPipe : : open ( ) <nl> - { <nl> - for ( int & fd : fds_rw ) <nl> - { <nl> - if ( fd > = 0 ) <nl> - { <nl> - throw std : : logic_error ( " Pipe is already opened " ) ; <nl> - } <nl> - } <nl> - <nl> - # ifndef __APPLE__ <nl> - if ( 0 ! = pipe2 ( fds_rw , O_CLOEXEC ) ) <nl> - throw std : : runtime_error ( " Cannot create pipe " ) ; <nl> - # else <nl> - if ( 0 ! = pipe ( fds_rw ) ) <nl> - throw std : : runtime_error ( " Cannot create pipe " ) ; <nl> - if ( 0 ! = fcntl ( fds_rw [ 0 ] , F_SETFD , FD_CLOEXEC ) ) <nl> - throw std : : runtime_error ( " Cannot setup auto - close on exec for read end of pipe " ) ; <nl> - if ( 0 ! = fcntl ( fds_rw [ 1 ] , F_SETFD , FD_CLOEXEC ) ) <nl> - throw std : : runtime_error ( " Cannot setup auto - close on exec for write end of pipe " ) ; <nl> - # endif <nl> - } <nl> - <nl> - void LazyPipe : : close ( ) <nl> - { <nl> - for ( int fd : fds_rw ) <nl> - { <nl> - if ( fd > = 0 ) <nl> - { <nl> - : : close ( fd ) ; <nl> - } <nl> - } <nl> - } <nl> - <nl> - Pipe : : Pipe ( ) <nl> - { <nl> - open ( ) ; <nl> - } <nl> - <nl> - Pipe : : ~ Pipe ( ) <nl> - { <nl> - close ( ) ; <nl> - } <nl> mmm a / libs / libdaemon / include / daemon / BaseDaemon . h <nl> ppp b / libs / libdaemon / include / daemon / BaseDaemon . 
h <nl> <nl> # include < Poco / Version . h > <nl> # include < common / Types . h > <nl> # include < common / logger_useful . h > <nl> + # include < common / getThreadNumber . h > <nl> # include < daemon / GraphiteWriter . h > <nl> # include < Common / Config / ConfigProcessor . h > <nl> # include < loggers / Loggers . h > <nl> <nl> + <nl> namespace Poco { class TaskManager ; } <nl> <nl> <nl> std : : optional < std : : reference_wrapper < Daemon > > BaseDaemon : : tryGetInstance ( ) <nl> else <nl> return { } ; <nl> } <nl> + <nl> + <nl> + / / / If you send TSTP signal with value ( sigqueue ) to a thread , it will make a callback <nl> + / / / from a separate thread and you can call non signal - safe function from there . <nl> + using SignalCallback = void ( const siginfo_t & , const StackTrace & , UInt32 ) ; <nl> + <nl> mmm a / libs / libdaemon / src / BaseDaemon . cpp <nl> ppp b / libs / libdaemon / src / BaseDaemon . cpp <nl> <nl> # include < daemon / BaseDaemon . h > <nl> - # include < Common / Config / ConfigProcessor . h > <nl> + <nl> # include < sys / stat . h > <nl> # include < sys / types . h > <nl> # include < sys / time . h > <nl> <nl> # include < unistd . h > <nl> <nl> # include < typeinfo > <nl> - # include < common / logger_useful . h > <nl> - # include < common / ErrorHandlers . h > <nl> - # include < common / Pipe . h > <nl> - # include < Common / StackTrace . h > <nl> # include < sys / time . h > <nl> # include < sys / resource . h > <nl> # include < iostream > <nl> # include < fstream > <nl> # include < sstream > <nl> # include < memory > <nl> + <nl> # include < Poco / Observer . h > <nl> # include < Poco / AutoPtr . h > <nl> - # include < common / getThreadNumber . h > <nl> # include < Poco / PatternFormatter . h > <nl> # include < Poco / TaskManager . h > <nl> # include < Poco / File . h > <nl> <nl> # include < Poco / Condition . h > <nl> # include < Poco / SyslogChannel . h > <nl> # include < Poco / DirectoryIterator . h > <nl> - # include < Common / Exception . h > <nl> + <nl> + # include < common / logger_useful . h > <nl> + # include < common / ErrorHandlers . h > <nl> + # include < common / argsToConfig . h > <nl> + <nl> # include < IO / WriteBufferFromFile . h > <nl> # include < IO / WriteBufferFromFileDescriptorDiscardOnFailure . h > <nl> # include < IO / ReadBufferFromFileDescriptor . h > <nl> # include < IO / ReadHelpers . h > <nl> # include < IO / WriteHelpers . h > <nl> + # include < Common / Exception . h > <nl> + # include < Common / PipeFDs . h > <nl> + # include < Common / StackTrace . h > <nl> # include < Common / getMultipleKeysFromConfig . h > <nl> # include < Common / ClickHouseRevision . h > <nl> + # include < Common / Config / ConfigProcessor . h > <nl> # include < Common / config_version . h > <nl> - # include < common / argsToConfig . h > <nl> <nl> # ifdef __APPLE__ <nl> / / ucontext is not available without _XOPEN_SOURCE <nl> <nl> # include < ucontext . h > <nl> <nl> <nl> - Pipe signal_pipe ; <nl> + DB : : PipeFDs signal_pipe ; <nl> <nl> <nl> / * * Reset signal handler to the default and send signal to itself . <nl> static void call_default_signal_handler ( int sig ) <nl> } <nl> <nl> <nl> - using ThreadNumber = decltype ( getThreadNumber ( ) ) ; <nl> - static const size_t buf_size = sizeof ( int ) + sizeof ( siginfo_t ) + sizeof ( ucontext_t ) + sizeof ( StackTrace ) + sizeof ( ThreadNumber ) ; <nl> + / / / Normally query_id is a UUID ( string with a fixed length ) but user can provide custom query_id . 
<nl> + / / / Thus upper bound on query_id length should be introduced to avoid buffer overflow in signal handler . <nl> + constexpr size_t QUERY_ID_MAX_LEN = 1024 ; <nl> + <nl> + static const size_t buf_size = sizeof ( int ) + sizeof ( siginfo_t ) + sizeof ( ucontext_t ) + sizeof ( StackTrace ) + sizeof ( UInt32 ) <nl> + + QUERY_ID_MAX_LEN + 2 / * varint encoding query_id length * / ; <nl> + <nl> <nl> using signal_function = void ( int , siginfo_t * , void * ) ; <nl> <nl> static void terminateRequestedSignalHandler ( int sig , siginfo_t * info , void * co <nl> } <nl> <nl> <nl> - / * * Handler for " fault " signals . Send data about fault to separate thread to write into log . <nl> + / * * Handler for " fault " or diagnostic signals . Send data about fault to separate thread to write into log . <nl> * / <nl> - static void faultSignalHandler ( int sig , siginfo_t * info , void * context ) <nl> + static void signalHandler ( int sig , siginfo_t * info , void * context ) <nl> { <nl> char buf [ buf_size ] ; <nl> + std : : cerr < < " Size of buffer : " < < buf_size < < " \ n " ; <nl> DB : : WriteBufferFromFileDescriptorDiscardOnFailure out ( signal_pipe . fds_rw [ 1 ] , buf_size , buf ) ; <nl> <nl> const ucontext_t signal_context = * reinterpret_cast < ucontext_t * > ( context ) ; <nl> static void faultSignalHandler ( int sig , siginfo_t * info , void * context ) <nl> DB : : writePODBinary ( * info , out ) ; <nl> DB : : writePODBinary ( signal_context , out ) ; <nl> DB : : writePODBinary ( stack_trace , out ) ; <nl> - DB : : writeBinary ( getThreadNumber ( ) , out ) ; <nl> + DB : : writeBinary ( UInt32 ( getThreadNumber ( ) ) , out ) ; <nl> <nl> out . next ( ) ; <nl> <nl> class SignalListener : public Poco : : Runnable <nl> } <nl> else if ( sig = = Signals : : StdTerminate ) <nl> { <nl> - ThreadNumber thread_num ; <nl> + UInt32 thread_num ; <nl> std : : string message ; <nl> <nl> DB : : readBinary ( thread_num , in ) ; <nl> class SignalListener : public Poco : : Runnable <nl> siginfo_t info ; <nl> ucontext_t context ; <nl> StackTrace stack_trace ( NoCapture { } ) ; <nl> - ThreadNumber thread_num ; <nl> + UInt32 thread_num ; <nl> <nl> DB : : readPODBinary ( info , in ) ; <nl> DB : : readPODBinary ( context , in ) ; <nl> DB : : readPODBinary ( stack_trace , in ) ; <nl> DB : : readBinary ( thread_num , in ) ; <nl> <nl> + if ( sig = = SIGTSTP & & info . si_value . sival_ptr ) <nl> + { <nl> + / / / TSTP signal with value is used to make a custom callback from this thread . <nl> + try <nl> + { <nl> + reinterpret_cast < SignalCallback * > ( info . si_value . sival_ptr ) ( info , stack_trace , thread_num ) ; <nl> + continue ; <nl> + } <nl> + catch ( . . . ) <nl> + { <nl> + / / / Failed to process , will use ' onFault ' function . <nl> + } <nl> + } <nl> + <nl> / / / This allows to receive more signals if failure happens inside onFault function . <nl> / / / Example : segfault while symbolizing stack trace . <nl> std : : thread ( [ = ] { onFault ( sig , info , context , stack_trace , thread_num ) ; } ) . 
detach ( ) ; <nl> class SignalListener : public Poco : : Runnable <nl> BaseDaemon & daemon ; <nl> <nl> private : <nl> - void onTerminate ( const std : : string & message , ThreadNumber thread_num ) const <nl> + void onTerminate ( const std : : string & message , UInt32 thread_num ) const <nl> { <nl> LOG_FATAL ( log , " ( version " < < VERSION_STRING < < VERSION_OFFICIAL < < " ) ( from thread " < < thread_num < < " ) " < < message ) ; <nl> } <nl> <nl> - void onFault ( int sig , const siginfo_t & info , const ucontext_t & context , const StackTrace & stack_trace , ThreadNumber thread_num ) const <nl> + void onFault ( int sig , const siginfo_t & info , const ucontext_t & context , const StackTrace & stack_trace , UInt32 thread_num ) const <nl> { <nl> LOG_FATAL ( log , " # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # " ) ; <nl> LOG_FATAL ( log , " ( version " < < VERSION_STRING < < VERSION_OFFICIAL < < " ) ( from thread " < < thread_num < < " ) " <nl> static void terminate_handler ( ) <nl> DB : : WriteBufferFromFileDescriptor out ( signal_pipe . fds_rw [ 1 ] , buf_size , buf ) ; <nl> <nl> DB : : writeBinary ( static_cast < int > ( SignalListener : : StdTerminate ) , out ) ; <nl> - DB : : writeBinary ( getThreadNumber ( ) , out ) ; <nl> + DB : : writeBinary ( UInt32 ( getThreadNumber ( ) ) , out ) ; <nl> DB : : writeBinary ( log_message , out ) ; <nl> out . next ( ) ; <nl> <nl> void BaseDaemon : : initializeTerminationAndSignalProcessing ( ) <nl> <nl> / / / SIGTSTP is added for debugging purposes . To output a stack trace of any running thread at anytime . <nl> <nl> - add_signal_handler ( { SIGABRT , SIGSEGV , SIGILL , SIGBUS , SIGSYS , SIGFPE , SIGPIPE , SIGTSTP } , faultSignalHandler ) ; <nl> + add_signal_handler ( { SIGABRT , SIGSEGV , SIGILL , SIGBUS , SIGSYS , SIGFPE , SIGPIPE , SIGTSTP } , signalHandler ) ; <nl> add_signal_handler ( { SIGHUP , SIGUSR1 } , closeLogsSignalHandler ) ; <nl> add_signal_handler ( { SIGINT , SIGQUIT , SIGTERM } , terminateRequestedSignalHandler ) ; <nl> <nl> void BaseDaemon : : initializeTerminationAndSignalProcessing ( ) <nl> static KillingErrorHandler killing_error_handler ; <nl> Poco : : ErrorHandler : : set ( & killing_error_handler ) ; <nl> <nl> + signal_pipe . setNonBlocking ( ) ; <nl> + signal_pipe . tryIncreaseSize ( 1 < < 20 ) ; <nl> + <nl> signal_listener . reset ( new SignalListener ( * this ) ) ; <nl> signal_listener_thread . start ( * signal_listener ) ; <nl> } <nl>
Added " system . stack_trace " table ( development )
ClickHouse/ClickHouse
e0000bef989a7fff327f22e8cf4e4443e0e45dff
2019-12-22T17:20:33Z
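The mechanism this commit leans on is worth spelling out: sigqueue() attaches a sigval payload to a signal, and a handler installed with SA_SIGINFO reads it back from siginfo_t::si_value; BaseDaemon uses that channel to smuggle a callback pointer to its signal listener thread. A minimal POSIX sketch of the payload round trip (not ClickHouse code; it sends a string to itself and prints it):

#include <signal.h>
#include <string.h>
#include <unistd.h>

static void handler(int /*sig*/, siginfo_t * info, void * /*context*/)
{
    // Read back the payload attached by sigqueue(). Production code keeps
    // handlers async-signal-safe and forwards the payload through a pipe to
    // a listener thread, as BaseDaemon does; write() is safe to call here.
    const char * message = static_cast<const char *>(info->si_value.sival_ptr);
    write(STDERR_FILENO, message, strlen(message));
}

int main()
{
    struct sigaction sa {};
    sa.sa_sigaction = handler; // SA_SIGINFO selects the three-argument form
    sa.sa_flags = SA_SIGINFO;
    sigaction(SIGUSR1, &sa, nullptr);

    static const char payload[] = "hello from si_value\n";
    sigval value {};
    value.sival_ptr = const_cast<char *>(payload);
    sigqueue(getpid(), SIGUSR1, value); // signal plus payload, to ourselves
    return 0;
}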
mmm a / dbms / include / DB / Common / FileChecker . h <nl> ppp b / dbms / include / DB / Common / FileChecker . h <nl> class FileChecker <nl> { <nl> public : <nl> FileChecker ( const std : : string & file_info_path_ ) : <nl> - files_info_path ( file_info_path_ ) , files_info ( ) , log ( & Logger : : get ( " FileChecker " ) ) <nl> + files_info_path ( file_info_path_ ) <nl> { <nl> Poco : : Path path ( files_info_path ) ; <nl> tmp_files_info_path = path . parent ( ) . toString ( ) + " tmp_ " + path . getFileName ( ) ; <nl> - <nl> - if ( Poco : : File ( files_info_path ) . exists ( ) ) <nl> - boost : : property_tree : : read_json ( files_info_path , files_info ) ; <nl> } <nl> <nl> void setPath ( const std : : string & file_info_path_ ) <nl> class FileChecker <nl> <nl> void update ( const Poco : : File & file ) <nl> { <nl> + initialize ( ) ; <nl> updateTree ( file ) ; <nl> saveTree ( ) ; <nl> } <nl> <nl> void update ( const Files : : iterator & begin , const Files : : iterator & end ) <nl> { <nl> + initialize ( ) ; <nl> for ( auto it = begin ; it ! = end ; + + it ) <nl> updateTree ( * it ) ; <nl> saveTree ( ) ; <nl> class FileChecker <nl> / / / Check the files whose parameters are specified in sizes . json <nl> bool check ( ) const <nl> { <nl> + / * * Re - read the files on each call of check - so as not to violate constness . <nl> + * The check method is called rarely . <nl> + * / <nl> + PropertyTree local_files_info ; <nl> + if ( Poco : : File ( files_info_path ) . exists ( ) ) <nl> + boost : : property_tree : : read_json ( files_info_path , local_files_info ) ; <nl> + <nl> bool correct = true ; <nl> - if ( ! files_info . empty ( ) ) <nl> - for ( auto & node : files_info . get_child ( " yandex " ) ) <nl> + if ( ! local_files_info . empty ( ) ) <nl> + { <nl> + for ( auto & node : local_files_info . get_child ( " yandex " ) ) <nl> { <nl> std : : string filename = unescapeForFileName ( node . first ) ; <nl> size_t expected_size = std : : stoull ( node . second . template get < std : : string > ( " size " ) ) ; <nl> class FileChecker <nl> correct = false ; <nl> } <nl> } <nl> + } <nl> return correct ; <nl> } <nl> <nl> private : <nl> + void initialize ( ) <nl> + { <nl> + if ( initialized ) <nl> + return ; <nl> + <nl> + if ( Poco : : File ( files_info_path ) . exists ( ) ) <nl> + boost : : property_tree : : read_json ( files_info_path , files_info ) ; <nl> + <nl> + initialized = true ; <nl> + } <nl> + <nl> void updateTree ( const Poco : : File & file ) <nl> { <nl> files_info . put ( std : : string ( " yandex . " ) + escapeForFileName ( Poco : : Path ( file . path ( ) ) . getFileName ( ) ) + " . size " , std : : to_string ( file . getSize ( ) ) ) ; <nl> class FileChecker <nl> std : : string tmp_files_info_path ; <nl> <nl> using PropertyTree = boost : : property_tree : : ptree ; <nl> + <nl> + / / / Data from the file is read lazily . <nl> PropertyTree files_info ; <nl> + bool initialized = false ; <nl> <nl> - Logger * log ; <nl> + Logger * log = & Logger : : get ( " FileChecker " ) ; <nl> } ; <nl> } <nl> new file mode 100644 <nl> index 00000000000 . . d17dffc5850 <nl> mmm / dev / null <nl> ppp b / dbms / include / DB / Common / SimpleCache . h <nl> <nl> + # pragma once <nl> + <nl> + # include < map > <nl> + # include < tuple > <nl> + # include < mutex > <nl> + # include < statdaemons / ext / function_traits . hpp > <nl> + <nl> + <nl> + / * * The simplest cache for a free function . <nl> + * You can also pass a static class method or a lambda without capture . <nl> + * The size is unlimited .
Values do not become stale . <nl> + * A mutex is used for synchronization . <nl> + * Suitable only for the simplest cases . <nl> + * <nl> + * Usage : <nl> + * <nl> + * SimpleCache < decltype ( func ) , & func > func_cached ; <nl> + * std : : cerr < < func_cached ( args . . . ) ; <nl> + * / <nl> + template < typename F , F * f > <nl> + class SimpleCache <nl> + { <nl> + private : <nl> + using Key = typename function_traits < F > : : arguments_remove_reference ; <nl> + using Result = typename function_traits < F > : : result ; <nl> + <nl> + std : : map < Key , Result > cache ; <nl> + std : : mutex mutex ; <nl> + <nl> + public : <nl> + template < typename . . . Args > <nl> + Result operator ( ) ( Args & & . . . args ) <nl> + { <nl> + { <nl> + std : : lock_guard < std : : mutex > lock ( mutex ) ; <nl> + <nl> + Key key { std : : forward < Args > ( args ) . . . } ; <nl> + auto it = cache . find ( key ) ; <nl> + <nl> + if ( cache . end ( ) ! = it ) <nl> + return it - > second ; <nl> + } <nl> + <nl> + / / / The calculation itself is done without holding the mutex . <nl> + Result res = f ( std : : forward < Args > ( args ) . . . ) ; <nl> + <nl> + { <nl> + std : : lock_guard < std : : mutex > lock ( mutex ) ; <nl> + <nl> + cache . emplace ( std : : forward_as_tuple ( args . . . ) , res ) ; <nl> + } <nl> + <nl> + return res ; <nl> + } <nl> + } ; <nl> mmm a / dbms / include / DB / Common / Throttler . h <nl> ppp b / dbms / include / DB / Common / Throttler . h <nl> <nl> # include < mutex > <nl> # include < memory > <nl> # include < statdaemons / Stopwatch . h > <nl> + # include < DB / Core / Exception . h > <nl> + # include < DB / IO / WriteHelpers . h > <nl> <nl> <nl> + namespace DB <nl> + { <nl> + <nl> / * * Allows to limit the speed of something ( in units per second ) with the help of sleep . <nl> * Peculiarities of operation : <nl> * - only the average speed is considered , from the moment of the first call of the add function ; <nl> * if there were periods with low speed , then for some period of time after them , the speed will be higher ; <nl> + * <nl> + * Also allows to set a limit on the maximum number of units . When exceeded , an exception is thrown . <nl> + * / <nl> class Throttler <nl> { <nl> public : <nl> - Throttler ( size_t max_speed_ ) : max_speed ( max_speed_ ) { } <nl> + Throttler ( size_t max_speed_ , size_t limit_ , const char * limit_exceeded_exception_message_ ) <nl> + : max_speed ( max_speed_ ) , limit ( limit_ ) , limit_exceeded_exception_message ( limit_exceeded_exception_message_ ) { } <nl> <nl> void add ( size_t amount ) <nl> { <nl> size_t new_count ; <nl> - UInt64 elapsed_ns ; <nl> + UInt64 elapsed_ns = 0 ; <nl> <nl> { <nl> std : : lock_guard < std : : mutex > lock ( mutex ) ; <nl> <nl> - if ( 0 = = count ) <nl> + if ( max_speed ) <nl> { <nl> - watch . start ( ) ; <nl> - elapsed_ns = 0 ; <nl> + if ( 0 = = count ) <nl> + { <nl> + watch . start ( ) ; <nl> + elapsed_ns = 0 ; <nl> + } <nl> + else <nl> + elapsed_ns = watch . elapsed ( ) ; <nl> } <nl> - else <nl> - elapsed_ns = watch . elapsed ( ) ; <nl> <nl> count + = amount ; <nl> new_count = count ; <nl> } <nl> <nl> - / / / How much time should have passed , if the speed was equal to max_speed .
<nl> - UInt64 desired_ns = new_count * 1000000000 / max_speed ; <nl> + if ( limit & & new_count > limit ) <nl> + throw Exception ( limit_exceeded_exception_message + std : : string ( " Maximum : " ) + toString ( limit ) , ErrorCodes : : LIMIT_EXCEEDED ) ; <nl> <nl> - if ( desired_ns > elapsed_ns ) <nl> + if ( max_speed ) <nl> { <nl> - UInt64 sleep_ns = desired_ns - elapsed_ns ; <nl> - timespec sleep_ts ; <nl> - sleep_ts . tv_sec = sleep_ns / 1000000000 ; <nl> - sleep_ts . tv_nsec = sleep_ns % 1000000000 ; <nl> - nanosleep ( & sleep_ts , nullptr ) ; / / / NOTE Finishes earlier in case of a signal . This is considered normal . <nl> + / / / How much time should have passed , if the speed was equal to max_speed . <nl> + UInt64 desired_ns = new_count * 1000000000 / max_speed ; <nl> + <nl> + if ( desired_ns > elapsed_ns ) <nl> + { <nl> + UInt64 sleep_ns = desired_ns - elapsed_ns ; <nl> + timespec sleep_ts ; <nl> + sleep_ts . tv_sec = sleep_ns / 1000000000 ; <nl> + sleep_ts . tv_nsec = sleep_ns % 1000000000 ; <nl> + nanosleep ( & sleep_ts , nullptr ) ; / / / NOTE Finishes earlier in case of a signal . This is considered normal . <nl> + } <nl> } <nl> } <nl> <nl> private : <nl> - size_t max_speed ; <nl> + size_t max_speed = 0 ; <nl> size_t count = 0 ; <nl> + size_t limit = 0 ; / / / 0 - not limited . <nl> + const char * limit_exceeded_exception_message = nullptr ; <nl> Stopwatch watch { CLOCK_MONOTONIC_COARSE } ; <nl> std : : mutex mutex ; <nl> } ; <nl> <nl> <nl> typedef std : : shared_ptr < Throttler > ThrottlerPtr ; <nl> + <nl> + } <nl> mmm a / dbms / include / DB / Common / Volnitsky . h <nl> ppp b / dbms / include / DB / Common / Volnitsky . h <nl> class Volnitsky <nl> / / / If not found , the end of the haystack is returned . <nl> const char * search ( const char * haystack , size_t haystack_size ) const <nl> { <nl> + if ( needle_size = = 0 ) <nl> + return haystack ; <nl> + <nl> const char * haystack_end = haystack + haystack_size ; <nl> <nl> if ( needle_size = = 1 ) <nl> mmm a / dbms / include / DB / Core / ErrorCodes . h <nl> ppp b / dbms / include / DB / Core / ErrorCodes . h <nl> namespace ErrorCodes <nl> UNKNOWN_FORMAT_VERSION = 287 , <nl> DISTRIBUTED_IN_JOIN_SUBQUERY_DENIED = 288 , <nl> REPLICA_IS_NOT_IN_QUORUM = 289 , <nl> + LIMIT_EXCEEDED = 290 , <nl> <nl> KEEPER_EXCEPTION = 999 , <nl> POCO_EXCEPTION = 1000 , <nl> mmm a / dbms / include / DB / Interpreters / Limits . h <nl> ppp b / dbms / include / DB / Interpreters / Limits . h <nl> struct Limits <nl> \ <nl> / * * Maximum speed of data exchange over the network in bytes per second . 0 - not limited . * / \ <nl> M ( SettingUInt64 , max_network_bandwidth , 0 ) \ <nl> + / * * Maximum number of bytes to receive or transmit over the network , within a single query . * / \ <nl> + M ( SettingUInt64 , max_network_bytes , 0 ) \ <nl> <nl> # define DECLARE ( TYPE , NAME , DEFAULT ) \ <nl> TYPE NAME { DEFAULT } ; <nl> mmm a / dbms / include / DB / Interpreters / Users . h <nl> ppp b / dbms / include / DB / Interpreters / Users . h <nl> <nl> # include < DB / IO / ReadHelpers . h > <nl> # include < DB / IO / HexWriteBuffer . h > <nl> # include < DB / IO / WriteBufferFromString . h > <nl> + # include < DB / IO / WriteHelpers . h > <nl> + # include < DB / Common / SimpleCache . h > <nl> <nl> # include < openssl / sha .
h > <nl> <nl> class HostExactPattern : public IAddressPattern <nl> private : <nl> String host ; <nl> <nl> - public : <nl> - HostExactPattern ( const String & host_ ) : host ( host_ ) { } <nl> - <nl> - bool contains ( const Poco : : Net : : IPAddress & addr ) const <nl> + static bool containsImpl ( const String & host , const Poco : : Net : : IPAddress & addr ) <nl> { <nl> Poco : : Net : : IPAddress addr_v6 = toIPv6 ( addr ) ; <nl> <nl> class HostExactPattern : public IAddressPattern <nl> <nl> return false ; <nl> } <nl> + <nl> + public : <nl> + HostExactPattern ( const String & host_ ) : host ( host_ ) { } <nl> + <nl> + bool contains ( const Poco : : Net : : IPAddress & addr ) const <nl> + { <nl> + static SimpleCache < decltype ( containsImpl ) , & containsImpl > cache ; <nl> + return cache ( host , addr ) ; <nl> + } <nl> } ; <nl> <nl> <nl> class HostRegexpPattern : public IAddressPattern <nl> private : <nl> Poco : : RegularExpression host_regexp ; <nl> <nl> - public : <nl> - HostRegexpPattern ( const String & host_regexp_ ) : host_regexp ( host_regexp_ ) { } <nl> - <nl> - bool contains ( const Poco : : Net : : IPAddress & addr ) const <nl> + static String getDomain ( const Poco : : Net : : IPAddress & addr ) <nl> { <nl> Poco : : Net : : SocketAddress sock_addr ( addr , 0 ) ; <nl> <nl> class HostRegexpPattern : public IAddressPattern <nl> if ( 0 ! = gai_errno ) <nl> throw Exception ( " Cannot getnameinfo : " + std : : string ( gai_strerror ( gai_errno ) ) , ErrorCodes : : DNS_ERROR ) ; <nl> <nl> - String domain_str = domain ; <nl> + return domain ; <nl> + } <nl> + <nl> + public : <nl> + HostRegexpPattern ( const String & host_regexp_ ) : host_regexp ( host_regexp_ ) { } <nl> + <nl> + bool contains ( const Poco : : Net : : IPAddress & addr ) const <nl> + { <nl> + static SimpleCache < decltype ( getDomain ) , & getDomain > cache ; <nl> + <nl> + String domain = cache ( addr ) ; <nl> Poco : : RegularExpression : : Match match ; <nl> <nl> - if ( host_regexp . match ( domain_str , match ) & & HostExactPattern ( domain_str ) . contains ( addr ) ) <nl> + if ( host_regexp . match ( domain , match ) & & HostExactPattern ( domain ) . contains ( addr ) ) <nl> return true ; <nl> <nl> return false ; <nl> mmm a / dbms / include / DB / Storages / MergeTree / MergeTreeBlockInputStream . h <nl> ppp b / dbms / include / DB / Storages / MergeTree / MergeTreeBlockInputStream . h <nl> class MergeTreeBlockInputStream : public IProfilingBlockInputStream <nl> else <nl> { <nl> size_t space_left = std : : max ( 1LU , block_size / storage . index_granularity ) ; <nl> - while ( ! remaining_mark_ranges . empty ( ) & & space_left ) <nl> + while ( ! remaining_mark_ranges . empty ( ) & & space_left & & ! isCancelled ( ) ) <nl> { <nl> MarkRange & range = remaining_mark_ranges . back ( ) ; <nl> <nl> mmm a / dbms / include / DB / Storages / MergeTree / MergeTreeReadPool . h <nl> ppp b / dbms / include / DB / Storages / MergeTree / MergeTreeReadPool . h <nl> class MergeTreeReadPool <nl> <nl> std : : vector < ThreadTask > threads_tasks ; <nl> <nl> - std : : unordered_set < std : : size_t > remaining_thread_tasks ; <nl> + std : : set < std : : size_t > remaining_thread_tasks ; <nl> <nl> mutable std : : mutex mutex ; <nl> } ; <nl> mmm a / dbms / include / DB / Storages / MergeTree / MergeTreeThreadBlockInputStream . h <nl> ppp b / dbms / include / DB / Storages / MergeTree / MergeTreeThreadBlockInputStream . 
h <nl> class MergeTreeThreadBlockInputStream : public IProfilingBlockInputStream <nl> { <nl> Block res ; <nl> <nl> - while ( ! res ) <nl> + while ( ! res & & ! isCancelled ( ) ) <nl> { <nl> if ( ! task & & ! getNewTask ( ) ) <nl> break ; <nl> class MergeTreeThreadBlockInputStream : public IProfilingBlockInputStream <nl> { <nl> size_t space_left = std : : max ( 1LU , block_size_marks ) ; <nl> <nl> - while ( ! task - > mark_ranges . empty ( ) & & space_left ) <nl> + while ( ! task - > mark_ranges . empty ( ) & & space_left & & ! isCancelled ( ) ) <nl> { <nl> auto & range = task - > mark_ranges . back ( ) ; <nl> <nl> mmm a / dbms / include / DB / Storages / MergeTree / MergeTreeWhereOptimizer . h <nl> ppp b / dbms / include / DB / Storages / MergeTree / MergeTreeWhereOptimizer . h <nl> class MergeTreeWhereOptimizer <nl> <nl> if ( ( primary_key_columns . count ( first_arg_name ) & & isConstant ( args [ 1 ] ) ) | | <nl> ( primary_key_columns . count ( second_arg_name ) & & isConstant ( args [ 0 ] ) ) | | <nl> - ( primary_key_columns . count ( first_arg_name ) & & typeid_cast < const ASTSet * > ( args [ 1 ] . get ( ) ) ) ) <nl> + ( primary_key_columns . count ( first_arg_name ) <nl> + & & ( typeid_cast < const ASTSet * > ( args [ 1 ] . get ( ) ) | | typeid_cast < const ASTSubquery * > ( args [ 1 ] . get ( ) ) ) ) ) <nl> return true ; <nl> } <nl> <nl> mmm a / dbms / include / DB / Storages / MergeTree / ReplicatedMergeTreeBlockOutputStream . h <nl> ppp b / dbms / include / DB / Storages / MergeTree / ReplicatedMergeTreeBlockOutputStream . h <nl> class ReplicatedMergeTreeBlockOutputStream : public IBlockOutputStream <nl> <nl> StorageReplicatedMergeTree : : LogEntry log_entry ; <nl> log_entry . type = StorageReplicatedMergeTree : : LogEntry : : GET_PART ; <nl> + log_entry . create_time = time ( 0 ) ; <nl> log_entry . source_replica = storage . replica_name ; <nl> log_entry . new_part_name = part_name ; <nl> log_entry . quorum = quorum ; <nl> + log_entry . block_id = block_id ; <nl> <nl> / / / Одновременно добавим информацию о куске во все нужные места в ZooKeeper и снимем block_number_lock . <nl> <nl> mmm a / dbms / include / DB / Storages / MergeTree / ReplicatedMergeTreeLogEntry . h <nl> ppp b / dbms / include / DB / Storages / MergeTree / ReplicatedMergeTreeLogEntry . h <nl> struct ReplicatedMergeTreeLogEntryData <nl> / / / Имя куска , получающегося в результате . <nl> / / / Для DROP_RANGE имя несуществующего куска . Нужно удалить все куски , покрытые им . <nl> String new_part_name ; <nl> + String block_id ; / / / Для кусков нулевого уровня - идентификатор блока для дедупликации ( имя ноды в / blocks / ) . <nl> <nl> Strings parts_to_merge ; <nl> <nl> new file mode 100644 <nl> index 00000000000 . . 8fd472e5478 <nl> mmm / dev / null <nl> ppp b / dbms / src / Common / tests / simple_cache . cpp <nl> <nl> + # include < iostream > <nl> + # include < DB / Common / SimpleCache . 
h > <nl> + <nl> + <nl> + int func ( int x , int y ) <nl> + { <nl> + std : : cerr < < x < < " + " < < y < < " \ n " ; <nl> + return x + y ; <nl> + } <nl> + <nl> + <nl> + int main ( int argc , char * * argv ) <nl> + { <nl> + SimpleCache < decltype ( func ) , & func > func_cached ; <nl> + <nl> + std : : cerr < < func_cached ( 1 , 2 ) < < " \ n " ; <nl> + std : : cerr < < func_cached ( 1 , 2 ) < < " \ n " ; <nl> + std : : cerr < < func_cached ( 1 , 2 ) < < " \ n " ; <nl> + std : : cerr < < func_cached ( 3 , 4 ) < < " \ n " ; <nl> + std : : cerr < < func_cached ( 3 , 4 ) < < " \ n " ; <nl> + std : : cerr < < func_cached ( 3 , 4 ) < < " \ n " ; <nl> + } <nl> mmm a / dbms / src / Interpreters / loadMetadata . cpp <nl> ppp b / dbms / src / Interpreters / loadMetadata . cpp <nl> <nl> # include < iomanip > <nl> + # include < thread > <nl> + # include < future > <nl> + <nl> + # include < statdaemons / threadpool . hpp > <nl> <nl> # include < Poco / DirectoryIterator . h > <nl> # include < Poco / FileStream . h > <nl> static void executeCreateQuery ( const String & query , Context & context , const St <nl> <nl> void loadMetadata ( Context & context ) <nl> { <nl> - / / / Create all tables atomically ( otherwise some table engine may manage to try to execute a query in a background thread ) . <nl> - Poco : : ScopedLock < Poco : : Mutex > lock ( context . getMutex ( ) ) ; <nl> - <nl> Logger * log = & Logger : : get ( " loadMetadata " ) ; <nl> <nl> / / / Table definitions are stored here <nl> String path = context . getPath ( ) + " metadata " ; <nl> <nl> + struct Table <nl> + { <nl> + String database_name ; <nl> + String dir_name ; <nl> + String file_name ; <nl> + } ; <nl> + <nl> + using Tables = std : : vector < Table > ; <nl> + Tables tables ; <nl> + <nl> / / / Loop over databases <nl> Poco : : DirectoryIterator dir_end ; <nl> for ( Poco : : DirectoryIterator it ( path ) ; it ! = dir_end ; + + it ) <nl> void loadMetadata ( Context & context ) <nl> <nl> String database = unescapeForFileName ( it . name ( ) ) ; <nl> <nl> - LOG_INFO ( log , " Loading database " < < database ) ; <nl> + LOG_INFO ( log , " Looking for tables in database " < < database ) ; <nl> executeCreateQuery ( " ATTACH DATABASE " + backQuoteIfNeed ( database ) , context , database , it - > path ( ) ) ; <nl> <nl> / / / Loop over tables <nl> - typedef std : : vector < std : : string > Tables ; <nl> - Tables tables ; <nl> + typedef std : : vector < std : : string > FileNames ; <nl> + FileNames file_names ; <nl> <nl> for ( Poco : : DirectoryIterator jt ( it - > path ( ) ) ; jt ! = dir_end ; + + jt ) <nl> { <nl> - if ( jt . name ( ) = = " . svn " ) <nl> + / / / For the . svn directory <nl> + if ( jt . name ( ) . at ( 0 ) = = ' . ' ) <nl> continue ; <nl> <nl> / / / Files have names of the form table_name . sql <nl> if ( jt . name ( ) . compare ( jt . name ( ) . size ( ) - 4 , 4 , " . sql " ) ) <nl> throw Exception ( " Incorrect file extension : " + jt . name ( ) + " in metadata directory " + it - > path ( ) , ErrorCodes : : INCORRECT_FILE_NAME ) ; <nl> <nl> - tables . push_back ( jt - > path ( ) ) ; <nl> + file_names . push_back ( jt . name ( ) ) ; <nl> } <nl> <nl> - LOG_INFO ( log , " Found " < < tables . size ( ) < < " tables . " ) ; <nl> - <nl> / * * Tables load faster if they are loaded in sorted ( by name ) order . <nl> * Otherwise ( for the ext4 filesystem ) DirectoryIterator iterates over them in some order <nl> * that corresponds neither to the order of table creation nor to the order of their location on disk .
<nl> * / <nl> - std : : sort ( tables . begin ( ) , tables . end ( ) ) ; <nl> + std : : sort ( file_names . begin ( ) , file_names . end ( ) ) ; <nl> + <nl> + for ( const auto & name : file_names ) <nl> + tables . emplace_back ( Table { <nl> + . database_name = database , <nl> + . dir_name = it . name ( ) , <nl> + . file_name = name } ) ; <nl> + <nl> + LOG_INFO ( log , " Found " < < file_names . size ( ) < < " tables . " ) ; <nl> + } <nl> + <nl> + size_t total_tables = tables . size ( ) ; <nl> + LOG_INFO ( log , " Total " < < total_tables < < " tables . " ) ; <nl> + <nl> + StopwatchWithLock watch ; <nl> + size_t tables_processed = 0 ; <nl> + <nl> + static constexpr size_t MIN_TABLES_TO_PARALLEL_LOAD = 1 ; <nl> + static constexpr size_t PRINT_MESSAGE_EACH_N_TABLES = 256 ; <nl> + static constexpr size_t PRINT_MESSAGE_EACH_N_SECONDS = 5 ; <nl> + static constexpr size_t METADATA_FILE_BUFFER_SIZE = 32768 ; <nl> + static constexpr size_t TABLES_PARALLEL_LOAD_BUNCH_SIZE = 100 ; <nl> <nl> - Stopwatch watch ; <nl> + size_t num_threads = std : : min ( total_tables , SettingMaxThreads ( ) . getAutoValue ( ) ) ; <nl> <nl> - for ( size_t j = 0 , size = tables . size ( ) ; j < size ; + + j ) <nl> + std : : unique_ptr < boost : : threadpool : : pool > thread_pool ; <nl> + if ( total_tables > MIN_TABLES_TO_PARALLEL_LOAD & & num_threads > 1 ) <nl> + thread_pool . reset ( new boost : : threadpool : : pool ( num_threads ) ) ; <nl> + <nl> + size_t bunch_size = TABLES_PARALLEL_LOAD_BUNCH_SIZE ; <nl> + if ( total_tables < bunch_size * num_threads ) <nl> + bunch_size = total_tables / num_threads ; <nl> + <nl> + auto task_function = [ & ] ( Tables : : const_iterator begin , Tables : : const_iterator end ) <nl> + { <nl> + for ( Tables : : const_iterator it = begin ; it ! = end ; + + it ) <nl> { <nl> + const Table & table = * it ; <nl> + const String path_to_metadata = path + " / " + table . dir_name + " / " + table . file_name ; <nl> + <nl> / / / Messages so that it is not boring to wait while the server takes a long time to load . <nl> - if ( j % 256 = = 0 & & watch . elapsedSeconds ( ) > 5 ) <nl> + if ( __sync_add_and_fetch ( & tables_processed , 1 ) % PRINT_MESSAGE_EACH_N_TABLES = = 0 <nl> + | | watch . lockTestAndRestart ( PRINT_MESSAGE_EACH_N_SECONDS ) ) <nl> { <nl> - LOG_INFO ( log , std : : fixed < < std : : setprecision ( 2 ) < < j * 100 . 0 / size < < " % " ) ; <nl> + LOG_INFO ( log , std : : fixed < < std : : setprecision ( 2 ) < < tables_processed * 100 . 0 / total_tables < < " % " ) ; <nl> watch . restart ( ) ; <nl> } <nl> <nl> String s ; <nl> { <nl> - static const size_t in_buf_size = 32768 ; <nl> - char in_buf [ in_buf_size ] ; <nl> - ReadBufferFromFile in ( tables [ j ] , 32768 , - 1 , in_buf ) ; <nl> + char in_buf [ METADATA_FILE_BUFFER_SIZE ] ; <nl> + ReadBufferFromFile in ( path_to_metadata , METADATA_FILE_BUFFER_SIZE , - 1 , in_buf ) ; <nl> WriteBufferFromString out ( s ) ; <nl> copyData ( in , out ) ; <nl> } <nl> <nl> / * * Empty metadata files appear after a rough server restart . <nl> - * We delete these files to slightly reduce the admins ' work on startup . <nl> - * / <nl> + * We delete these files to slightly reduce the admins ' work on startup . <nl> + * / <nl> if ( s . empty ( ) ) <nl> { <nl> - LOG_ERROR ( log , " File " < < tables [ j ] < < " is empty . Removing . " ) ; <nl> - Poco : : File ( tables [ j ] ) . remove ( ) ; <nl> + LOG_ERROR ( log , " File " < < path_to_metadata < < " is empty . Removing . " ) ; <nl> + Poco : : File ( path_to_metadata ) .
remove ( ) ; <nl> continue ; <nl> } <nl> <nl> try <nl> { <nl> - executeCreateQuery ( s , context , database , tables [ j ] ) ; <nl> + executeCreateQuery ( s , context , table . database_name , path_to_metadata ) ; <nl> } <nl> catch ( const Exception & e ) <nl> { <nl> - throw Exception ( " Cannot create table from metadata file " + tables [ j ] + " , error : " + e . displayText ( ) + <nl> + throw Exception ( " Cannot create table from metadata file " + path_to_metadata + " , error : " + e . displayText ( ) + <nl> " , stack trace : \ n " + e . getStackTrace ( ) . toString ( ) , <nl> ErrorCodes : : CANNOT_CREATE_TABLE_FROM_METADATA ) ; <nl> } <nl> } <nl> + } ; <nl> + <nl> + / * * packaged_task are used so that exceptions are automatically propagated to the main thread . <nl> + * The drawback : exceptions reach the main thread only after all tasks have finished . <nl> + * / <nl> + <nl> + size_t num_bunches = ( total_tables + bunch_size - 1 ) / bunch_size ; <nl> + std : : vector < std : : packaged_task < void ( ) > > tasks ( num_bunches ) ; <nl> + <nl> + for ( size_t i = 0 ; i < num_bunches ; + + i ) <nl> + { <nl> + auto begin = tables . begin ( ) + i * bunch_size ; <nl> + auto end = ( i + 1 = = num_bunches ) <nl> + ? tables . end ( ) <nl> + : ( tables . begin ( ) + ( i + 1 ) * bunch_size ) ; <nl> + <nl> + tasks [ i ] = std : : packaged_task < void ( ) > ( std : : bind ( task_function , begin , end ) ) ; <nl> + <nl> + if ( thread_pool ) <nl> + thread_pool - > schedule ( [ i , & tasks ] { tasks [ i ] ( ) ; } ) ; <nl> + else <nl> + tasks [ i ] ( ) ; <nl> } <nl> + <nl> + if ( thread_pool ) <nl> + thread_pool - > wait ( ) ; <nl> + <nl> + for ( auto & task : tasks ) <nl> + task . get_future ( ) . get ( ) ; <nl> } <nl> <nl> <nl> mmm a / dbms / src / Server / Server . cpp <nl> ppp b / dbms / src / Server / Server . cpp <nl> class PingRequestHandler : public Poco : : Net : : HTTPRequestHandler <nl> { <nl> try <nl> { <nl> - const char * data = " Ok . \ n " ; <nl> - response . sendBuffer ( data , strlen ( data ) ) ; <nl> + if ( request . getURI ( ) = = " / " | | request . getURI ( ) = = " / ping " ) <nl> + { <nl> + const char * data = " Ok . \ n " ; <nl> + response . sendBuffer ( data , strlen ( data ) ) ; <nl> + } <nl> + else <nl> + { <nl> + response . setStatusAndReason ( Poco : : Net : : HTTPResponse : : HTTP_NOT_FOUND ) ; <nl> + response . send ( ) < < " There is no handle " < < request . getURI ( ) < < " \ n \ n " <nl> + < < " Use / or / ping for health checks . \ n " <nl> + < < " Send queries from your program with POST method or GET / ? query = . . . \ n \ n " <nl> + < < " Use clickhouse - client : \ n \ n " <nl> + < < " For interactive data analysis : \ n " <nl> + < < " clickhouse - client \ n \ n " <nl> + < < " For batch query processing : \ n " <nl> + < < " clickhouse - client - - query = ' SELECT 1 ' > result \ n " <nl> + < < " clickhouse - client < query > result \ n " ; <nl> + } <nl> } <nl> catch ( . . . ) <nl> { <nl> class PingRequestHandler : public Poco : : Net : : HTTPRequestHandler <nl> } ; <nl> <nl> <nl> - template < typename HandlerType > <nl> + template < typename HandlerType > <nl> class HTTPRequestHandlerFactory : public Poco : : Net : : HTTPRequestHandlerFactory <nl> { <nl> private : <nl> class HTTPRequestHandlerFactory : public Poco : : Net : : HTTPRequestHandlerFactory <nl> < < " , Address : " < < request . clientAddress ( ) . toString ( ) <nl> < < " , User - Agent : " < < ( request . has ( " User - Agent " ) ? request .
get ( " User - Agent " ) : " none " ) ) ; <nl> <nl> - if ( request . getURI ( ) . find ( ' ? ' ) ! = std : : string : : npos | | request . getMethod ( ) = = Poco : : Net : : HTTPRequest : : HTTP_POST ) <nl> + if ( request . getURI ( ) . find ( ' ? ' ) ! = std : : string : : npos <nl> + | | request . getMethod ( ) = = Poco : : Net : : HTTPRequest : : HTTP_POST ) <nl> + { <nl> return new HandlerType ( server ) ; <nl> - else if ( request . getMethod ( ) = = Poco : : Net : : HTTPRequest : : HTTP_GET ) <nl> + } <nl> + else if ( request . getMethod ( ) = = Poco : : Net : : HTTPRequest : : HTTP_GET <nl> + | | request . getMethod ( ) = = Poco : : Net : : HTTPRequest : : HTTP_HEAD ) <nl> + { <nl> return new PingRequestHandler ( ) ; <nl> + } <nl> else <nl> - return 0 ; <nl> + return nullptr ; <nl> } <nl> } ; <nl> <nl> mmm a / dbms / src / Storages / MergeTree / MergeTreeDataSelectExecutor . cpp <nl> ppp b / dbms / src / Storages / MergeTree / MergeTreeDataSelectExecutor . cpp <nl> BlockInputStreams MergeTreeDataSelectExecutor : : spreadMarkRangesAmongThreads ( <nl> for ( std : : size_t i = 0 ; i < threads ; + + i ) <nl> { <nl> res . emplace_back ( new MergeTreeThreadBlockInputStream { <nl> - i , pool , min_marks_for_concurrent_read , max_block_size , data , use_uncompressed_cache , <nl> - prewhere_actions , <nl> - prewhere_column , settings , virt_columns <nl> + i , pool , min_marks_for_concurrent_read , max_block_size , data , use_uncompressed_cache , <nl> + prewhere_actions , <nl> + prewhere_column , settings , virt_columns <nl> } ) ; <nl> <nl> <nl> BlockInputStreams MergeTreeDataSelectExecutor : : spreadMarkRangesAmongThreadsFinal <nl> <nl> if ( settings . merge_tree_uniform_read_distribution = = 1 ) <nl> { <nl> + / / / Пусть отрезки будут перечислены справа налево , чтобы можно было выбрасывать самый левый отрезок с помощью pop_back ( ) . <nl> + for ( auto & part : parts ) <nl> + std : : reverse ( std : : begin ( part . ranges ) , std : : end ( part . ranges ) ) ; <nl> + <nl> MergeTreeReadPoolPtr pool = std : : make_shared < MergeTreeReadPool > ( <nl> parts . size ( ) , sum_marks , min_marks_for_read_task , parts , data , prewhere_actions , prewhere_column , true , <nl> column_names , true ) ; <nl> mmm a / dbms / src / Storages / MergeTree / ReplicatedMergeTreeCleanupThread . cpp <nl> ppp b / dbms / src / Storages / MergeTree / ReplicatedMergeTreeCleanupThread . cpp <nl> void ReplicatedMergeTreeCleanupThread : : clearOldBlocks ( ) <nl> timed_blocks . push_back ( std : : make_pair ( stat . czxid , block ) ) ; <nl> } <nl> <nl> - zkutil : : Ops ops ; <nl> + / / zkutil : : Ops ops ; <nl> std : : sort ( timed_blocks . begin ( ) , timed_blocks . end ( ) , std : : greater < std : : pair < Int64 , String > > ( ) ) ; <nl> - for ( size_t i = storage . data . settings . replicated_deduplication_window ; i < timed_blocks . size ( ) ; + + i ) <nl> + for ( size_t i = storage . data . settings . replicated_deduplication_window ; i < timed_blocks . size ( ) ; + + i ) <nl> { <nl> / / / Устаревшие ноды . Этот код можно будет убрать через пол года . <nl> zookeeper - > tryRemove ( storage . zookeeper_path + " / blocks / " + timed_blocks [ i ] . second + " / columns " ) ; <nl> zookeeper - > tryRemove ( storage . zookeeper_path + " / blocks / " + timed_blocks [ i ] . second + " / checksums " ) ; <nl> + zookeeper - > tryRemove ( storage . zookeeper_path + " / blocks / " + timed_blocks [ i ] . second + " / checksum " ) ; <nl> + zookeeper - > tryRemove ( storage . zookeeper_path + " / blocks / " + timed_blocks [ i ] . 
second + " / number " ) ; <nl> + zookeeper - > tryRemove ( storage . zookeeper_path + " / blocks / " + timed_blocks [ i ] . second ) ; <nl> <nl> - ops . push_back ( new zkutil : : Op : : Remove ( storage . zookeeper_path + " / blocks / " + timed_blocks [ i ] . second + " / number " , - 1 ) ) ; <nl> + / * ops . push_back ( new zkutil : : Op : : Remove ( storage . zookeeper_path + " / blocks / " + timed_blocks [ i ] . second + " / number " , - 1 ) ) ; <nl> ops . push_back ( new zkutil : : Op : : Remove ( storage . zookeeper_path + " / blocks / " + timed_blocks [ i ] . second + " / checksum " , - 1 ) ) ; <nl> ops . push_back ( new zkutil : : Op : : Remove ( storage . zookeeper_path + " / blocks / " + timed_blocks [ i ] . second , - 1 ) ) ; <nl> <nl> void ReplicatedMergeTreeCleanupThread : : clearOldBlocks ( ) <nl> { <nl> zookeeper - > multi ( ops ) ; <nl> ops . clear ( ) ; <nl> - } <nl> + } * / <nl> } <nl> <nl> LOG_TRACE ( log , " Cleared " < < blocks . size ( ) - storage . data . settings . replicated_deduplication_window < < " old blocks from ZooKeeper " ) ; <nl> mmm a / dbms / src / Storages / MergeTree / ReplicatedMergeTreeLogEntry . cpp <nl> ppp b / dbms / src / Storages / MergeTree / ReplicatedMergeTreeLogEntry . cpp <nl> void ReplicatedMergeTreeLogEntry : : tagPartAsFuture ( StorageReplicatedMergeTree & s <nl> <nl> void ReplicatedMergeTreeLogEntry : : writeText ( WriteBuffer & out ) const <nl> { <nl> - out < < " format version : 2 \ n " <nl> + out < < " format version : 3 \ n " <nl> < < " create_time : " < < mysqlxx : : DateTime ( create_time ? create_time : time ( 0 ) ) < < " \ n " <nl> - < < " source replica : " < < source_replica < < ' \ n ' ; <nl> + < < " source replica : " < < source_replica < < ' \ n ' <nl> + < < " block_id : " < < escape < < block_id < < ' \ n ' ; <nl> <nl> switch ( type ) <nl> { <nl> void ReplicatedMergeTreeLogEntry : : readText ( ReadBuffer & in ) <nl> <nl> in > > " format version : " > > format_version > > " \ n " ; <nl> <nl> - if ( format_version ! = 1 & & format_version ! = 2 ) <nl> + if ( format_version ! = 1 & & format_version ! = 2 & & format_version ! = 3 ) <nl> throw Exception ( " Unknown ReplicatedMergeTreeLogEntry format version : " + DB : : toString ( format_version ) , ErrorCodes : : UNKNOWN_FORMAT_VERSION ) ; <nl> <nl> - if ( format_version = = 2 ) <nl> + if ( format_version > = 2 ) <nl> { <nl> mysqlxx : : DateTime create_time_dt ; <nl> in > > " create_time : " > > create_time_dt > > " \ n " ; <nl> create_time = create_time_dt ; <nl> } <nl> <nl> - in > > " source replica : " > > source_replica > > " \ n " <nl> - > > type_str > > " \ n " ; <nl> + in > > " source replica : " > > source_replica > > " \ n " ; <nl> + <nl> + if ( format_version > = 3 ) <nl> + { <nl> + in > > " block_id : " > > escape > > block_id > > " \ n " ; <nl> + } <nl> + <nl> + in > > type_str > > " \ n " ; <nl> <nl> if ( type_str = = " get " ) <nl> { <nl> mmm a / dbms / src / Storages / StorageDistributed . cpp <nl> ppp b / dbms / src / Storages / StorageDistributed . cpp <nl> BlockInputStreams StorageDistributed : : read ( <nl> { <nl> Settings new_settings = settings ; <nl> new_settings . queue_max_wait_ms = Cluster : : saturate ( new_settings . queue_max_wait_ms , settings . limits . max_execution_time ) ; <nl> + / / / Не имеет смысла на удалённых серверах , так как запрос отправляется обычно с другим user - ом . <nl> + new_settings . max_concurrent_queries_for_user = 0 ; <nl> <nl> size_t result_size = ( cluster . pools . size ( ) * settings . max_parallel_replicas ) + cluster . 
getLocalNodesNum ( ) ; <nl> <nl> BlockInputStreams StorageDistributed : : read ( <nl> <nl> / / / Limiting network traffic , if necessary . <nl> ThrottlerPtr throttler ; <nl> - if ( settings . limits . max_network_bandwidth ) <nl> - throttler . reset ( new Throttler ( settings . limits . max_network_bandwidth ) ) ; <nl> + if ( settings . limits . max_network_bandwidth | | settings . limits . max_network_bytes ) <nl> + throttler . reset ( new Throttler ( <nl> + settings . limits . max_network_bandwidth , <nl> + settings . limits . max_network_bytes , <nl> + " Limit for bytes to send or receive over network exceeded . " ) ) ; <nl> <nl> Tables external_tables ; <nl> <nl> mmm a / dbms / src / Storages / StorageReplicatedMergeTree . cpp <nl> ppp b / dbms / src / Storages / StorageReplicatedMergeTree . cpp <nl> StorageReplicatedMergeTree : : StorageReplicatedMergeTree ( <nl> String unreplicated_path = full_path + " unreplicated / " ; <nl> if ( Poco : : File ( unreplicated_path ) . exists ( ) ) <nl> { <nl> - LOG_INFO ( log , " Have unreplicated data " ) ; <nl> - <nl> unreplicated_data . reset ( new MergeTreeData ( unreplicated_path , columns_ , <nl> materialized_columns_ , alias_columns_ , column_defaults_ , <nl> context_ , primary_expr_ast_ , <nl> StorageReplicatedMergeTree : : StorageReplicatedMergeTree ( <nl> <nl> unreplicated_data - > loadDataParts ( skip_sanity_checks ) ; <nl> <nl> - unreplicated_reader . reset ( new MergeTreeDataSelectExecutor ( * unreplicated_data ) ) ; <nl> - unreplicated_merger . reset ( new MergeTreeDataMerger ( * unreplicated_data ) ) ; <nl> + if ( unreplicated_data - > getDataPartsVector ( ) . empty ( ) ) <nl> + { <nl> + unreplicated_data . reset ( ) ; <nl> + } <nl> + else <nl> + { <nl> + LOG_INFO ( log , " Have unreplicated data " ) ; <nl> + unreplicated_reader . reset ( new MergeTreeDataSelectExecutor ( * unreplicated_data ) ) ; <nl> + unreplicated_merger . reset ( new MergeTreeDataMerger ( * unreplicated_data ) ) ; <nl> + } <nl> } <nl> <nl> loadQueue ( ) ; <nl> bool StorageReplicatedMergeTree : : executeLogEntry ( const LogEntry & entry , Backgro <nl> if ( entry . type ! = LogEntry : : GET_PART ) <nl> throw Exception ( " Logical error : log entry with quorum but type is not GET_PART " , ErrorCodes : : LOGICAL_ERROR ) ; <nl> <nl> + if ( entry . block_id . empty ( ) ) <nl> + throw Exception ( " Logical error : log entry with quorum have empty block_id " , ErrorCodes : : LOGICAL_ERROR ) ; <nl> + <nl> LOG_DEBUG ( log , " No active replica has part " < < entry . new_part_name < < " which needs to be written with quorum . " <nl> " Will try to mark that quorum as failed . " ) ; <nl> <nl> bool StorageReplicatedMergeTree : : executeLogEntry ( const LogEntry & entry , Backgro <nl> * - if a quorum node with this part exists ; <nl> * - delete the quorum node ; <nl> * - set nonincrement_block_numbers to allow merges through the number of the lost part ; <nl> - * - add the part to the quorum / failed_parts list . <nl> - * <nl> - * TODO Deletion from blocks . <nl> + * - add the part to the quorum / failed_parts list ; <nl> + * - if the part has not yet been removed from the blocks / block_num deduplication list , remove it ; <nl> * <nl> * If anything changes , we will do nothing - we will get here again next time . <nl> * / <nl> bool StorageReplicatedMergeTree : : executeLogEntry ( const LogEntry & entry , Backgro <nl> acl , <nl> zkutil : : CreateMode : : Persistent ) ) ; <nl> <nl> + / / / Deletion from blocks .
<nl> + if ( zookeeper - > exists ( zookeeper_path + " / blocks / " + entry . block_id ) ) <nl> + { <nl> + ops . push_back ( new zkutil : : Op : : Remove ( zookeeper_path + " / blocks / " + entry . block_id + " / number " , - 1 ) ) ; <nl> + ops . push_back ( new zkutil : : Op : : Remove ( zookeeper_path + " / blocks / " + entry . block_id + " / checksum " , - 1 ) ) ; <nl> + ops . push_back ( new zkutil : : Op : : Remove ( zookeeper_path + " / blocks / " + entry . block_id , - 1 ) ) ; <nl> + } <nl> + <nl> auto code = zookeeper - > tryMulti ( ops ) ; <nl> <nl> if ( code = = ZOK ) <nl> bool StorageReplicatedMergeTree : : queueTask ( BackgroundProcessingPool : : Context & p <nl> } <nl> else <nl> { <nl> - + + entry - > num_postponed ; <nl> - entry - > last_postpone_time = time ( 0 ) ; <nl> + + + ( * it ) - > num_postponed ; <nl> + ( * it ) - > last_postpone_time = time ( 0 ) ; <nl> } <nl> } <nl> } <nl> void StorageReplicatedMergeTree : : mergeSelectingThread ( ) <nl> entry . type = LogEntry : : MERGE_PARTS ; <nl> entry . source_replica = replica_name ; <nl> entry . new_part_name = merged_name ; <nl> + entry . create_time = time ( 0 ) ; <nl> <nl> for ( const auto & part : parts ) <nl> entry . parts_to_merge . push_back ( part - > name ) ; <nl> void StorageReplicatedMergeTree : : dropPartition ( const Field & field , bool detach , <nl> entry . detach = detach ; <nl> String log_znode_path = zookeeper - > create ( zookeeper_path + " / log / log - " , entry . toString ( ) , zkutil : : CreateMode : : PersistentSequential ) ; <nl> entry . znode_name = log_znode_path . substr ( log_znode_path . find_last_of ( ' / ' ) + 1 ) ; <nl> + entry . create_time = time ( 0 ) ; <nl> <nl> / / / If necessary , wait for the operation to complete on ourselves or on all replicas . <nl> if ( settings . replication_alter_partitions_sync ! = 0 ) <nl> void StorageReplicatedMergeTree : : attachPartition ( const Field & field , bool unrep <nl> entry . source_part_name = part_name ; <nl> entry . new_part_name = new_part_name ; <nl> entry . attach_unreplicated = unreplicated ; <nl> + entry . create_time = time ( 0 ) ; <nl> + <nl> ops . push_back ( new zkutil : : Op : : Create ( <nl> zookeeper_path + " / log / log - " , entry . toString ( ) , zookeeper - > getDefaultACL ( ) , zkutil : : CreateMode : : PersistentSequential ) ) ; <nl> } <nl> new file mode 100644 <nl> index 00000000000 . . 8c84198e359 <nl> mmm / dev / null <nl> ppp b / dbms / tests / queries / 0_stateless / 00233_position_function_family .
reference <nl> <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 
<nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> + 1 <nl> new file mode 100644 <nl> index 00000000000 . . 35f93715d49 <nl> mmm / dev / null <nl> ppp b / dbms / tests / queries / 0_stateless / 00233_position_function_family . sql <nl> <nl> + select 1 = position ( ' ' , ' ' ) ; <nl> + select 1 = position ( ' abc ' , ' ' ) ; <nl> + select 0 = position ( ' ' , ' abc ' ) ; <nl> + select 1 = position ( ' abc ' , ' abc ' ) ; <nl> + select 2 = position ( ' abc ' , ' bc ' ) ; <nl> + select 3 = position ( ' abc ' , ' c ' ) ; <nl> + <nl> + select 1 = position ( materialize ( ' ' ) , ' ' ) ; <nl> + select 1 = position ( materialize ( ' abc ' ) , ' ' ) ; <nl> + select 0 = position ( materialize ( ' ' ) , ' abc ' ) ; <nl> + select 1 = position ( materialize ( ' abc ' ) , ' abc ' ) ; <nl> + select 2 = position ( materialize ( ' abc ' ) , ' bc ' ) ; <nl> + select 3 = position ( materialize ( ' abc ' ) , ' c ' ) ; <nl> + <nl> + select 1 = position ( materialize ( ' ' ) , ' ' ) from system . numbers limit 10 ; <nl> + select 1 = position ( materialize ( ' abc ' ) , ' ' ) from system . numbers limit 10 ; <nl> + select 0 = position ( materialize ( ' ' ) , ' abc ' ) from system . 
numbers limit 10 ; <nl> + select 1 = position ( materialize ( ' abc ' ) , ' abc ' ) from system . numbers limit 10 ; <nl> + select 2 = position ( materialize ( ' abc ' ) , ' bc ' ) from system . numbers limit 10 ; <nl> + select 3 = position ( materialize ( ' abc ' ) , ' c ' ) from system . numbers limit 10 ; <nl> + <nl> + select 1 = position ( ' ' , ' ' ) ; <nl> + select 1 = position ( ' абв ' , ' ' ) ; <nl> + select 0 = position ( ' ' , ' абв ' ) ; <nl> + select 1 = position ( ' абв ' , ' абв ' ) ; <nl> + select 3 = position ( ' абв ' , ' бв ' ) ; <nl> + select 5 = position ( ' абв ' , ' в ' ) ; <nl> + <nl> + select 1 = position ( materialize ( ' ' ) , ' ' ) ; <nl> + select 1 = position ( materialize ( ' абв ' ) , ' ' ) ; <nl> + select 0 = position ( materialize ( ' ' ) , ' абв ' ) ; <nl> + select 1 = position ( materialize ( ' абв ' ) , ' абв ' ) ; <nl> + select 3 = position ( materialize ( ' абв ' ) , ' бв ' ) ; <nl> + select 5 = position ( materialize ( ' абв ' ) , ' в ' ) ; <nl> + <nl> + select 1 = position ( materialize ( ' ' ) , ' ' ) from system . numbers limit 10 ; <nl> + select 1 = position ( materialize ( ' абв ' ) , ' ' ) from system . numbers limit 10 ; <nl> + select 0 = position ( materialize ( ' ' ) , ' абв ' ) from system . numbers limit 10 ; <nl> + select 1 = position ( materialize ( ' абв ' ) , ' абв ' ) from system . numbers limit 10 ; <nl> + select 3 = position ( materialize ( ' абв ' ) , ' бв ' ) from system . numbers limit 10 ; <nl> + select 5 = position ( materialize ( ' абв ' ) , ' в ' ) from system . numbers limit 10 ; <nl> + <nl> + select 1 = positionUTF8 ( ' ' , ' ' ) ; <nl> + select 1 = positionUTF8 ( ' абв ' , ' ' ) ; <nl> + select 0 = positionUTF8 ( ' ' , ' абв ' ) ; <nl> + select 1 = positionUTF8 ( ' абв ' , ' абв ' ) ; <nl> + select 2 = positionUTF8 ( ' абв ' , ' бв ' ) ; <nl> + select 3 = positionUTF8 ( ' абв ' , ' в ' ) ; <nl> + <nl> + select 1 = positionUTF8 ( materialize ( ' ' ) , ' ' ) ; <nl> + select 1 = positionUTF8 ( materialize ( ' абв ' ) , ' ' ) ; <nl> + select 0 = positionUTF8 ( materialize ( ' ' ) , ' абв ' ) ; <nl> + select 1 = positionUTF8 ( materialize ( ' абв ' ) , ' абв ' ) ; <nl> + select 2 = positionUTF8 ( materialize ( ' абв ' ) , ' бв ' ) ; <nl> + select 3 = positionUTF8 ( materialize ( ' абв ' ) , ' в ' ) ; <nl> + <nl> + select 1 = positionUTF8 ( materialize ( ' ' ) , ' ' ) from system . numbers limit 10 ; <nl> + select 1 = positionUTF8 ( materialize ( ' абв ' ) , ' ' ) from system . numbers limit 10 ; <nl> + select 0 = positionUTF8 ( materialize ( ' ' ) , ' абв ' ) from system . numbers limit 10 ; <nl> + select 1 = positionUTF8 ( materialize ( ' абв ' ) , ' абв ' ) from system . numbers limit 10 ; <nl> + select 2 = positionUTF8 ( materialize ( ' абв ' ) , ' бв ' ) from system . numbers limit 10 ; <nl> + select 3 = positionUTF8 ( materialize ( ' абв ' ) , ' в ' ) from system . 
numbers limit 10 ; <nl> + <nl> + select 1 = positionCaseInsensitive ( ' ' , ' ' ) ; <nl> + select 1 = positionCaseInsensitive ( ' abc ' , ' ' ) ; <nl> + select 0 = positionCaseInsensitive ( ' ' , ' aBc ' ) ; <nl> + select 1 = positionCaseInsensitive ( ' abc ' , ' aBc ' ) ; <nl> + select 2 = positionCaseInsensitive ( ' abc ' , ' Bc ' ) ; <nl> + select 3 = positionCaseInsensitive ( ' abc ' , ' C ' ) ; <nl> + <nl> + select 1 = positionCaseInsensitive ( materialize ( ' ' ) , ' ' ) ; <nl> + select 1 = positionCaseInsensitive ( materialize ( ' abc ' ) , ' ' ) ; <nl> + select 0 = positionCaseInsensitive ( materialize ( ' ' ) , ' aBc ' ) ; <nl> + select 1 = positionCaseInsensitive ( materialize ( ' abc ' ) , ' aBc ' ) ; <nl> + select 2 = positionCaseInsensitive ( materialize ( ' abc ' ) , ' Bc ' ) ; <nl> + select 3 = positionCaseInsensitive ( materialize ( ' abc ' ) , ' C ' ) ; <nl> + <nl> + select 1 = positionCaseInsensitive ( materialize ( ' ' ) , ' ' ) from system . numbers limit 10 ; <nl> + select 1 = positionCaseInsensitive ( materialize ( ' abc ' ) , ' ' ) from system . numbers limit 10 ; <nl> + select 0 = positionCaseInsensitive ( materialize ( ' ' ) , ' aBc ' ) from system . numbers limit 10 ; <nl> + select 1 = positionCaseInsensitive ( materialize ( ' abc ' ) , ' aBc ' ) from system . numbers limit 10 ; <nl> + select 2 = positionCaseInsensitive ( materialize ( ' abc ' ) , ' Bc ' ) from system . numbers limit 10 ; <nl> + select 3 = positionCaseInsensitive ( materialize ( ' abc ' ) , ' C ' ) from system . numbers limit 10 ; <nl> + <nl> + select 1 = positionCaseInsensitive ( ' ' , ' ' ) ; <nl> + select 1 = positionCaseInsensitive ( ' абв ' , ' ' ) ; <nl> + select 0 = positionCaseInsensitive ( ' ' , ' аБв ' ) ; <nl> + select 0 = positionCaseInsensitive ( ' абв ' , ' аБв ' ) ; <nl> + select 0 = positionCaseInsensitive ( ' абв ' , ' Бв ' ) ; <nl> + select 0 = positionCaseInsensitive ( ' абв ' , ' В ' ) ; <nl> + <nl> + select 1 = positionCaseInsensitive ( materialize ( ' ' ) , ' ' ) ; <nl> + select 1 = positionCaseInsensitive ( materialize ( ' абв ' ) , ' ' ) ; <nl> + select 0 = positionCaseInsensitive ( materialize ( ' ' ) , ' аБв ' ) ; <nl> + select 0 = positionCaseInsensitive ( materialize ( ' абв ' ) , ' аБв ' ) ; <nl> + select 0 = positionCaseInsensitive ( materialize ( ' абв ' ) , ' Бв ' ) ; <nl> + select 0 = positionCaseInsensitive ( materialize ( ' абв ' ) , ' В ' ) ; <nl> + <nl> + select 1 = positionCaseInsensitive ( materialize ( ' ' ) , ' ' ) from system . numbers limit 10 ; <nl> + select 1 = positionCaseInsensitive ( materialize ( ' абв ' ) , ' ' ) from system . numbers limit 10 ; <nl> + select 0 = positionCaseInsensitive ( materialize ( ' ' ) , ' аБв ' ) from system . numbers limit 10 ; <nl> + select 0 = positionCaseInsensitive ( materialize ( ' абв ' ) , ' аБв ' ) from system . numbers limit 10 ; <nl> + select 0 = positionCaseInsensitive ( materialize ( ' абв ' ) , ' Бв ' ) from system . numbers limit 10 ; <nl> + select 0 = positionCaseInsensitive ( materialize ( ' абв ' ) , ' В ' ) from system . 
numbers limit 10 ; <nl> + <nl> + select 1 = positionCaseInsensitiveUTF8 ( ' ' , ' ' ) ; <nl> + select 1 = positionCaseInsensitiveUTF8 ( ' абв ' , ' ' ) ; <nl> + select 0 = positionCaseInsensitiveUTF8 ( ' ' , ' аБв ' ) ; <nl> + select 1 = positionCaseInsensitiveUTF8 ( ' абв ' , ' аБв ' ) ; <nl> + select 2 = positionCaseInsensitiveUTF8 ( ' абв ' , ' Бв ' ) ; <nl> + select 3 = positionCaseInsensitiveUTF8 ( ' абв ' , ' в ' ) ; <nl> + <nl> + select 1 = positionCaseInsensitiveUTF8 ( materialize ( ' ' ) , ' ' ) ; <nl> + select 1 = positionCaseInsensitiveUTF8 ( materialize ( ' абв ' ) , ' ' ) ; <nl> + select 0 = positionCaseInsensitiveUTF8 ( materialize ( ' ' ) , ' аБв ' ) ; <nl> + select 1 = positionCaseInsensitiveUTF8 ( materialize ( ' абв ' ) , ' аБв ' ) ; <nl> + select 2 = positionCaseInsensitiveUTF8 ( materialize ( ' абв ' ) , ' Бв ' ) ; <nl> + select 3 = positionCaseInsensitiveUTF8 ( materialize ( ' абв ' ) , ' В ' ) ; <nl> + <nl> + select 1 = positionCaseInsensitiveUTF8 ( materialize ( ' ' ) , ' ' ) from system . numbers limit 10 ; <nl> + select 1 = positionCaseInsensitiveUTF8 ( materialize ( ' абв ' ) , ' ' ) from system . numbers limit 10 ; <nl> + select 0 = positionCaseInsensitiveUTF8 ( materialize ( ' ' ) , ' аБв ' ) from system . numbers limit 10 ; <nl> + select 1 = positionCaseInsensitiveUTF8 ( materialize ( ' абв ' ) , ' аБв ' ) from system . numbers limit 10 ; <nl> + select 2 = positionCaseInsensitiveUTF8 ( materialize ( ' абв ' ) , ' Бв ' ) from system . numbers limit 10 ; <nl> + select 3 = positionCaseInsensitiveUTF8 ( materialize ( ' абв ' ) , ' В ' ) from system . numbers limit 10 ; <nl> + <nl> + select position ( ' ' as h , ' ' as n ) = positionCaseInsensitive ( h , n ) ; <nl> + select position ( ' abc ' as h , ' ' as n ) = positionCaseInsensitive ( n , n ) ; <nl> + select 0 = positionCaseInsensitive ( ' ' , ' aBc ' ) ; <nl> + select position ( ' abc ' as h , lower ( ' aBc ' as n ) ) = positionCaseInsensitive ( h , n ) ; <nl> + select position ( ' abc ' as h , lower ( ' Bc ' as n ) ) = positionCaseInsensitive ( h , n ) ; <nl> + select position ( ' abc ' as h , lower ( ' C ' as n ) ) = positionCaseInsensitive ( h , n ) ; <nl> + <nl> + select positionCaseInsensitive ( materialize ( ' ' ) as h , ' ' as n ) = positionCaseInsensitive ( h , n ) ; <nl> + select positionCaseInsensitive ( materialize ( ' abc ' ) as h , ' ' as n ) = positionCaseInsensitive ( h , n ) ; <nl> + select positionCaseInsensitive ( materialize ( ' ' ) as h , lower ( ' aBc ' as n ) ) = positionCaseInsensitive ( h , n ) ; <nl> + select positionCaseInsensitive ( materialize ( ' abc ' ) as h , lower ( ' aBc ' as n ) ) = positionCaseInsensitive ( h , n ) ; <nl> + select positionCaseInsensitive ( materialize ( ' abc ' ) as h , lower ( ' Bc ' as n ) ) = positionCaseInsensitive ( h , n ) ; <nl> + select positionCaseInsensitive ( materialize ( ' abc ' ) as h , lower ( ' C ' as n ) ) = positionCaseInsensitive ( h , n ) ; <nl> + <nl> + select position ( materialize ( ' ' ) as h , lower ( ' ' as n ) ) = positionCaseInsensitive ( h , n ) from system . numbers limit 10 ; <nl> + select position ( materialize ( ' abc ' ) as h , lower ( ' ' as n ) ) = positionCaseInsensitive ( h , n ) from system . numbers limit 10 ; <nl> + select position ( materialize ( ' ' ) as h , lower ( ' aBc ' as n ) ) = positionCaseInsensitive ( h , n ) from system . numbers limit 10 ; <nl> + select position ( materialize ( ' abc ' ) as h , lower ( ' aBc ' as n ) ) = positionCaseInsensitive ( h , n ) from system . 
numbers limit 10 ; <nl> + select position ( materialize ( ' abc ' ) as h , lower ( ' Bc ' as n ) ) = positionCaseInsensitive ( h , n ) from system . numbers limit 10 ; <nl> + select position ( materialize ( ' abc ' ) as h , lower ( ' C ' as n ) ) = positionCaseInsensitive ( h , n ) from system . numbers limit 10 ; <nl> + <nl> + select position ( ' ' as h , lower ( ' ' as n ) ) = positionCaseInsensitive ( h , n ) ; <nl> + select position ( ' абв ' as h , lower ( ' ' as n ) ) = positionCaseInsensitive ( h , n ) ; <nl> + select position ( ' ' as h , lower ( ' аБв ' as n ) ) = positionCaseInsensitive ( h , n ) ; <nl> + select position ( ' абв ' as h , lower ( ' аБв ' as n ) ) = positionCaseInsensitive ( h , n ) ; <nl> + select position ( ' абв ' as h , lower ( ' Бв ' as n ) ) = positionCaseInsensitive ( h , n ) ; <nl> + select position ( ' абв ' as h , lower ( ' В ' as n ) ) = positionCaseInsensitive ( h , n ) ; <nl> + <nl> + select position ( materialize ( ' ' ) as h , lower ( ' ' as n ) ) = positionCaseInsensitive ( h , n ) ; <nl> + select position ( materialize ( ' абв ' ) as h , lower ( ' ' as n ) ) = positionCaseInsensitive ( h , n ) ; <nl> + select position ( materialize ( ' ' ) as h , lower ( ' аБв ' as n ) ) = positionCaseInsensitive ( h , n ) ; <nl> + select position ( materialize ( ' абв ' ) as h , lower ( ' аБв ' as n ) ) = positionCaseInsensitive ( h , n ) ; <nl> + select position ( materialize ( ' абв ' ) as h , lower ( ' Бв ' as n ) ) = positionCaseInsensitive ( h , n ) ; <nl> + select position ( materialize ( ' абв ' ) as h , lower ( ' В ' as n ) ) = positionCaseInsensitive ( h , n ) ; <nl> + <nl> + select position ( materialize ( ' ' ) as h , lower ( ' ' as n ) ) = positionCaseInsensitive ( h , n ) ; <nl> + select position ( materialize ( ' абв ' ) as h , lower ( ' ' as n ) ) = positionCaseInsensitive ( h , n ) ; <nl> + select position ( materialize ( ' ' ) as h , lower ( ' аБв ' as n ) ) = positionCaseInsensitive ( h , n ) ; <nl> + select position ( materialize ( ' абв ' ) as h , lower ( ' аБв ' as n ) ) = positionCaseInsensitive ( h , n ) ; <nl> + select position ( materialize ( ' абв ' ) as h , lower ( ' Бв ' as n ) ) = positionCaseInsensitive ( h , n ) ; <nl> + select position ( materialize ( ' абв ' ) as h , lower ( ' В ' as n ) ) = positionCaseInsensitive ( h , n ) ; <nl> + <nl> + select position ( materialize ( ' ' ) as h , lower ( ' ' as n ) ) = positionCaseInsensitive ( h , n ) from system . numbers limit 10 ; <nl> + select position ( materialize ( ' абв ' ) as h , lower ( ' ' as n ) ) = positionCaseInsensitive ( h , n ) from system . numbers limit 10 ; <nl> + select position ( materialize ( ' ' ) as h , lower ( ' аБв ' as n ) ) = positionCaseInsensitive ( h , n ) from system . numbers limit 10 ; <nl> + select position ( materialize ( ' абв ' ) as h , lower ( ' аБв ' as n ) ) = positionCaseInsensitive ( h , n ) from system . numbers limit 10 ; <nl> + select position ( materialize ( ' абв ' ) as h , lower ( ' Бв ' as n ) ) = positionCaseInsensitive ( h , n ) from system . numbers limit 10 ; <nl> + select position ( materialize ( ' абв ' ) as h , lower ( ' В ' as n ) ) = positionCaseInsensitive ( h , n ) from system . 
numbers limit 10 ; <nl> + <nl> + select positionUTF8 ( ' ' as h , lowerUTF8 ( ' ' as n ) ) = positionCaseInsensitiveUTF8 ( h , n ) ; <nl> + select positionUTF8 ( ' абв ' as h , lowerUTF8 ( ' ' as n ) ) = positionCaseInsensitiveUTF8 ( h , n ) ; <nl> + select positionUTF8 ( ' ' as h , lowerUTF8 ( ' аБв ' as n ) ) = positionCaseInsensitiveUTF8 ( h , n ) ; <nl> + select positionUTF8 ( ' абв ' as h , lowerUTF8 ( ' аБв ' as n ) ) = positionCaseInsensitiveUTF8 ( h , n ) ; <nl> + select positionUTF8 ( ' абв ' as h , lowerUTF8 ( ' Бв ' as n ) ) = positionCaseInsensitiveUTF8 ( h , n ) ; <nl> + select positionUTF8 ( ' абв ' as h , lowerUTF8 ( ' в ' as n ) ) = positionCaseInsensitiveUTF8 ( h , n ) ; <nl> + <nl> + select positionUTF8 ( materialize ( ' ' ) as h , lowerUTF8 ( ' ' as n ) ) = positionCaseInsensitiveUTF8 ( h , n ) ; <nl> + select positionUTF8 ( materialize ( ' абв ' ) as h , lowerUTF8 ( ' ' as n ) ) = positionCaseInsensitiveUTF8 ( h , n ) ; <nl> + select positionUTF8 ( materialize ( ' ' ) as h , lowerUTF8 ( ' аБв ' as n ) ) = positionCaseInsensitiveUTF8 ( h , n ) ; <nl> + select positionUTF8 ( materialize ( ' абв ' ) as h , lowerUTF8 ( ' аБв ' as n ) ) = positionCaseInsensitiveUTF8 ( h , n ) ; <nl> + select positionUTF8 ( materialize ( ' абв ' ) as h , lowerUTF8 ( ' Бв ' as n ) ) = positionCaseInsensitiveUTF8 ( h , n ) ; <nl> + select positionUTF8 ( materialize ( ' абв ' ) as h , lowerUTF8 ( ' В ' as n ) ) = positionCaseInsensitiveUTF8 ( h , n ) ; <nl> + <nl> + select positionUTF8 ( materialize ( ' ' ) as h , lowerUTF8 ( ' ' as n ) ) = positionCaseInsensitiveUTF8 ( h , n ) from system . numbers limit 10 ; <nl> + select positionUTF8 ( materialize ( ' абв ' ) as h , lowerUTF8 ( ' ' as n ) ) = positionCaseInsensitiveUTF8 ( h , n ) from system . numbers limit 10 ; <nl> + select positionUTF8 ( materialize ( ' ' ) as h , lowerUTF8 ( ' аБв ' as n ) ) = positionCaseInsensitiveUTF8 ( h , n ) from system . numbers limit 10 ; <nl> + select positionUTF8 ( materialize ( ' абв ' ) as h , lowerUTF8 ( ' аБв ' as n ) ) = positionCaseInsensitiveUTF8 ( h , n ) from system . numbers limit 10 ; <nl> + select positionUTF8 ( materialize ( ' абв ' ) as h , lowerUTF8 ( ' Бв ' as n ) ) = positionCaseInsensitiveUTF8 ( h , n ) from system . numbers limit 10 ; <nl> + select positionUTF8 ( materialize ( ' абв ' ) as h , lowerUTF8 ( ' В ' as n ) ) = positionCaseInsensitiveUTF8 ( h , n ) from system . numbers limit 10 ; <nl>
Merge
ClickHouse/ClickHouse
872f339b55708cc5d6293d7a3bc6cae2712a3b0e
2015-09-28T14:04:34Z
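The ClickHouse record above centers on extending the Throttler: besides capping throughput at max_speed bytes per second, it now enforces a hard cumulative byte limit (max_network_bytes) and throws LIMIT_EXCEEDED once that limit is crossed. Below is a minimal Python sketch of the same arithmetic; the class shape, field names, and exception type are illustrative stand-ins (the real class also guards its state with a mutex), not the ClickHouse API.

import time


class Throttler:
    """Sketch of the throttling logic from the diff above: track a
    cumulative byte count, fail hard once it exceeds `limit`, and
    sleep off any time we are ahead of the `max_speed` schedule.
    A value of 0 disables either check."""

    def __init__(self, max_speed=0, limit=0, message="Limit exceeded."):
        self.max_speed = max_speed   # bytes per second, 0 = unthrottled
        self.limit = limit           # total bytes, 0 = unlimited
        self.message = message
        self.count = 0
        self.start = time.monotonic()

    def add(self, amount):
        self.count += amount
        if self.limit and self.count > self.limit:
            raise RuntimeError(self.message + " Maximum: " + str(self.limit))
        if self.max_speed:
            # How much time should have passed if the speed were max_speed.
            desired = self.count / self.max_speed
            elapsed = time.monotonic() - self.start
            if desired > elapsed:
                time.sleep(desired - elapsed)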
mmm a / HelloWorld / HelloWorldScene . cpp <nl> ppp b / HelloWorld / HelloWorldScene . cpp <nl> bool HelloWorld : : init ( ) <nl> this - > addChild ( pLabel , 1 ) ; <nl> <nl> / / add " HelloWorld " splash screen " <nl> - CCSprite * pSprite = CCSprite : : spriteWithFile ( " HelloWorld . jpg " ) ; <nl> + CCSprite * pSprite = CCSprite : : spriteWithFile ( " HelloWorld . png " ) ; <nl> <nl> / / position the sprite on the center of the screen <nl> pSprite - > setPosition ( ccp ( size . width / 2 , size . height / 2 ) ) ; <nl> mmm a / HelloWorld / ios / HelloWorld . xcodeproj / project . pbxproj . REMOVED . git - id <nl> ppp b / HelloWorld / ios / HelloWorld . xcodeproj / project . pbxproj . REMOVED . git - id <nl> @ @ - 1 + 1 @ @ <nl> - 977fc2620c9879ea835bef5413e5de4e388f4f80 <nl> \ No newline at end of file <nl> + ddf527fc54bb0586b0185c164970f7a358011815 <nl> \ No newline at end of file <nl> mmm a / tests / test . ios / test . xcodeproj / project . pbxproj . REMOVED . git - id <nl> ppp b / tests / test . ios / test . xcodeproj / project . pbxproj . REMOVED . git - id <nl> @ @ - 1 + 1 @ @ <nl> - 5325d62af0d795d9e35e024fc6033a18791e8dea <nl> \ No newline at end of file <nl> + f9f3d9b77e34101bdfe79de659bd2eb18b725119 <nl> \ No newline at end of file <nl>
[ ios ] HelloWorld . jpg - > . png , xcode project from iOS4 . 1 to 4 . 2 " Latest iOS "
cocos2d/cocos2d-x
db1f54ee451e00e9576c1f8db7c3a7ca6d138063
2011-03-14T10:31:16Z
mmm a / tensorflow / python / keras / engine / functional . py <nl> ppp b / tensorflow / python / keras / engine / functional . py <nl> def _should_skip_first_node ( layer ) : <nl> # Networks that are constructed with an Input layer / shape start with a <nl> # pre - existing node linking their input to output . This node is excluded from <nl> # the network config . <nl> - return ( isinstance ( layer , Functional ) and <nl> - # Filter out Sequential models without an input shape . <nl> - isinstance ( layer . _self_tracked_trackables [ 0 ] , <nl> - input_layer_module . InputLayer ) ) <nl> + if layer . _self_tracked_trackables : <nl> + return ( isinstance ( layer , Functional ) and <nl> + # Filter out Sequential models without an input shape . <nl> + isinstance ( layer . _self_tracked_trackables [ 0 ] , <nl> + input_layer_module . InputLayer ) ) <nl> + else : <nl> + return isinstance ( layer , Functional ) <nl> <nl> <nl> def connect_ancillary_layers ( model , created_layers ) : <nl>
Solve IndexError : list index out of range
tensorflow/tensorflow
aa59c42debb5146da4f9192321c92fe06eaec35d
2020-12-06T12:06:29Z
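The one-hunk fix above is a plain empty-container guard: layer._self_tracked_trackables[0] raised IndexError for Sequential models with no layers, so the index is taken only when the list is non-empty. A hedged Python sketch of the repaired predicate follows, with generic type arguments standing in for the Keras classes.

def should_skip_first_node(layer, functional_type, input_layer_type):
    # Guard the [0] index: an empty tracked list no longer raises
    # IndexError, mirroring the control flow of the patch above.
    tracked = getattr(layer, "_self_tracked_trackables", None)
    if tracked:
        return (isinstance(layer, functional_type) and
                isinstance(tracked[0], input_layer_type))
    return isinstance(layer, functional_type)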
mmm a / aten / src / ATen / gen . py <nl> ppp b / aten / src / ATen / gen . py <nl> def check_all_files_written ( self ) : <nl> " never written : { } " . format ( self . filenames ) ) <nl> <nl> <nl> - <nl> TEMPLATE_PATH = options . source_path + " / templates " <nl> GENERATOR_DERIVED = CodeTemplate . from_file ( <nl> TEMPLATE_PATH + " / GeneratorDerived . h " ) <nl> def declare_outputs ( ) : <nl> file_manager . will_write ( fname ) <nl> for backend , density , scalar_types in iterate_types ( ) : <nl> scalar_name = scalar_types [ 0 ] <nl> - full_backend = " Sparse " + backend if density = = " Sparse " else backend <nl> + full_backend = " Sparse " + backend if density = = " Sparse " else backend <nl> for kind in [ " Storage " , " Type " , " Tensor " ] : <nl> if kind = = ' Storage ' and density = = " Sparse " : <nl> continue <nl> def generate_outputs ( ) : <nl> for fname , env in generators . items ( ) : <nl> file_manager . write ( fname , GENERATOR_DERIVED . substitute ( env ) ) <nl> <nl> - <nl> # note : this will fill in top_env [ ' type / tensor_method_declarations / definitions ' ] <nl> # and modify the declarations to include any information that will all_backends <nl> # be used by function_wrapper . create_derived <nl> mmm a / setup . py <nl> ppp b / setup . py <nl> def build_libs ( libs ) : <nl> build_libs_cmd = [ ' bash ' , ' torch / lib / build_libs . sh ' ] <nl> my_env = os . environ . copy ( ) <nl> my_env [ " PYTORCH_PYTHON " ] = sys . executable <nl> + if WITH_NINJA : <nl> + my_env [ " CMAKE_GENERATOR " ] = ' - GNinja ' <nl> + my_env [ " CMAKE_INSTALL " ] = ' ninja install ' <nl> + else : <nl> + my_env [ ' CMAKE_GENERATOR ' ] = ' ' <nl> + my_env [ ' CMAKE_INSTALL ' ] = ' make install ' <nl> if WITH_SYSTEM_NCCL : <nl> my_env [ " NCCL_ROOT_DIR " ] = NCCL_ROOT_DIR <nl> if WITH_CUDA : <nl> mmm a / torch / lib / build_libs . sh <nl> ppp b / torch / lib / build_libs . sh <nl> function build ( ) { <nl> * ) BUILD_C_FLAGS = $ C_FLAGS " - fexceptions " ; ; <nl> esac <nl> $ { CMAKE_VERSION } . . / . . / $ 1 - DCMAKE_MODULE_PATH = " $ BASE_DIR / cmake / FindCUDA " \ <nl> + $ { CMAKE_GENERATOR } \ <nl> - DTorch_FOUND = " 1 " \ <nl> - DCMAKE_INSTALL_PREFIX = " $ INSTALL_DIR " \ <nl> - DCMAKE_C_FLAGS = " $ BUILD_C_FLAGS " \ <nl> function build ( ) { <nl> - DCMAKE_BUILD_TYPE = $ ( [ $ DEBUG ] & & echo Debug | | echo Release ) \ <nl> $ { @ : 2 } \ <nl> - DCMAKE_EXPORT_COMPILE_COMMANDS = 1 <nl> - make install - j $ ( getconf _NPROCESSORS_ONLN ) <nl> + $ { CMAKE_INSTALL } - j $ ( getconf _NPROCESSORS_ONLN ) <nl> cd . . / . . <nl> <nl> local lib_prefix = $ INSTALL_DIR / lib / lib $ 1 <nl> function build_nccl ( ) { <nl> - DCMAKE_INSTALL_PREFIX = " $ INSTALL_DIR " \ <nl> - DCMAKE_C_FLAGS = " $ C_FLAGS " \ <nl> - DCMAKE_CXX_FLAGS = " $ C_FLAGS $ CPP_FLAGS " <nl> - make install <nl> + $ { CMAKE_INSTALL } <nl> mkdir - p $ { INSTALL_DIR } / lib <nl> cp " lib / libnccl . so . 1 " " $ { INSTALL_DIR } / lib / libnccl . so . 1 " <nl> if [ ! - f " $ { INSTALL_DIR } / lib / libnccl . so " ] ; then <nl> function build_aten ( ) { <nl> mkdir - p build / aten <nl> cd build / aten <nl> $ { CMAKE_VERSION } . . / . . / . . / . . 
/ aten \ <nl> + $ { CMAKE_GENERATOR } \ <nl> - DCMAKE_BUILD_TYPE = $ ( [ $ DEBUG ] & & echo Debug | | echo Release ) \ <nl> - DNO_CUDA = $ ( ( 1 - $ WITH_CUDA ) ) \ <nl> - DCUDNN_INCLUDE_DIR = $ CUDNN_INCLUDE_DIR \ <nl> function build_aten ( ) { <nl> - DCMAKE_INSTALL_PREFIX = " $ INSTALL_DIR " \ <nl> - DCMAKE_EXPORT_COMPILE_COMMANDS = 1 <nl> # purpusefully not passing C_FLAGS for the same reason as above <nl> - make - j $ ( getconf _NPROCESSORS_ONLN ) install <nl> + $ { CMAKE_INSTALL } - j $ ( getconf _NPROCESSORS_ONLN ) <nl> cd . . / . . <nl> } <nl> <nl>
Use ninja as the cmake backend as well .
pytorch/pytorch
9e46fca424ee3f64467eb15eb7260507f9e76061
2017-12-04T19:16:26Z
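The pytorch record threads the generator choice through environment variables: setup.py decides once between ninja and make, and build_libs.sh substitutes ${CMAKE_GENERATOR} and ${CMAKE_INSTALL} into its cmake and install invocations. A rough Python sketch of the setup.py side (the script path is taken from the diff; everything else is illustrative):

import os
import subprocess


def build_libs(with_ninja):
    # Export the generator flag and install command once; the shell
    # build script reads them from the environment, as in the diff.
    env = os.environ.copy()
    if with_ninja:
        env["CMAKE_GENERATOR"] = "-GNinja"
        env["CMAKE_INSTALL"] = "ninja install"
    else:
        env["CMAKE_GENERATOR"] = ""
        env["CMAKE_INSTALL"] = "make install"
    subprocess.check_call(["bash", "torch/lib/build_libs.sh"], env=env)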
mmm a / dbms / src / DataStreams / IBlockInputStream . cpp <nl> ppp b / dbms / src / DataStreams / IBlockInputStream . cpp <nl> void IBlockInputStream : : dumpTree ( std : : ostream & ostr , size_t indent , size_t mult <nl> ostr < < String ( indent , ' ' ) < < getName ( ) ; <nl> if ( multiplier > 1 ) <nl> ostr < < " × " < < multiplier ; <nl> - ostr < < " : " < < getHeader ( ) . dumpStructure ( ) ; <nl> + / / ostr < < " : " < < getHeader ( ) . dumpStructure ( ) ; <nl> ostr < < std : : endl ; <nl> + + indent ; <nl> <nl>
Better UNION ALL : development
ClickHouse/ClickHouse
8cb640ac5469334d20265dbfed739cd0fd111193
2018-03-01T06:08:09Z
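The small ClickHouse hunk above mutes the per-node header dump in IBlockInputStream::dumpTree while keeping the node name, the "× multiplier" suffix for collapsed duplicate children, and the indent-based recursion. A toy Python sketch of that dump shape, with illustrative names and a simplified (name, children) tree:

import sys


def dump_tree(name, children, ostr=sys.stdout, indent=0, multiplier=1):
    # Print the node name, append " x N" when N identical subtrees were
    # collapsed, then recurse one indent level deeper.
    line = " " * indent + name
    if multiplier > 1:
        line += " x " + str(multiplier)
    print(line, file=ostr)
    for child_name, grandchildren in children:
        dump_tree(child_name, grandchildren, ostr, indent + 1)

# Example: dump_tree("Union", [("Expression", []), ("Expression", [])])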
mmm a / tensorflow / core / framework / lookup_interface . cc <nl> ppp b / tensorflow / core / framework / lookup_interface . cc <nl> Status LookupInterface : : CheckFindArguments ( const Tensor & key , <nl> const Tensor & default_value ) { <nl> TF_RETURN_IF_ERROR ( CheckKeyAndValueTypes ( key , default_value ) ) ; <nl> TF_RETURN_IF_ERROR ( CheckKeyShape ( key . shape ( ) ) ) ; <nl> - if ( default_value . shape ( ) ! = value_shape ( ) ) { <nl> + TensorShape fullsize_value_shape = key . shape ( ) ; <nl> + for ( int i = 0 ; i < key_shape ( ) . dims ( ) ; + + i ) { <nl> + fullsize_value_shape . RemoveDim ( fullsize_value_shape . dims ( ) - 1 ) ; <nl> + } <nl> + fullsize_value_shape . AppendShape ( value_shape ( ) ) ; <nl> + if ( default_value . shape ( ) ! = value_shape ( ) & & <nl> + default_value . shape ( ) ! = fullsize_value_shape ) { <nl> return errors : : InvalidArgument ( <nl> - " Expected shape " , value_shape ( ) . DebugString ( ) , <nl> - " for default value , got " , default_value . shape ( ) . DebugString ( ) ) ; <nl> + " Expected shape " , value_shape ( ) . DebugString ( ) , " or " , <nl> + fullsize_value_shape . DebugString ( ) , " for default value , got " , <nl> + default_value . shape ( ) . DebugString ( ) ) ; <nl> } <nl> return Status : : OK ( ) ; <nl> } <nl> mmm a / tensorflow / core / framework / lookup_interface . h <nl> ppp b / tensorflow / core / framework / lookup_interface . h <nl> class LookupInterface : public ResourceBase { <nl> / / requirements are satisfied , otherwise it returns InvalidArgument : <nl> / / - DataType of the tensor keys equals to the table key_dtype <nl> / / - DataType of the tensor default_value equals to the table value_dtype <nl> - / / - the default_value tensor shape matches the table ' s value shape . <nl> + / / - the default_value tensor has the required shape given keys and the <nl> + / / tables ' s value shape . <nl> Status CheckFindArguments ( const Tensor & keys , const Tensor & default_value ) ; <nl> <nl> string DebugString ( ) const override { <nl> mmm a / tensorflow / core / kernels / lookup_table_op . cc <nl> ppp b / tensorflow / core / kernels / lookup_table_op . cc <nl> class MutableHashTableOfScalars final : public LookupInterface { <nl> <nl> Status Find ( OpKernelContext * ctx , const Tensor & key , Tensor * value , <nl> const Tensor & default_value ) override { <nl> - const V default_val = default_value . flat < V > ( ) ( 0 ) ; <nl> const auto key_values = key . flat < K > ( ) ; <nl> auto value_values = value - > flat < V > ( ) ; <nl> + const auto default_flat = default_value . flat < V > ( ) ; <nl> + <nl> + int64 total = value_values . size ( ) ; <nl> + int64 default_total = default_flat . size ( ) ; <nl> + bool is_full_size_default = ( total = = default_total ) ; <nl> <nl> tf_shared_lock l ( mu_ ) ; <nl> for ( int64 i = 0 ; i < key_values . size ( ) ; + + i ) { <nl> + / / is_full_size_default is true : <nl> + / / Each key has an independent default value , key_values ( i ) <nl> + / / corresponding uses default_flat ( i ) as its default value . <nl> + / / <nl> + / / is_full_size_default is false : <nl> + / / All keys will share the default_flat ( 0 ) as default value . <nl> value_values ( i ) = gtl : : FindWithDefault ( <nl> - table_ , SubtleMustCopyIfIntegral ( key_values ( i ) ) , default_val ) ; <nl> + table_ , SubtleMustCopyIfIntegral ( key_values ( i ) ) , <nl> + is_full_size_default ? 
default_flat ( i ) : default_flat ( 0 ) ) ; <nl> } <nl> <nl> return Status : : OK ( ) ; <nl> } <nl> class MutableHashTableOfTensors final : public LookupInterface { <nl> <nl> Status Find ( OpKernelContext * ctx , const Tensor & key , Tensor * value , <nl> const Tensor & default_value ) override { <nl> - const auto default_flat = default_value . flat < V > ( ) ; <nl> + const auto default_flat = default_value . flat_inner_dims < V , 2 > ( ) ; <nl> const auto key_values = key . flat < K > ( ) ; <nl> auto value_values = value - > flat_inner_dims < V , 2 > ( ) ; <nl> int64 value_dim = value_shape_ . dim_size ( 0 ) ; <nl> <nl> + int64 total = value_values . size ( ) ; <nl> + int64 default_total = default_flat . size ( ) ; <nl> + bool is_full_size_default = ( total = = default_total ) ; <nl> + <nl> tf_shared_lock l ( mu_ ) ; <nl> for ( int64 i = 0 ; i < key_values . size ( ) ; + + i ) { <nl> ValueArray * value_vec = <nl> class MutableHashTableOfTensors final : public LookupInterface { <nl> value_values ( i , j ) = value_vec - > at ( j ) ; <nl> } <nl> } else { <nl> + / / is_full_size_default is true : <nl> + / / Each key has an independent default value , key_values ( i ) <nl> + / / correspondingly uses default_flat ( i ) as its default value . <nl> + / / <nl> + / / is_full_size_default is false : <nl> + / / All keys will share the default_flat ( 0 ) as default value . <nl> for ( int64 j = 0 ; j < value_dim ; j + + ) { <nl> - value_values ( i , j ) = default_flat ( j ) ; <nl> + value_values ( i , j ) = <nl> + is_full_size_default ? default_flat ( i , j ) : default_flat ( 0 , j ) ; <nl> } <nl> } <nl> } <nl> mmm a / tensorflow / core / ops / lookup_ops . cc <nl> ppp b / tensorflow / core / ops / lookup_ops . cc <nl> REGISTER_OP ( " LookupTableFindV2 " ) <nl> ShapeHandle handle ; <nl> TF_RETURN_IF_ERROR ( c - > WithRank ( c - > input ( 0 ) , 0 , & handle ) ) ; <nl> <nl> - / / Default value must be scalar or vector . <nl> - ShapeHandle keys ; <nl> - TF_RETURN_IF_ERROR ( c - > WithRankAtMost ( c - > input ( 2 ) , 1 , & keys ) ) ; <nl> - <nl> ShapeAndType value_shape_and_type ; <nl> TF_RETURN_IF_ERROR ( ValidateTableResourceHandle ( <nl> c , <nl> mmm a / tensorflow / core / ops / lookup_ops_test . cc <nl> ppp b / tensorflow / core / ops / lookup_ops_test . cc <nl> namespace { <nl> TEST ( LookupOpsTest , LookupTableFindV2_ShapeFn ) { <nl> ShapeInferenceTestOp op ( " LookupTableFindV2 " ) ; <nl> INFER_ERROR ( " Shape must be rank 0 but is rank 1 " , op , " [ ? ] ; ? ; ? " ) ; <nl> - INFER_ERROR ( " Shape must be at most rank 1 but is rank 2 " , op , " [ ] ; ? ; [ 1 , 1 ] " ) ; <nl> TF_ASSERT_OK ( NodeDefBuilder ( " test " , " LookupTableFindV2 " ) <nl> . Input ( { " table_handle " , 0 , DT_RESOURCE } ) <nl> . Input ( { " keys " , 0 , DT_INT64 } ) <nl> mmm a / tensorflow / python / kernel_tests / lookup_ops_test . py <nl> ppp b / tensorflow / python / kernel_tests / lookup_ops_test . py <nl> def testMutableHashTableFindHighRank ( self ) : <nl> result = self . evaluate ( output ) <nl> self . assertAllEqual ( [ [ 0 , 1 ] , [ - 1 , - 1 ] ] , result ) <nl> <nl> + def testMutableHashTableFindWithInvalidShapeDefaultValue ( self ) : <nl> + default_val = [ - 1 , - 1 ] <nl> + table = lookup_ops . MutableHashTable ( dtypes . string , dtypes . int64 , <nl> + default_val ) <nl> + <nl> + input_string = constant_op . constant ( [ [ " brain " , " salad " ] , <nl> + [ " tank " , " tarkus " ] ] ) <nl> + <nl> + invalid_default_val = constant_op . 
constant ( <nl> + [ [ - 2 , - 3 ] , [ - 4 , - 5 ] , [ - 6 , - 7 ] , [ - 8 , - 9 ] ] , dtypes . int64 ) <nl> + <nl> + with self . assertRaisesRegex ( <nl> + ( ValueError , errors_impl . InvalidArgumentError ) , <nl> + " Expected shape \ [ 2 \ ] or \ [ 2 , 2 , 2 \ ] for default value , got \ [ 4 , 2 ] " ) : <nl> + self . evaluate ( table . lookup ( input_string , invalid_default_val ) ) <nl> + <nl> + invalid_default_val = constant_op . constant ( [ [ [ - 2 , - 3 ] , [ - 4 , - 5 ] ] ] , <nl> + dtypes . int64 ) <nl> + with self . assertRaisesRegex ( <nl> + ( ValueError , errors_impl . InvalidArgumentError ) , <nl> + " Expected shape \ [ 2 \ ] or \ [ 2 , 2 , 2 \ ] for default value , got \ [ 1 , 2 , 2 \ ] " ) : <nl> + self . evaluate ( table . lookup ( input_string , invalid_default_val ) ) <nl> + <nl> + def testMutableHashTableFindHighRankScalarWithDynamicDefaultValue ( self ) : <nl> + default_val = - 1 <nl> + keys = constant_op . constant ( [ " brain " , " salad " , " surgery " ] ) <nl> + values = constant_op . constant ( [ 0 , 1 , 2 ] , dtypes . int64 ) <nl> + table = lookup_ops . MutableHashTable ( dtypes . string , dtypes . int64 , <nl> + default_val ) <nl> + <nl> + self . evaluate ( table . insert ( keys , values ) ) <nl> + self . assertAllEqual ( 3 , self . evaluate ( table . size ( ) ) ) <nl> + <nl> + input_string = constant_op . constant ( [ [ " brain " , " salad " ] , <nl> + [ " tank " , " tarkus " ] ] ) <nl> + <nl> + dynamic_default_val = constant_op . constant ( [ [ - 2 , - 3 ] , [ - 4 , - 5 ] ] , <nl> + dtypes . int64 ) <nl> + output = table . lookup ( input_string , dynamic_default_val ) <nl> + self . assertAllEqual ( [ 2 , 2 ] , output . get_shape ( ) ) <nl> + <nl> + result = self . evaluate ( output ) <nl> + self . assertAllEqual ( [ [ 0 , 1 ] , [ - 4 , - 5 ] ] , result ) <nl> + <nl> + def testMutableHashTableFindHighRankVectorWithDynamicDefaultValue ( self ) : <nl> + default_val = [ - 1 , - 1 ] <nl> + keys = constant_op . constant ( [ " brain " , " salad " , " surgery " ] ) <nl> + values = constant_op . constant ( [ [ 0 , 1 ] , [ 2 , 3 ] , [ 4 , 5 ] ] , dtypes . int64 ) <nl> + table = lookup_ops . MutableHashTable ( dtypes . string , dtypes . int64 , <nl> + default_val ) <nl> + <nl> + self . evaluate ( table . insert ( keys , values ) ) <nl> + self . assertAllEqual ( 3 , self . evaluate ( table . size ( ) ) ) <nl> + <nl> + input_string = constant_op . constant ( [ [ " brain " , " salad " ] , <nl> + [ " tank " , " tarkus " ] ] ) <nl> + <nl> + dynamic_default_val = constant_op . constant ( <nl> + [ [ [ - 2 , - 3 ] , [ - 4 , - 5 ] ] , [ [ - 6 , - 7 ] , [ - 8 , - 9 ] ] ] , dtypes . int64 ) <nl> + output = table . lookup ( input_string , dynamic_default_val ) <nl> + self . assertAllEqual ( [ 2 , 2 , 2 ] , output . get_shape ( ) ) <nl> + <nl> + result = self . evaluate ( output ) <nl> + self . assertAllEqual ( [ [ [ 0 , 1 ] , [ 2 , 3 ] ] , [ [ - 6 , - 7 ] , [ - 8 , - 9 ] ] ] , result ) <nl> + <nl> def testMutableHashTableInsertHighRank ( self ) : <nl> default_val = - 1 <nl> keys = constant_op . constant ( [ [ " brain " , " salad " ] , [ " surgery " , " tank " ] ] ) <nl> mmm a / tensorflow / python / ops / lookup_ops . py <nl> ppp b / tensorflow / python / ops / lookup_ops . py <nl> def remove ( self , keys , name = None ) : <nl> <nl> return op <nl> <nl> - def lookup ( self , keys , name = None ) : <nl> + def lookup ( self , keys , dynamic_default_values = None , name = None ) : <nl> " " " Looks up ` keys ` in a table , outputs the corresponding values . 
<nl> <nl> The ` default_value ` is used for keys not present in the table . <nl> def lookup ( self , keys , name = None ) : <nl> Args : <nl> keys : Keys to look up . Can be a tensor of any shape . Must match the <nl> table ' s key_dtype . <nl> + dynamic_default_values : The values to use if a key is missing in the <nl> + table . If None ( by default ) , the ` table . default_value ` will be used . <nl> + Shape of ` dynamic_default_values ` must be the same as <nl> + ` table . default_value ` or the lookup result tensor . <nl> + In the latter case , each key will have a different default value . <nl> + <nl> + For example : <nl> + <nl> + ` ` ` python <nl> + keys = [ 0 , 1 , 3 ] <nl> + dynamic_default_values = [ [ 1 , 3 , 4 ] , [ 2 , 3 , 9 ] , [ 8 , 3 , 0 ] ] <nl> + <nl> + # The key ' 0 ' will use [ 1 , 3 , 4 ] as default value . <nl> + # The key ' 1 ' will use [ 2 , 3 , 9 ] as default value . <nl> + # The key ' 3 ' will use [ 8 , 3 , 0 ] as default value . <nl> + ` ` ` <nl> + <nl> name : A name for the operation ( optional ) . <nl> <nl> Returns : <nl> def lookup ( self , keys , name = None ) : <nl> ( self . resource_handle , keys , self . _default_value ) ) : <nl> keys = ops . convert_to_tensor ( keys , dtype = self . _key_dtype , name = " keys " ) <nl> with ops . colocate_with ( self . resource_handle ) : <nl> - values = gen_lookup_ops . lookup_table_find_v2 ( self . resource_handle , keys , <nl> - self . _default_value ) <nl> + values = gen_lookup_ops . lookup_table_find_v2 ( <nl> + self . resource_handle , keys , dynamic_default_values <nl> + if dynamic_default_values is not None else self . _default_value ) <nl> return values <nl> <nl> def insert ( self , keys , values , name = None ) : <nl>
MutableHashTable lookup: support full-size dynamic default values.
tensorflow/tensorflow
3b11daf77e3e8c0d06a4968e2cae0fdd8af634c7
2020-10-26T11:05:22Z
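The docstring in the diff above documents the new `dynamic_default_values` argument. A minimal usage sketch of the feature, assuming a TensorFlow build that includes this change (the class is exposed as `tf.lookup.experimental.MutableHashTable` in TF 2.x; the keys and values here are illustrative):

```python
import tensorflow as tf

# A mutable table with a scalar fallback default of -1.
table = tf.lookup.experimental.MutableHashTable(
    key_dtype=tf.string, value_dtype=tf.int64, default_value=-1)
table.insert(tf.constant(["brain", "salad"]),
             tf.constant([0, 1], dtype=tf.int64))

keys = tf.constant(["brain", "tank"])

# Per-key defaults: the default tensor has the same shape as the lookup
# result ("full size"), so each missing key falls back to its own slot.
per_key_defaults = tf.constant([-2, -3], dtype=tf.int64)
print(table.lookup(keys, dynamic_default_values=per_key_defaults))
# -> [0, -3]: "brain" is found, "tank" takes its per-key default.
```

Because `is_full_size_default` is decided by comparing element counts, passing a default tensor shaped like `table.default_value` instead preserves the old shared-default behavior.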
mmm a / src / library_syscall . js <nl> ppp b / src / library_syscall . js <nl> mergeInto ( LibraryManager . library , { <nl> path = SYSCALLS . calculateAt ( dirfd , path ) ; <nl> return SYSCALLS . doAccess ( path , amode ) ; <nl> } <nl> + case 308 : { / / pselect <nl> + return - ERRNO_CODES . ENOSYS ; / / unsupported feature <nl> + } <nl> case 324 : { / / fallocate <nl> var stream = getStreamFromFD ( ) , mode = get ( ) , offset = get64 ( ) , len = get64 ( ) ; <nl> assert ( mode = = = 0 ) ; <nl>
pselect syscall stub
emscripten-core/emscripten
a3621a17efe9d6976e4d29127bb62fc1c1e2744a
2015-06-01T21:15:58Z
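The stub follows a standard pattern: an unimplemented syscall reports `ENOSYS` instead of aborting, so callers can detect the missing feature and fall back. A small Python sketch of the same dispatch idea; the table and handler names are illustrative, not Emscripten's actual API:

```python
import errno

def sys_pselect(*args):
    # pselect is unsupported; report ENOSYS like the stub above.
    return -errno.ENOSYS

# Syscall number 308 is pselect, matching the case label in the diff.
SYSCALL_TABLE = {308: sys_pselect}

def do_syscall(number, *args):
    handler = SYSCALL_TABLE.get(number)
    if handler is None:
        return -errno.ENOSYS  # unknown syscall
    return handler(*args)

assert do_syscall(308) == -errno.ENOSYS
```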
mmm a / include / swift / SIL / SILGlobalVariable . h <nl> ppp b / include / swift / SIL / SILGlobalVariable . h <nl> class SILGlobalVariable <nl> : public llvm : : ilist_node < SILGlobalVariable > , <nl> public SILAllocated < SILGlobalVariable > <nl> { <nl> + public : <nl> + using const_iterator = SILBasicBlock : : const_iterator ; <nl> + <nl> private : <nl> friend class SILModule ; <nl> friend class SILBuilder ; <nl> class SILGlobalVariable <nl> return dyn_cast_or_null < ObjectInst > ( getStaticInitializerValue ( ) ) ! = nullptr ; <nl> } <nl> <nl> + const_iterator begin ( ) const { return StaticInitializerBlock . begin ( ) ; } <nl> + const_iterator end ( ) const { return StaticInitializerBlock . end ( ) ; } <nl> + <nl> / / / Returns true if \ p I is a valid instruction to be contained in the <nl> / / / static initializer . <nl> static bool isValidStaticInitializerInst ( const SILInstruction * I , <nl> mmm a / lib / SIL / IR / SILPrinter . cpp <nl> ppp b / lib / SIL / IR / SILPrinter . cpp <nl> void SILGlobalVariable : : print ( llvm : : raw_ostream & OS , bool Verbose ) const { <nl> printClangQualifiedNameCommentIfPresent ( OS , getClangDecl ( ) ) ; <nl> <nl> OS < < " sil_global " ; <nl> - printLinkage ( OS , getLinkage ( ) , isDefinition ( ) ) ; <nl> + / / Passing true for ' isDefinition ' lets print the ( external ) linkage if it ' s <nl> + / / not a definition . <nl> + printLinkage ( OS , getLinkage ( ) , / * isDefinition * / true ) ; <nl> <nl> if ( isSerialized ( ) ) <nl> OS < < " [ serialized ] " ; <nl> mmm a / lib / SILOptimizer / IPO / CrossModuleSerializationSetup . cpp <nl> ppp b / lib / SILOptimizer / IPO / CrossModuleSerializationSetup . cpp <nl> <nl> # include " swift / SILOptimizer / PassManager / Passes . h " <nl> # include " swift / SILOptimizer / PassManager / Transforms . h " <nl> # include " swift / SILOptimizer / Utils / InstOptUtils . h " <nl> + # include " swift / SILOptimizer / Utils / SILInliner . h " <nl> # include " llvm / Support / CommandLine . h " <nl> # include " llvm / Support / Debug . h " <nl> <nl> using namespace swift ; <nl> <nl> + / / / Functions up to this ( abstract ) size are serialized , even if they are not <nl> + / / / generic . <nl> + static llvm : : cl : : opt < int > CMOFunctionSizeLimit ( " cmo - function - size - limit " , <nl> + llvm : : cl : : init ( 20 ) ) ; <nl> + <nl> namespace { <nl> <nl> / / / Scans a whole module and marks functions and types as inlinable or usable <nl> class CrossModuleSerializationSetup { <nl> <nl> bool canSerialize ( SILFunction * F , bool lookIntoThunks ) ; <nl> <nl> + bool canSerialize ( SILInstruction * inst , bool lookIntoThunks ) ; <nl> + <nl> void setUpForSerialization ( SILFunction * F ) ; <nl> <nl> void prepareInstructionForSerialization ( SILInstruction * inst ) ; <nl> static llvm : : cl : : opt < bool > SerializeEverything ( <nl> <nl> / / / Decide whether to serialize a function . <nl> static bool shouldSerialize ( SILFunction * F ) { <nl> - / / The basic heursitic : serialize all generic functions , because it makes a <nl> - / / huge difference if generic functions can be specialized or not . <nl> - if ( ! F - > getLoweredFunctionType ( ) - > isPolymorphic ( ) & & ! SerializeEverything ) <nl> - return false ; <nl> - <nl> / / Check if we already handled this function before . <nl> if ( F - > isSerialized ( ) = = IsSerialized ) <nl> return false ; <nl> static bool shouldSerialize ( SILFunction * F ) { <nl> if ( F - > hasSemanticsAttr ( " optimize . no . 
crossmodule " ) ) <nl> return false ; <nl> <nl> + if ( SerializeEverything ) <nl> + return true ; <nl> + <nl> + / / The basic heursitic : serialize all generic functions , because it makes a <nl> + / / huge difference if generic functions can be specialized or not . <nl> + if ( F - > getLoweredFunctionType ( ) - > isPolymorphic ( ) ) <nl> + return true ; <nl> + <nl> + / / Also serialize " small " non - generic functions . <nl> + int size = 0 ; <nl> + for ( SILBasicBlock & block : * F ) { <nl> + for ( SILInstruction & inst : block ) { <nl> + size + = ( int ) instructionInlineCost ( inst ) ; <nl> + if ( size > = CMOFunctionSizeLimit ) <nl> + return false ; <nl> + } <nl> + } <nl> + <nl> return true ; <nl> } <nl> <nl> prepareInstructionForSerialization ( SILInstruction * inst ) { <nl> handleReferencedFunction ( callee ) ; <nl> return ; <nl> } <nl> + if ( auto * GAI = dyn_cast < GlobalAddrInst > ( inst ) ) { <nl> + GAI - > getReferencedGlobal ( ) - > setSerialized ( IsSerialized ) ; <nl> + GAI - > getReferencedGlobal ( ) - > setLinkage ( SILLinkage : : Public ) ; <nl> + return ; <nl> + } <nl> if ( auto * MI = dyn_cast < MethodInst > ( inst ) ) { <nl> handleReferencedMethod ( MI - > getMember ( ) ) ; <nl> return ; <nl> bool CrossModuleSerializationSetup : : canSerialize ( SILFunction * F , <nl> / / First step : check if serializing F is even possible . <nl> for ( SILBasicBlock & block : * F ) { <nl> for ( SILInstruction & inst : block ) { <nl> - if ( auto * FRI = dyn_cast < FunctionRefBaseInst > ( & inst ) ) { <nl> - SILFunction * callee = FRI - > getReferencedFunctionOrNull ( ) ; <nl> - if ( ! canUseFromInline ( callee , lookIntoThunks ) ) <nl> - return false ; <nl> - } else if ( auto * KPI = dyn_cast < KeyPathInst > ( & inst ) ) { <nl> - bool canUse = true ; <nl> - KPI - > getPattern ( ) - > visitReferencedFunctionsAndMethods ( <nl> - [ & ] ( SILFunction * func ) { <nl> - if ( ! canUseFromInline ( func , lookIntoThunks ) ) <nl> - canUse = false ; <nl> - } , <nl> - [ ] ( SILDeclRef method ) { } ) ; <nl> - if ( ! canUse ) <nl> - return false ; <nl> - } <nl> + if ( ! canSerialize ( & inst , lookIntoThunks ) ) <nl> + return false ; <nl> } <nl> } <nl> return true ; <nl> } <nl> <nl> + bool CrossModuleSerializationSetup : : canSerialize ( SILInstruction * inst , <nl> + bool lookIntoThunks ) { <nl> + if ( auto * FRI = dyn_cast < FunctionRefBaseInst > ( inst ) ) { <nl> + SILFunction * callee = FRI - > getReferencedFunctionOrNull ( ) ; <nl> + return canUseFromInline ( callee , lookIntoThunks ) ; <nl> + } <nl> + if ( auto * KPI = dyn_cast < KeyPathInst > ( inst ) ) { <nl> + bool canUse = true ; <nl> + KPI - > getPattern ( ) - > visitReferencedFunctionsAndMethods ( <nl> + [ & ] ( SILFunction * func ) { <nl> + if ( ! canUseFromInline ( func , lookIntoThunks ) ) <nl> + canUse = false ; <nl> + } , <nl> + [ & ] ( SILDeclRef method ) { <nl> + if ( method . isForeign ) <nl> + canUse = false ; <nl> + } ) ; <nl> + return canUse ; <nl> + } <nl> + if ( auto * GAI = dyn_cast < GlobalAddrInst > ( inst ) ) { <nl> + return ! GAI - > getReferencedGlobal ( ) - > getName ( ) . startswith ( " globalinit_ " ) ; <nl> + } <nl> + if ( auto * MI = dyn_cast < MethodInst > ( inst ) ) { <nl> + return ! MI - > getMember ( ) . isForeign ; <nl> + } <nl> + <nl> + return true ; <nl> + } <nl> + <nl> / / / Returns true if the function \ p func can be used from a serialized function . <nl> / / / <nl> / / / If \ p lookIntoThunks is true , serializable shared thunks are also accepted . 
<nl> void CrossModuleSerializationSetup : : setUpForSerialization ( SILFunction * F ) { <nl> } <nl> F - > setSerialized ( IsSerialized ) ; <nl> <nl> - / / As a code size optimization , make serialized functions <nl> - / / @ alwaysEmitIntoClient . <nl> - / / Also , for shared thunks it ' s required to make them @ alwaysEmitIntoClient . <nl> - / / SILLinkage : : Public would not work for shared functions , because it could <nl> - / / result in duplicate - symbol linker errors . <nl> - F - > setLinkage ( SILLinkage : : PublicNonABI ) ; <nl> + if ( F - > getLoweredFunctionType ( ) - > isPolymorphic ( ) | | <nl> + F - > getLinkage ( ) ! = SILLinkage : : Public ) { <nl> + / / As a code size optimization , make serialized functions <nl> + / / @ alwaysEmitIntoClient . <nl> + / / Also , for shared thunks it ' s required to make them @ alwaysEmitIntoClient . <nl> + / / SILLinkage : : Public would not work for shared functions , because it could <nl> + / / result in duplicate - symbol linker errors . <nl> + F - > setLinkage ( SILLinkage : : PublicNonABI ) ; <nl> + } else { <nl> + F - > setLinkage ( SILLinkage : : Public ) ; <nl> + } <nl> } <nl> <nl> / / / Select functions in the module which should be serialized . <nl> mmm a / lib / SILOptimizer / PassManager / PassPipeline . cpp <nl> ppp b / lib / SILOptimizer / PassManager / PassPipeline . cpp <nl> static void addPerfEarlyModulePassPipeline ( SILPassPipelinePlan & P ) { <nl> / / is linked in from the stdlib . <nl> P . addTempRValueOpt ( ) ; <nl> <nl> + / / Needed to serialize static initializers of globals for cross - module <nl> + / / optimization . <nl> + P . addGlobalOpt ( ) ; <nl> + <nl> / / Add the outliner pass ( Osize ) . <nl> P . addOutliner ( ) ; <nl> <nl> mmm a / lib / SILOptimizer / SILCombiner / SILCombinerMiscVisitors . cpp <nl> ppp b / lib / SILOptimizer / SILCombiner / SILCombinerMiscVisitors . cpp <nl> <nl> # include " swift / SILOptimizer / Utils / CFGOptUtils . h " <nl> # include " swift / SILOptimizer / Utils / Devirtualize . h " <nl> # include " swift / SILOptimizer / Utils / InstOptUtils . h " <nl> + # include " swift / SILOptimizer / Utils / BasicBlockOptUtils . h " <nl> # include " llvm / ADT / DenseMap . h " <nl> # include " llvm / ADT / SmallPtrSet . h " <nl> # include " llvm / ADT / SmallVector . h " <nl> static bool isZeroLoadFromEmptyCollection ( LoadInst * LI ) { <nl> } <nl> } <nl> <nl> + static SingleValueInstruction * getValueFromStaticLet ( SILValue v ) { <nl> + if ( auto * globalAddr = dyn_cast < GlobalAddrInst > ( v ) ) { <nl> + SILGlobalVariable * global = globalAddr - > getReferencedGlobal ( ) ; <nl> + if ( ! global - > isLet ( ) ) <nl> + return nullptr ; <nl> + return dyn_cast_or_null < SingleValueInstruction > ( <nl> + global - > getStaticInitializerValue ( ) ) ; <nl> + } <nl> + if ( auto * seai = dyn_cast < StructElementAddrInst > ( v ) ) { <nl> + auto * structVal = getValueFromStaticLet ( seai - > getOperand ( ) ) ; <nl> + if ( ! structVal ) <nl> + return nullptr ; <nl> + return cast < SingleValueInstruction > ( <nl> + cast < StructInst > ( structVal ) - > getOperandForField ( seai - > getField ( ) ) - > get ( ) ) ; <nl> + } <nl> + if ( auto * teai = dyn_cast < TupleElementAddrInst > ( v ) ) { <nl> + auto * tupleVal = getValueFromStaticLet ( teai - > getOperand ( ) ) ; <nl> + if ( ! 
tupleVal ) <nl> + return nullptr ; <nl> + return cast < SingleValueInstruction > ( <nl> + cast < TupleInst > ( tupleVal ) - > getElement ( teai - > getFieldNo ( ) ) ) ; <nl> + } <nl> + return nullptr ; <nl> + } <nl> + <nl> SILInstruction * SILCombiner : : visitLoadInst ( LoadInst * LI ) { <nl> if ( LI - > getFunction ( ) - > hasOwnership ( ) ) <nl> return nullptr ; <nl> SILInstruction * SILCombiner : : visitLoadInst ( LoadInst * LI ) { <nl> if ( isZeroLoadFromEmptyCollection ( LI ) ) <nl> return Builder . createIntegerLiteral ( LI - > getLoc ( ) , LI - > getType ( ) , 0 ) ; <nl> <nl> + / / Propagate a value from a static " let " global variable . <nl> + / / This optimization is also done by GlobalOpt , but not with de - serialized <nl> + / / globals , which can occur with cross - module optimization . <nl> + if ( SingleValueInstruction * initVal = getValueFromStaticLet ( LI - > getOperand ( ) ) ) { <nl> + StaticInitCloner cloner ( LI ) ; <nl> + cloner . add ( initVal ) ; <nl> + return cloner . clone ( initVal ) ; <nl> + } <nl> + <nl> return nullptr ; <nl> } <nl> <nl> mmm a / lib / Serialization / DeserializeSIL . cpp <nl> ppp b / lib / Serialization / DeserializeSIL . cpp <nl> SILDeserializer : : readSILFunctionChecked ( DeclID FID , SILFunction * existingFn , <nl> / / occurred and this is a declaration . Work around that for now . <nl> if ( ! CurrentBB ) <nl> return fn ; <nl> + Builder . setInsertionPoint ( CurrentBB ) ; <nl> <nl> / / Handle a SILInstruction record . <nl> - if ( readSILInstruction ( fn , CurrentBB , Builder , kind , scratch ) ) { <nl> + if ( readSILInstruction ( fn , Builder , kind , scratch ) ) { <nl> LLVM_DEBUG ( llvm : : dbgs ( ) < < " readSILInstruction returns error . \ n " ) ; <nl> MF - > fatal ( ) ; <nl> } <nl> SILDeserializer : : readKeyPathComponent ( ArrayRef < uint64_t > ListOfValues , <nl> llvm_unreachable ( " invalid key path component kind encoding " ) ; <nl> } <nl> <nl> - bool SILDeserializer : : readSILInstruction ( SILFunction * Fn , SILBasicBlock * BB , <nl> + bool SILDeserializer : : readSILInstruction ( SILFunction * Fn , <nl> SILBuilder & Builder , <nl> unsigned RecordKind , <nl> SmallVectorImpl < uint64_t > & scratch ) { <nl> - / / Return error if Basic Block is null . <nl> - if ( ! BB ) <nl> - return true ; <nl> - <nl> - Builder . setInsertionPoint ( BB ) ; <nl> - Builder . setCurrentDebugScope ( Fn - > getDebugScope ( ) ) ; <nl> + if ( Fn ) <nl> + Builder . setCurrentDebugScope ( Fn - > getDebugScope ( ) ) ; <nl> unsigned RawOpCode = 0 , TyCategory = 0 , TyCategory2 = 0 , TyCategory3 = 0 , <nl> Attr = 0 , Attr2 = 0 , Attr3 = 0 , Attr4 = 0 , NumSubs = 0 , <nl> NumConformances = 0 , IsNonThrowingApply = 0 ; <nl> bool SILDeserializer : : readSILInstruction ( SILFunction * Fn , SILBasicBlock * BB , <nl> break ; <nl> } <nl> case SILInstructionKind : : ObjectInst : { <nl> - llvm_unreachable ( " Serialization of global initializers not supported " ) ; <nl> + assert ( RecordKind = = SIL_ONE_TYPE_VALUES & & <nl> + " Layout should be OneTypeValues . " ) ; <nl> + unsigned NumVals = ListOfValues . 
size ( ) ; <nl> + assert ( NumVals > = 1 & & " Not enough values " ) ; <nl> + unsigned numBaseElements = ListOfValues [ 0 ] ; <nl> + SILType ClassTy = <nl> + getSILType ( MF - > getType ( TyID ) , ( SILValueCategory ) TyCategory , Fn ) ; <nl> + SmallVector < SILValue , 4 > elements ; <nl> + for ( unsigned i = 1 ; i < NumVals ; i + = 2 ) { <nl> + SILType elementType = getSILType ( MF - > getType ( ListOfValues [ i + 1 ] ) , <nl> + SILValueCategory : : Object , Fn ) ; <nl> + SILValue elementVal = getLocalValue ( ListOfValues [ i ] , elementType ) ; <nl> + elements . push_back ( elementVal ) ; <nl> + } <nl> + ResultVal = Builder . createObject ( Loc , ClassTy , elements , numBaseElements ) ; <nl> + break ; <nl> } <nl> case SILInstructionKind : : BranchInst : { <nl> SmallVector < SILValue , 4 > Args ; <nl> SILGlobalVariable * SILDeserializer : : readGlobalVar ( StringRef Name ) { <nl> globalVarOrOffset = v ; <nl> v - > setDeclaration ( IsDeclaration ) ; <nl> <nl> - if ( Callback ) Callback - > didDeserialize ( MF - > getAssociatedModule ( ) , v ) ; <nl> + if ( Callback ) <nl> + Callback - > didDeserialize ( MF - > getAssociatedModule ( ) , v ) ; <nl> + <nl> + scratch . clear ( ) ; <nl> + maybeEntry = SILCursor . advance ( AF_DontPopBlockAtEnd ) ; <nl> + if ( ! maybeEntry ) <nl> + MF - > fatal ( maybeEntry . takeError ( ) ) ; <nl> + entry = maybeEntry . get ( ) ; <nl> + if ( entry . Kind = = llvm : : BitstreamEntry : : EndBlock ) <nl> + return v ; <nl> + <nl> + maybeKind = SILCursor . readRecord ( entry . ID , scratch , & blobData ) ; <nl> + if ( ! maybeKind ) <nl> + MF - > fatal ( maybeKind . takeError ( ) ) ; <nl> + kind = maybeKind . get ( ) ; <nl> + <nl> + SILBuilder Builder ( v ) ; <nl> + <nl> + llvm : : DenseMap < uint32_t , ValueBase * > SavedLocalValues ; <nl> + llvm : : DenseMap < uint32_t , ValueBase * > SavedForwardLocalValues ; <nl> + serialization : : ValueID SavedLastValueID = 1 ; <nl> + <nl> + SavedLocalValues . swap ( LocalValues ) ; <nl> + SavedForwardLocalValues . swap ( ForwardLocalValues ) ; <nl> + std : : swap ( SavedLastValueID , LastValueID ) ; <nl> + <nl> + while ( kind ! = SIL_FUNCTION & & kind ! = SIL_VTABLE & & kind ! = SIL_GLOBALVAR & & <nl> + kind ! = SIL_WITNESS_TABLE & & kind ! = SIL_DIFFERENTIABILITY_WITNESS ) { <nl> + if ( readSILInstruction ( nullptr , Builder , kind , scratch ) ) { <nl> + LLVM_DEBUG ( llvm : : dbgs ( ) < < " readSILInstruction returns error . \ n " ) ; <nl> + MF - > fatal ( ) ; <nl> + } <nl> + <nl> + / / Fetch the next record . <nl> + scratch . clear ( ) ; <nl> + llvm : : Expected < llvm : : BitstreamEntry > maybeEntry = <nl> + SILCursor . advance ( AF_DontPopBlockAtEnd ) ; <nl> + if ( ! maybeEntry ) <nl> + MF - > fatal ( maybeEntry . takeError ( ) ) ; <nl> + llvm : : BitstreamEntry entry = maybeEntry . get ( ) ; <nl> + <nl> + / / EndBlock means the end of this SILFunction . <nl> + if ( entry . Kind = = llvm : : BitstreamEntry : : EndBlock ) <nl> + break ; <nl> + maybeKind = SILCursor . readRecord ( entry . ID , scratch ) ; <nl> + if ( ! maybeKind ) <nl> + MF - > fatal ( maybeKind . takeError ( ) ) ; <nl> + kind = maybeKind . get ( ) ; <nl> + } <nl> + <nl> + SavedLocalValues . swap ( LocalValues ) ; <nl> + SavedForwardLocalValues . swap ( ForwardLocalValues ) ; <nl> + std : : swap ( SavedLastValueID , LastValueID ) ; <nl> + <nl> return v ; <nl> } <nl> <nl> mmm a / lib / Serialization / DeserializeSIL . h <nl> ppp b / lib / Serialization / DeserializeSIL . 
h <nl> namespace swift { <nl> SILBasicBlock * readSILBasicBlock ( SILFunction * Fn , <nl> SILBasicBlock * Prev , <nl> SmallVectorImpl < uint64_t > & scratch ) ; <nl> - / / / Read a SIL instruction within a given SIL basic block . <nl> - bool readSILInstruction ( SILFunction * Fn , SILBasicBlock * BB , <nl> + / / / Read a SIL instruction . <nl> + bool readSILInstruction ( SILFunction * Fn , <nl> SILBuilder & Builder , <nl> unsigned RecordKind , <nl> SmallVectorImpl < uint64_t > & scratch ) ; <nl> mmm a / lib / Serialization / ModuleFormat . h <nl> ppp b / lib / Serialization / ModuleFormat . h <nl> const uint16_t SWIFTMODULE_VERSION_MAJOR = 0 ; <nl> / / / describe what change you made . The content of this comment isn ' t important ; <nl> / / / it just ensures a conflict if two people change the module format . <nl> / / / Don ' t worry about adhering to the 80 - column limit for this line . <nl> - const uint16_t SWIFTMODULE_VERSION_MINOR = 560 ; / / SILVTable flag for non - overridden entries <nl> + const uint16_t SWIFTMODULE_VERSION_MINOR = 561 ; / / Initializers of globals . <nl> <nl> / / / A standard hash seed used for all string hashes in a serialized module . <nl> / / / <nl> mmm a / lib / Serialization / SerializeSIL . cpp <nl> ppp b / lib / Serialization / SerializeSIL . cpp <nl> void SILSerializer : : writeSILInstruction ( const SILInstruction & SI ) { <nl> PrettyStackTraceSILNode stackTrace ( " Serializing " , & SI ) ; <nl> <nl> switch ( SI . getKind ( ) ) { <nl> - case SILInstructionKind : : ObjectInst : <nl> - llvm_unreachable ( " static initializers of sil_global are not serialized " ) ; <nl> + case SILInstructionKind : : ObjectInst : { <nl> + const ObjectInst * OI = cast < ObjectInst > ( & SI ) ; <nl> + unsigned abbrCode = SILAbbrCodes [ SILOneTypeValuesLayout : : Code ] ; <nl> + SmallVector < ValueID , 4 > Args ; <nl> + Args . push_back ( ( unsigned ) OI - > getBaseElements ( ) . size ( ) ) ; <nl> + for ( const Operand & op : OI - > getAllOperands ( ) ) { <nl> + SILValue OpVal = op . get ( ) ; <nl> + Args . push_back ( addValueRef ( OpVal ) ) ; <nl> + SILType OpType = OpVal - > getType ( ) ; <nl> + assert ( OpType . isObject ( ) ) ; <nl> + Args . push_back ( S . addTypeRef ( OpType . getASTType ( ) ) ) ; <nl> + } <nl> + SILOneTypeValuesLayout : : emitRecord ( Out , ScratchRecord , abbrCode , <nl> + ( unsigned ) SI . getKind ( ) , <nl> + S . addTypeRef ( <nl> + OI - > getType ( ) . getASTType ( ) ) , <nl> + ( unsigned ) OI - > getType ( ) . getCategory ( ) , <nl> + Args ) ; <nl> + break ; <nl> + } <nl> <nl> case SILInstructionKind : : DebugValueInst : <nl> case SILInstructionKind : : DebugValueAddrInst : <nl> void SILSerializer : : writeSILGlobalVar ( const SILGlobalVariable & g ) { <nl> ( unsigned ) ! g . isDefinition ( ) , <nl> ( unsigned ) g . isLet ( ) , <nl> TyID , dID ) ; <nl> + <nl> + ValueIDs . clear ( ) ; <nl> + InstID = 0 ; <nl> + unsigned ValueID = 2 ; <nl> + for ( const SILInstruction & initInst : g ) { <nl> + for ( auto result : initInst . getResults ( ) ) { <nl> + ValueIDs [ result ] = ValueID + + ; <nl> + } <nl> + } <nl> + <nl> + for ( const SILInstruction & initInst : g ) { <nl> + writeSILInstruction ( initInst ) ; <nl> + } <nl> } <nl> <nl> void SILSerializer : : writeSILVTable ( const SILVTable & vt ) { <nl> mmm a / test / Interop / Cxx / extern - var / extern - var - silgen . swift <nl> ppp b / test / Interop / Cxx / extern - var / extern - var - silgen . 
swift <nl> func getCounter ( ) - > CInt { <nl> } <nl> <nl> / / CHECK : / / clang name : counter <nl> - / / CHECK : sil_global @ counter : $ Int32 <nl> + / / CHECK : sil_global public_external @ counter : $ Int32 <nl> / / CHECK : / / clang name : Namespaced : : counter <nl> - / / CHECK : sil_global @ { { _ZN10Namespaced7counterE | \ ? counter @ Namespaced @ @ 3HA } } : $ Int32 <nl> + / / CHECK : sil_global public_external @ { { _ZN10Namespaced7counterE | \ ? counter @ Namespaced @ @ 3HA } } : $ Int32 <nl> <nl> / / CHECK : sil hidden @ $ s4main10getCounters5Int32VyF : $ @ convention ( thin ) ( ) - > Int32 <nl> / / CHECK : [ [ COUNTER : % . * ] ] = global_addr @ counter : $ * Int32 <nl> mmm a / test / Interop / Cxx / static / inline - static - member - var - silgen . swift <nl> ppp b / test / Interop / Cxx / static / inline - static - member - var - silgen . swift <nl> func readStaticMember ( ) - > CInt { <nl> } <nl> <nl> / / CHECK : / / clang name : WithInlineStaticMember : : staticMember <nl> - / / CHECK : sil_global @ { { _ZN22WithInlineStaticMember12staticMemberE | \ ? staticMember @ WithInlineStaticMember @ @ 2HA } } : $ Int32 <nl> + / / CHECK : sil_global public_external @ { { _ZN22WithInlineStaticMember12staticMemberE | \ ? staticMember @ WithInlineStaticMember @ @ 2HA } } : $ Int32 <nl> <nl> / / CHECK : sil hidden @ $ s4main16readStaticMembers5Int32VyF : $ @ convention ( thin ) ( ) - > Int32 <nl> / / CHECK : [ [ ADDR : % . * ] ] = global_addr @ { { _ZN22WithInlineStaticMember12staticMemberE | \ ? staticMember @ WithInlineStaticMember @ @ 2HA } } : $ * Int32 <nl> mmm a / test / Interop / Cxx / static / static - member - var - silgen . swift <nl> ppp b / test / Interop / Cxx / static / static - member - var - silgen . swift <nl> <nl> / / RUN : % target - swift - emit - sil - I % S / Inputs - enable - cxx - interop % s | % FileCheck % s <nl> <nl> / / CHECK : / / clang name : WithStaticMember : : staticMember <nl> - / / CHECK : sil_global @ { { _ZN16WithStaticMember12staticMemberE | \ ? staticMember @ WithStaticMember @ @ 2HA } } : $ Int32 <nl> + / / CHECK : sil_global public_external @ { { _ZN16WithStaticMember12staticMemberE | \ ? staticMember @ WithStaticMember @ @ 2HA } } : $ Int32 <nl> / / CHECK : / / clang name : WithIncompleteStaticMember : : selfMember <nl> - / / CHECK : sil_global @ { { _ZN26WithIncompleteStaticMember10selfMemberE | \ ? selfMember @ WithIncompleteStaticMember @ @ 2V1 @ A } } : $ WithIncompleteStaticMember <nl> + / / CHECK : sil_global public_external @ { { _ZN26WithIncompleteStaticMember10selfMemberE | \ ? selfMember @ WithIncompleteStaticMember @ @ 2V1 @ A } } : $ WithIncompleteStaticMember <nl> / / CHECK : / / clang name : WithConstStaticMember : : defined <nl> - / / CHECK : sil_global [ let ] @ { { _ZN21WithConstStaticMember7definedE | \ ? defined @ WithConstStaticMember @ @ 2HB } } : $ Int32 <nl> + / / CHECK : sil_global public_external [ let ] @ { { _ZN21WithConstStaticMember7definedE | \ ? defined @ WithConstStaticMember @ @ 2HB } } : $ Int32 <nl> / / CHECK : / / clang name : WithConstStaticMember : : definedOutOfLine <nl> - / / CHECK : sil_global [ let ] @ { { _ZN21WithConstStaticMember16definedOutOfLineE | \ ? definedOutOfLine @ WithConstStaticMember @ @ 2HB } } : $ Int32 <nl> + / / CHECK : sil_global public_external [ let ] @ { { _ZN21WithConstStaticMember16definedOutOfLineE | \ ? 
definedOutOfLine @ WithConstStaticMember @ @ 2HB } } : $ Int32 <nl> / / CHECK : / / clang name : WithConstexprStaticMember : : definedInline <nl> - / / CHECK : sil_global [ let ] @ { { _ZN25WithConstexprStaticMember13definedInlineE | \ ? definedInline @ WithConstexprStaticMember @ @ 2HB } } : $ Int32 <nl> + / / CHECK : sil_global public_external [ let ] @ { { _ZN25WithConstexprStaticMember13definedInlineE | \ ? definedInline @ WithConstexprStaticMember @ @ 2HB } } : $ Int32 <nl> <nl> import StaticMemberVar <nl> <nl> mmm a / test / Interop / Cxx / static / static - var - silgen . swift <nl> ppp b / test / Interop / Cxx / static / static - var - silgen . swift <nl> func initStaticVars ( ) - > CInt { <nl> } <nl> <nl> / / CHECK : / / clang name : staticVar <nl> - / / CHECK : sil_global @ staticVar : $ Int32 <nl> + / / CHECK : sil_global public_external @ staticVar : $ Int32 <nl> / / CHECK : / / clang name : staticVarInit <nl> - / / CHECK : sil_global @ staticVarInit : $ Int32 <nl> + / / CHECK : sil_global public_external @ staticVarInit : $ Int32 <nl> / / CHECK : / / clang name : staticVarInlineInit <nl> - / / CHECK : sil_global @ staticVarInlineInit : $ Int32 <nl> + / / CHECK : sil_global public_external @ staticVarInlineInit : $ Int32 <nl> / / CHECK : / / clang name : staticConst <nl> - / / CHECK : sil_global [ let ] @ staticConst : $ Int32 <nl> + / / CHECK : sil_global public_external [ let ] @ staticConst : $ Int32 <nl> / / CHECK : / / clang name : staticConstInit <nl> - / / CHECK : sil_global [ let ] @ staticConstInit : $ Int32 <nl> + / / CHECK : sil_global public_external [ let ] @ staticConstInit : $ Int32 <nl> / / CHECK : / / clang name : staticConstInlineInit <nl> - / / CHECK : sil_global [ let ] @ staticConstInlineInit : $ Int32 <nl> + / / CHECK : sil_global public_external [ let ] @ staticConstInlineInit : $ Int32 <nl> / / CHECK : / / clang name : staticConstexpr <nl> - / / CHECK : sil_global [ let ] @ staticConstexpr : $ Int32 <nl> + / / CHECK : sil_global public_external [ let ] @ staticConstexpr : $ Int32 <nl> / / CHECK : / / clang name : staticNonTrivial <nl> - / / CHECK : sil_global @ staticNonTrivial : $ NonTrivial <nl> + / / CHECK : sil_global public_external @ staticNonTrivial : $ NonTrivial <nl> / / CHECK : / / clang name : staticConstNonTrivial <nl> - / / CHECK : sil_global [ let ] @ staticConstNonTrivial : $ NonTrivial <nl> + / / CHECK : sil_global public_external [ let ] @ staticConstNonTrivial : $ NonTrivial <nl> / / CHECK : / / clang name : staticConstexprNonTrivial <nl> - / / CHECK : sil_global [ let ] @ staticConstexprNonTrivial : $ NonTrivial <nl> + / / CHECK : sil_global public_external [ let ] @ staticConstexprNonTrivial : $ NonTrivial <nl> <nl> func readStaticVar ( ) - > CInt { <nl> return staticVar <nl> mmm a / test / SIL / Serialization / globals . sil <nl> ppp b / test / SIL / Serialization / globals . sil <nl> <nl> sil_stage canonical <nl> <nl> import Swift <nl> + import Builtin <nl> <nl> / / CHECK - NOT : sil_global hidden @ hidden_global_unused <nl> sil_global hidden @ hidden_global_unused : $ Int <nl> sil_global @ public_global_unused : $ Int <nl> / / CHECK : sil_global @ public_global_used <nl> sil_global @ public_global_used : $ Int <nl> <nl> + / / CHECK : sil_global [ serialized ] @ initialized_global_object : $ Int64 = { <nl> + / / CHECK - NEXT : % 0 = integer_literal $ Builtin . Int64 , 27 <nl> + / / CHECK - NEXT : % 1 = integer_literal $ Builtin . Int64 , 28 <nl> + / / CHECK - NEXT : % initval = object $ GlobalObject ( % 0 : $ Builtin . 
Int64 , [ tail_elems ] % 1 : $ Builtin . Int64 , % 1 : $ Builtin . Int64 ) <nl> + / / CHECK - NEXT : } <nl> + sil_global [ serialized ] @ initialized_global_object : $ Int64 = { <nl> + % 0 = integer_literal $ Builtin . Int64 , 27 <nl> + % 1 = integer_literal $ Builtin . Int64 , 28 <nl> + % initval = object $ GlobalObject ( % 0 : $ Builtin . Int64 , [ tail_elems ] % 1 : $ Builtin . Int64 , % 1 : $ Builtin . Int64 ) <nl> + } <nl> + <nl> / / CHECK : sil_global [ serialized ] @ serialized_global <nl> sil_global [ serialized ] @ serialized_global : $ Int <nl> <nl> bb0 : <nl> % 2 = tuple ( ) <nl> return % 2 : $ ( ) <nl> } <nl> + <nl> + / / CHECK : sil_global [ serialized ] @ initialized_global_int : $ Int64 = { <nl> + / / CHECK - NEXT : % 0 = integer_literal $ Builtin . Int64 , 27 <nl> + / / CHECK - NEXT : % initval = struct $ Int64 ( % 0 : $ Builtin . Int64 ) <nl> + / / CHECK - NEXT : } <nl> + sil_global [ serialized ] @ initialized_global_int : $ Int64 = { <nl> + % 0 = integer_literal $ Builtin . Int64 , 27 <nl> + % initval = struct $ Int64 ( % 0 : $ Builtin . Int64 ) <nl> + } <nl> + <nl> + class GlobalObject { <nl> + @ _hasStorage let e : Builtin . Int64 <nl> + <nl> + } <nl> + <nl> + <nl> mmm a / test / SILGen / dso_handle . swift <nl> ppp b / test / SILGen / dso_handle . swift <nl> <nl> / / RUN : % target - swift - emit - silgen - Xllvm - sil - full - demangle % s | % FileCheck % s <nl> <nl> - / / CHECK : sil_global [ [ DSO : @ __dso_handle | @ __ImageBase ] ] : $ Builtin . RawPointer <nl> + / / CHECK : sil_global public_external [ [ DSO : @ __dso_handle | @ __ImageBase ] ] : $ Builtin . RawPointer <nl> <nl> / / CHECK - LABEL : sil [ ossa ] @ main : $ @ convention ( c ) <nl> / / CHECK : bb0 <nl> mmm a / test / SILOptimizer / Inputs / cross - module . swift <nl> ppp b / test / SILOptimizer / Inputs / cross - module . swift <nl> extension Int : PrivateProtocol { <nl> } <nl> <nl> @ inline ( never ) <nl> + @ _semantics ( " optimize . no . crossmodule " ) <nl> private func printFooExistential ( _ p : PrivateProtocol ) { <nl> print ( p . foo ( ) ) <nl> } <nl> public func callUnrelated < T > ( _ t : T ) - > T { <nl> return t <nl> } <nl> <nl> + public let globalLet = 529387 <nl> mmm a / test / SILOptimizer / cross - module - optimization . swift <nl> ppp b / test / SILOptimizer / cross - module - optimization . swift <nl> <nl> <nl> / / Second test : check if CMO really imports the SIL of functions in other modules . <nl> <nl> - / / RUN : % target - build - swift - O - wmo - module - name = Main - I % t % s - Xllvm - sil - disable - pass = FunctionSignatureOpts - emit - sil | % FileCheck % s - check - prefix = CHECK - SIL <nl> + / / RUN : % target - build - swift - O - wmo - module - name = Main - I % t % s - Xllvm - sil - disable - pass = FunctionSignatureOpts - emit - sil - o % t / out . sil <nl> + / / RUN : % FileCheck % s - check - prefix = CHECK - SIL < % t / out . sil <nl> + / / RUN : % FileCheck % s - check - prefix = CHECK - SIL2 < % t / out . sil <nl> <nl> import Test <nl> <nl> func testClass ( ) { <nl> print ( createClass_gen ( 0 ) ) <nl> } <nl> <nl> + / / CHECK - SIL2 - LABEL : sil hidden [ noinline ] @ $ s4Main9testErroryyF <nl> + @ inline ( never ) <nl> func testError ( ) { <nl> / / CHECK - OUTPUT : PrivateError ( ) <nl> - / / CHECK - SIL - DAG : sil @ $ s4Test12PrivateError33_ { { . * } } : $ @ convention ( method ) ( @ thin PrivateError . 
Type ) - > PrivateError { { $ } } <nl> + / / CHECK - SIL2 : struct $ PrivateError ( ) <nl> + / / CHECK - SIL2 : alloc_existential_box $ Error , $ PrivateError <nl> print ( returnPrivateError ( 27 ) ) <nl> / / CHECK - OUTPUT : InternalError ( ) <nl> - / / CHECK - SIL - DAG : sil @ $ s4Test13InternalErrorVACycfC : $ @ convention ( method ) ( @ thin InternalError . Type ) - > InternalError { { $ } } <nl> + / / CHECK - SIL2 : struct $ InternalError ( ) <nl> + / / CHECK - SIL2 : alloc_existential_box $ Error , $ InternalError <nl> print ( returnInternalError ( 27 ) ) <nl> + / / CHECK - SIL2 : } / / end sil function ' $ s4Main9testErroryyF ' <nl> } <nl> <nl> class DerivedFromOpen < T > : OpenClass < T > { } <nl> func testMisc ( ) { <nl> print ( classWithPublicProperty ( 33 ) ) <nl> } <nl> <nl> + / / CHECK - SIL2 - LABEL : sil hidden [ noinline ] @ $ s4Main10testGlobalyyF <nl> + @ inline ( never ) <nl> + func testGlobal ( ) { <nl> + / / CHECK - OUTPUT : 529387 <nl> + / / CHECK - SIL2 : integer_literal $ Builtin . Int { { [ 0 - 9 ] + } } , 529387 <nl> + print ( globalLet ) <nl> + / / CHECK - SIL2 : } / / end sil function ' $ s4Main10testGlobalyyF ' <nl> + } <nl> + <nl> testNestedTypes ( ) <nl> testClass ( ) <nl> testError ( ) <nl> testSubModule ( ) <nl> testClosures ( ) <nl> testKeypath ( ) <nl> testMisc ( ) <nl> + testGlobal ( ) <nl> + <nl> mmm a / test / SILOptimizer / sil_combine . sil <nl> ppp b / test / SILOptimizer / sil_combine . sil <nl> bb0 ( % 0 : $ @ thick SpecialEnum . Type ) : <nl> % 4 = struct $ Bool ( % 3 : $ Builtin . Int1 ) <nl> return % 4 : $ Bool <nl> } <nl> + <nl> + struct IntTuple { <nl> + @ _hasStorage var t : ( Int64 , Int64 ) <nl> + } <nl> + <nl> + sil_global [ let ] @ initialized_let_global : $ Int64 = { <nl> + % 0 = integer_literal $ Builtin . Int64 , 27 <nl> + % initval = struct $ Int64 ( % 0 : $ Builtin . Int64 ) <nl> + } <nl> + <nl> + sil_global @ initialized_var_global : $ Int64 = { <nl> + % 0 = integer_literal $ Builtin . Int64 , 27 <nl> + % initval = struct $ Int64 ( % 0 : $ Builtin . Int64 ) <nl> + } <nl> + <nl> + sil_global [ let ] @ initialized_tuple_global : $ IntTuple = { <nl> + % 0 = integer_literal $ Builtin . Int64 , 27 <nl> + % 1 = integer_literal $ Builtin . Int64 , 28 <nl> + % 2 = struct $ Int64 ( % 0 : $ Builtin . Int64 ) <nl> + % 3 = struct $ Int64 ( % 1 : $ Builtin . Int64 ) <nl> + % 4 = tuple ( % 2 : $ Int64 , % 3 : $ Int64 ) <nl> + % initval = struct $ IntTuple ( % 4 : $ ( Int64 , Int64 ) ) <nl> + } <nl> + <nl> + / / CHECK - LABEL : sil @ load_from_global_let <nl> + / / CHECK : [ [ I : % . * ] ] = integer_literal $ Builtin . Int64 , 27 <nl> + / / CHECK : [ [ R : % . * ] ] = struct $ Int64 ( [ [ I ] ] : $ Builtin . Int64 ) <nl> + / / CHECK : return [ [ R ] ] <nl> + / / CHECK : } / / end sil function ' load_from_global_let ' <nl> + sil @ load_from_global_let : $ @ convention ( thin ) ( ) - > Int64 { <nl> + bb0 : <nl> + % 0 = global_addr @ initialized_let_global : $ * Int64 <nl> + % 1 = load % 0 : $ * Int64 <nl> + return % 1 : $ Int64 <nl> + } <nl> + <nl> + / / CHECK - LABEL : sil @ load_from_global_with_projections <nl> + / / CHECK : [ [ I : % . * ] ] = integer_literal $ Builtin . Int64 , 28 <nl> + / / CHECK : return [ [ I ] ] <nl> + / / CHECK : } / / end sil function ' load_from_global_with_projections ' <nl> + sil @ load_from_global_with_projections : $ @ convention ( thin ) ( ) - > Builtin . Int64 { <nl> + bb0 : <nl> + % 0 = global_addr @ initialized_tuple_global : $ * IntTuple <nl> + % 1 = struct_element_addr % 0 : $ * IntTuple , # IntTuple . 
t <nl> + % 2 = tuple_element_addr % 1 : $ * ( Int64 , Int64 ) , 1 <nl> + % 3 = struct_element_addr % 2 : $ * Int64 , # Int64 . _value <nl> + % 4 = load % 3 : $ * Builtin . Int64 <nl> + return % 4 : $ Builtin . Int64 <nl> + } <nl> + <nl> + / / CHECK - LABEL : sil @ load_from_global_without_projections <nl> + / / CHECK - DAG : [ [ I1 : % . * ] ] = integer_literal $ Builtin . Int64 , 27 <nl> + / / CHECK - DAG : [ [ I2 : % . * ] ] = integer_literal $ Builtin . Int64 , 28 <nl> + / / CHECK - DAG : [ [ S1 : % . * ] ] = struct $ Int64 ( [ [ I1 ] ] : $ Builtin . Int64 ) <nl> + / / CHECK - DAG : [ [ S2 : % . * ] ] = struct $ Int64 ( [ [ I2 ] ] : $ Builtin . Int64 ) <nl> + / / CHECK - DAG : [ [ T : % . * ] ] = tuple ( [ [ S1 ] ] : $ Int64 , [ [ S2 ] ] : $ Int64 ) <nl> + / / CHECK - DAG : [ [ IT : % . * ] ] = struct $ IntTuple ( [ [ T ] ] : $ ( Int64 , Int64 ) ) <nl> + / / CHECK : return [ [ IT ] ] <nl> + / / CHECK : } / / end sil function ' load_from_global_without_projections ' <nl> + sil @ load_from_global_without_projections : $ @ convention ( thin ) ( ) - > IntTuple { <nl> + bb0 : <nl> + % 0 = global_addr @ initialized_tuple_global : $ * IntTuple <nl> + % 1 = load % 0 : $ * IntTuple <nl> + return % 1 : $ IntTuple <nl> + } <nl> + <nl> + / / CHECK - LABEL : sil @ load_from_global_var <nl> + / / CHECK : global_addr <nl> + / / CHECK - NEXT : load <nl> + / / CHECK - NEXT : return <nl> + / / CHECK : } / / end sil function ' load_from_global_var ' <nl> + sil @ load_from_global_var : $ @ convention ( thin ) ( ) - > Int64 { <nl> + bb0 : <nl> + % 0 = global_addr @ initialized_var_global : $ * Int64 <nl> + % 1 = load % 0 : $ * Int64 <nl> + return % 1 : $ Int64 <nl> + } <nl>
Merge pull request from eeckstein/cmo-improvements
apple/swift
15dfa83d5da89bc4f034351a3357714af3f2a3ec
2020-06-23T08:54:02Z
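The core of this commit is the extended `shouldSerialize` heuristic: generic functions are still always serialized, and non-generic functions are now serialized too when their abstract cost stays under `-cmo-function-size-limit`. A rough Python sketch of that decision, with a `Function` record standing in for `SILFunction` and per-instruction costs standing in for `instructionInlineCost` (both illustrative, not the compiler's API):

```python
from dataclasses import dataclass
from typing import List

CMO_FUNCTION_SIZE_LIMIT = 20  # mirrors the -cmo-function-size-limit default

@dataclass
class Function:                    # illustrative stand-in for SILFunction
    is_serialized: bool
    semantics_attrs: List[str]
    is_generic: bool
    instruction_costs: List[int]   # abstract cost of each instruction

def should_serialize(func: Function) -> bool:
    """Sketch of the serialization decision added in this commit."""
    if func.is_serialized:
        return False               # already handled earlier
    if "optimize.no.crossmodule" in func.semantics_attrs:
        return False               # explicitly opted out
    if func.is_generic:
        return True                # generics benefit most from CMO
    # Non-generic functions are serialized only if they are "small".
    size = 0
    for cost in func.instruction_costs:
        size += cost
        if size >= CMO_FUNCTION_SIZE_LIMIT:
            return False
    return True

# A small non-generic function is serialized; a large one is not.
assert should_serialize(Function(False, [], False, [3, 3, 3]))
assert not should_serialize(Function(False, [], False, [10, 10, 10]))
```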
mmm a / test / ModuleInterface / ModuleCache / prebuilt - module - cache - archs . swift <nl> ppp b / test / ModuleInterface / ModuleCache / prebuilt - module - cache - archs . swift <nl> <nl> / / RUN : % empty - directory ( % t ) <nl> / / RUN : % empty - directory ( % t / include / Lib . swiftmodule ) <nl> - / / RUN : cp % S / Inputs / prebuilt - module - cache / Lib . swiftinterface % t / include / Lib . swiftmodule / % target - cpu . swiftinterface <nl> + / / RUN : cp % S / Inputs / prebuilt - module - cache / Lib . swiftinterface % t / include / Lib . swiftmodule / % target - swiftinterface - name <nl> <nl> / / Baseline check : if the prebuilt cache path does not exist , everything should <nl> / / still work . <nl> mmm a / test / ModuleInterface / ModuleCache / prebuilt - module - cache - unusable . swift <nl> ppp b / test / ModuleInterface / ModuleCache / prebuilt - module - cache - unusable . swift <nl> <nl> / / RUN : % empty - directory ( % t / MCP ) <nl> / / RUN : % empty - directory ( % t / prebuilt - cache / Lib . swiftmodule ) <nl> / / RUN : % empty - directory ( % t / include / Lib . swiftmodule ) <nl> - / / RUN : cp % S / Inputs / prebuilt - module - cache / Lib . swiftinterface % t / include / Lib . swiftmodule / % target - cpu . swiftinterface <nl> + / / RUN : cp % S / Inputs / prebuilt - module - cache / Lib . swiftinterface % t / include / Lib . swiftmodule / % target - swiftinterface - name <nl> <nl> / / Prebuild a module for the current target CPU , and put it in the prebuilt cache under some imaginary CPU . <nl> - / / RUN : sed - e ' s / FromInterface / FromPrebuilt / g ' % t / include / Lib . swiftmodule / % target - cpu . swiftinterface | % target - swift - frontend - parse - stdlib - module - cache - path % t / MCP - emit - module - path % t / prebuilt - cache / Lib . swiftmodule / leg128 . swiftmodule - - module - name Lib <nl> + / / RUN : sed - e ' s / FromInterface / FromPrebuilt / g ' % t / include / Lib . swiftmodule / % target - swiftinterface - name | % target - swift - frontend - parse - stdlib - module - cache - path % t / MCP - emit - module - path % t / prebuilt - cache / Lib . swiftmodule / leg128 . swiftmodule - - module - name Lib <nl> <nl> / / Make sure that , if there ' s a module for a different architecture <nl> / / present in the prebuilt cache , it ' s ignored and the module is <nl> mmm a / test / ModuleInterface / ModuleCache / prefer - local - module - to - sdk - framework . swift <nl> ppp b / test / ModuleInterface / ModuleCache / prefer - local - module - to - sdk - framework . swift <nl> <nl> / / RUN : echo ' public func showsUpInBothPlaces ( ) { } ' > % t / Lib . swift <nl> <nl> / / 1 . Create a . swiftinterface file containing just one API , and put it inside a second build dir ( without a . swiftmodule ) <nl> - / / RUN : % target - swift - frontend - typecheck % t / Lib . swift - emit - module - interface - path % t / SecondBuildDir / Lib . framework / Modules / Lib . swiftmodule / % target - cpu . swiftinterface - module - name Lib <nl> + / / RUN : % target - swift - frontend - typecheck % t / Lib . swift - emit - module - interface - path % t / SecondBuildDir / Lib . framework / Modules / Lib . swiftmodule / % target - swiftinterface - name - module - name Lib <nl> <nl> / / 2 . Add a new API to the module , and compile just the serialized version in the build dir . <nl> / / RUN : echo ' public func onlyInTheCompiledModule ( ) { } ' > > % t / Lib . swift <nl> - / / RUN : % target - swift - frontend - emit - module % t / Lib . 
swift - o % t / BuildDir / Lib . framework / Modules / Lib . swiftmodule / % target - cpu . swiftmodule - emit - module - interface - path % t / BuildDir / Lib . framework / Modules / Lib . swiftmodule / % target - cpu . swiftinterface - module - name Lib <nl> + / / RUN : % target - swift - frontend - emit - module % t / Lib . swift - o % t / BuildDir / Lib . framework / Modules / Lib . swiftmodule / % target - swiftmodule - name - emit - module - interface - path % t / BuildDir / Lib . framework / Modules / Lib . swiftmodule / % target - swiftinterface - name - module - name Lib <nl> <nl> / / 3 . Make sure when we compile this test file , we can access both APIs since we ' ll <nl> / / load the compiled . swiftmodule instead of the . swiftinterface in the SDK . <nl> <nl> / / RUN : ls % t / ModuleCache | not grep ' swiftmodule ' <nl> <nl> / / 5 . This should also work if the swiftinterface isn ' t present in the first build dir . <nl> - / / RUN : rm % t / BuildDir / Lib . framework / Modules / Lib . swiftmodule / % target - cpu . swiftinterface <nl> + / / RUN : rm % t / BuildDir / Lib . framework / Modules / Lib . swiftmodule / % target - swiftinterface - name <nl> / / RUN : % target - swift - frontend - typecheck % s - F % t / BuildDir - F % t / SecondBuildDir - module - cache - path % t / ModuleCache <nl> <nl> / / 6 . Make sure we / still / didn ' t compile any . swiftinterfaces into the module cache . <nl> mmm a / test / ModuleInterface / default - prebuilt - module - location . swift <nl> ppp b / test / ModuleInterface / default - prebuilt - module - location . swift <nl> <nl> / / 3 . Compile this into a module and put it into the default prebuilt cache <nl> / / location relative to the fake resource dir . Also drop an interface into <nl> / / the build dir . <nl> - / / RUN : % target - swift - frontend - emit - module % t / PrebuiltModule . swift - o % t / ResourceDir / % target - sdk - name / prebuilt - modules / PrebuiltModule . swiftmodule / % target - cpu . swiftmodule - module - name PrebuiltModule - parse - stdlib - emit - module - interface - path % t / PrebuiltModule . swiftmodule / % target - cpu . swiftinterface <nl> + / / RUN : % target - swift - frontend - emit - module % t / PrebuiltModule . swift - o % t / ResourceDir / % target - sdk - name / prebuilt - modules / PrebuiltModule . swiftmodule / % target - swiftmodule - name - module - name PrebuiltModule - parse - stdlib - emit - module - interface - path % t / PrebuiltModule . swiftmodule / % target - swiftinterface - name <nl> <nl> / / 4 . Import this prebuilt module , but DON ' T pass in - prebuilt - module - cache - path , it should use the implicit one . <nl> / / RUN : % target - swift - frontend - typecheck - resource - dir % t / ResourceDir - I % t % s - parse - stdlib - module - cache - path % t / ModuleCache - sdk % t <nl> mmm a / test / ModuleInterface / swift_build_sdk_interfaces / check - only - mode . swift <nl> ppp b / test / ModuleInterface / swift_build_sdk_interfaces / check - only - mode . swift <nl> <nl> / / RUN : mkdir - p % t / sdk / usr / lib / swift / Normal . swiftmodule <nl> / / RUN : mkdir - p % t / sdk / System / Library / Frameworks / FMWK . framework / Modules / FMWK . swiftmodule <nl> <nl> - / / RUN : echo ' public func normal ( ) { } ' | % target - swift - frontend - - emit - module - interface - path % t / sdk / usr / lib / swift / Normal . swiftmodule / % target - cpu . 
swiftinterface - emit - module - o / dev / null - module - name Normal <nl> + / / RUN : echo ' public func normal ( ) { } ' | % target - swift - frontend - - emit - module - interface - path % t / sdk / usr / lib / swift / Normal . swiftmodule / % target - swiftinterface - name - emit - module - o / dev / null - module - name Normal <nl> / / RUN : echo ' public func flat ( ) { } ' | % target - swift - frontend - - emit - module - interface - path % t / sdk / usr / lib / swift / Flat . swiftinterface - emit - module - o / dev / null - module - name Flat <nl> - / / RUN : echo ' public func fmwk ( ) { } ' | % target - swift - frontend - - emit - module - interface - path % t / sdk / System / Library / Frameworks / FMWK . framework / Modules / FMWK . swiftmodule / % target - cpu . swiftinterface - emit - module - o / dev / null - module - name FMWK <nl> + / / RUN : echo ' public func fmwk ( ) { } ' | % target - swift - frontend - - emit - module - interface - path % t / sdk / System / Library / Frameworks / FMWK . framework / Modules / FMWK . swiftmodule / % target - swiftinterface - name - emit - module - o / dev / null - module - name FMWK <nl> <nl> / / RUN : % swift_build_sdk_interfaces - sdk % t / sdk - Fsystem % t / sdk / System / Library / Frameworks - v - o % t / prebuilt - check - only <nl> / / RUN : ls % t / prebuilt | % FileCheck % s <nl> <nl> / / Touch a file in the SDK ( to make it look like it changed ) and try again . <nl> / / In - check - only mode , this should force a rebuild . <nl> / / RUN : rm - rf % t / MCP <nl> - / / RUN : % { python } % S / . . / ModuleCache / Inputs / make - old . py % t / sdk / usr / lib / swift / Normal . swiftmodule / % target - cpu . swiftinterface <nl> + / / RUN : % { python } % S / . . / ModuleCache / Inputs / make - old . py % t / sdk / usr / lib / swift / Normal . swiftmodule / % target - swiftinterface - name <nl> / / RUN : % target - typecheck - verify - swift - sdk % t / sdk - Fsystem % t / sdk / System / Library / Frameworks - I % t / sdk / usr / lib / swift / - module - cache - path % t / MCP - prebuilt - module - cache - path % t / prebuilt <nl> / / RUN : not % { python } % S / . . / ModuleCache / Inputs / check - is - forwarding - module . py % t / MCP / Normal - * . swiftmodule <nl> <nl> mmm a / test / ModuleInterface / swift_build_sdk_interfaces / compiler - uses - prebuilt . swift <nl> ppp b / test / ModuleInterface / swift_build_sdk_interfaces / compiler - uses - prebuilt . swift <nl> <nl> / / RUN : mkdir - p % t / sdk / usr / lib / swift / Normal . swiftmodule <nl> / / RUN : mkdir - p % t / sdk / System / Library / Frameworks / FMWK . framework / Modules / FMWK . swiftmodule <nl> <nl> - / / RUN : echo ' public func normal ( ) { } ' | % target - swift - frontend - - emit - module - interface - path % t / sdk / usr / lib / swift / Normal . swiftmodule / % target - cpu . swiftinterface - emit - module - o / dev / null - module - name Normal <nl> + / / RUN : echo ' public func normal ( ) { } ' | % target - swift - frontend - - emit - module - interface - path % t / sdk / usr / lib / swift / Normal . swiftmodule / % target - swiftinterface - name - emit - module - o / dev / null - module - name Normal <nl> / / RUN : echo ' public func flat ( ) { } ' | % target - swift - frontend - - emit - module - interface - path % t / sdk / usr / lib / swift / Flat . 
swiftinterface - emit - module - o / dev / null - module - name Flat <nl> - / / RUN : echo ' public func fmwk ( ) { } ' | % target - swift - frontend - - emit - module - interface - path % t / sdk / System / Library / Frameworks / FMWK . framework / Modules / FMWK . swiftmodule / % target - cpu . swiftinterface - emit - module - o / dev / null - module - name FMWK <nl> + / / RUN : echo ' public func fmwk ( ) { } ' | % target - swift - frontend - - emit - module - interface - path % t / sdk / System / Library / Frameworks / FMWK . framework / Modules / FMWK . swiftmodule / % target - swiftinterface - name - emit - module - o / dev / null - module - name FMWK <nl> <nl> / / RUN : % swift_build_sdk_interfaces - sdk % t / sdk - Fsystem % t / sdk / System / Library / Frameworks - v - o % t / prebuilt <nl> / / RUN : ls % t / prebuilt | % FileCheck % s <nl> <nl> / / This should still be able to use the prebuilt modules because they track <nl> / / content hashes , not just size + mtime . <nl> / / RUN : rm - rf % t / MCP <nl> - / / RUN : % { python } % S / . . / ModuleCache / Inputs / make - old . py % t / sdk / usr / lib / swift / Normal . swiftmodule / % target - cpu . swiftinterface <nl> + / / RUN : % { python } % S / . . / ModuleCache / Inputs / make - old . py % t / sdk / usr / lib / swift / Normal . swiftmodule / % target - swiftinterface - name <nl> / / RUN : % target - typecheck - verify - swift - sdk % t / sdk - Fsystem % t / sdk / System / Library / Frameworks - I % t / sdk / usr / lib / swift / - module - cache - path % t / MCP - prebuilt - module - cache - path % t / prebuilt <nl> / / RUN : ls % t / MCP / * . swiftmodule | % FileCheck - check - prefix CHECK - CACHE % s <nl> / / RUN : % { python } % S / . . / ModuleCache / Inputs / check - is - forwarding - module . py % t / MCP / * . swiftmodule <nl> <nl> / / Actually change a file in the SDK , to check that we ' re tracking dependencies <nl> / / at all . <nl> / / RUN : rm - rf % t / MCP <nl> - / / RUN : echo " public func another ( ) " > > % t / sdk / usr / lib / swift / Normal . swiftmodule / % target - cpu . swiftinterface <nl> + / / RUN : echo " public func another ( ) " > > % t / sdk / usr / lib / swift / Normal . swiftmodule / % target - swiftinterface - name <nl> / / RUN : % target - typecheck - verify - swift - sdk % t / sdk - Fsystem % t / sdk / System / Library / Frameworks - I % t / sdk / usr / lib / swift / - module - cache - path % t / MCP - prebuilt - module - cache - path % t / prebuilt <nl> / / RUN : ls % t / MCP / * . swiftmodule | % FileCheck - check - prefix CHECK - CACHE % s <nl> / / RUN : not % { python } % S / . . / ModuleCache / Inputs / check - is - forwarding - module . py % t / MCP / Normal - * . swiftmodule <nl> mmm a / test / ModuleInterface / swift_build_sdk_interfaces / track - system - dependencies . swift <nl> ppp b / test / ModuleInterface / swift_build_sdk_interfaces / track - system - dependencies . swift <nl> <nl> / / RUN : % empty - directory ( % t ) <nl> / / RUN : cp - r % S / Inputs / system - dependencies - sdk % t / sdk <nl> - / / RUN : echo ' import Platform ; public func usesCStruct ( _ : SomeCStruct ? ) { } ' | % target - swift - frontend - - emit - module - interface - path % t / sdk / usr / lib / swift / Swifty . swiftmodule / % target - cpu . swiftinterface - emit - module - o / dev / null - module - name Swifty - sdk % t / sdk <nl> + / / RUN : echo ' import Platform ; public func usesCStruct ( _ : SomeCStruct ? 
) { } ' | % target - swift - frontend - - emit - module - interface - path % t / sdk / usr / lib / swift / Swifty . swiftmodule / % target - swiftinterface - name - emit - module - o / dev / null - module - name Swifty - sdk % t / sdk <nl> <nl> / / RUN : % swift_build_sdk_interfaces - sdk % t / sdk - v - o % t / prebuilt <nl> / / RUN : ls % t / prebuilt | % FileCheck % s <nl> mmm a / test / Serialization / load - target - fallback . swift <nl> ppp b / test / Serialization / load - target - fallback . swift <nl> <nl> <nl> / / RUN : mkdir % t / TargetLibrary . swiftmodule <nl> / / RUN : % target - swift - frontend - emit - module - o % t / TargetLibrary . swiftmodule / % module - target - triple . swiftmodule % S / Inputs / def_func . swift - module - name TargetLibrary <nl> - / / RUN : touch % t / TargetLibrary . swiftmodule / % target - cpu . swiftmodule <nl> + / / RUN : touch % t / TargetLibrary . swiftmodule / % target - swiftmodule - name <nl> <nl> import TargetLibrary <nl> <nl> / / RUN : mkdir % t / ArchLibrary . swiftmodule <nl> - / / RUN : % target - swift - frontend - emit - module - o % t / ArchLibrary . swiftmodule / % target - cpu . swiftmodule % S / Inputs / def_func . swift - module - name ArchLibrary <nl> + / / RUN : % target - swift - frontend - emit - module - o % t / ArchLibrary . swiftmodule / % target - swiftmodule - name % S / Inputs / def_func . swift - module - name ArchLibrary <nl> <nl> import ArchLibrary <nl> <nl> / / RUN : mkdir - p % t / TargetModule . framework / Modules / TargetModule . swiftmodule <nl> / / RUN : % target - swift - frontend - emit - module - o % t / TargetModule . framework / Modules / TargetModule . swiftmodule / % module - target - triple . swiftmodule % S / Inputs / def_func . swift - module - name TargetModule <nl> - / / RUN : touch % t / TargetModule . framework / Modules / TargetModule . swiftmodule / % target - cpu . swiftmodule <nl> + / / RUN : touch % t / TargetModule . framework / Modules / TargetModule . swiftmodule / % target - swiftmodule - name <nl> <nl> import TargetModule <nl> <nl> / / RUN : mkdir - p % t / ArchModule . framework / Modules / ArchModule . swiftmodule <nl> - / / RUN : % target - swift - frontend - emit - module - o % t / ArchModule . framework / Modules / ArchModule . swiftmodule / % target - cpu . swiftmodule % S / Inputs / def_func . swift - module - name ArchModule <nl> + / / RUN : % target - swift - frontend - emit - module - o % t / ArchModule . framework / Modules / ArchModule . swiftmodule / % target - swiftmodule - name % S / Inputs / def_func . swift - module - name ArchModule <nl> <nl> import ArchModule <nl> <nl> mmm a / test / lit . cfg <nl> ppp b / test / lit . cfg <nl> if run_vers . endswith ( ' - simulator ' ) : <nl> else : <nl> run_environment = ' ' <nl> <nl> + target_arch = run_cpu <nl> + if run_os = = ' openbsd ' and run_cpu = = ' amd64 ' : <nl> + target_arch = run_cpu <nl> + run_cpu = ' x86_64 ' <nl> + <nl> run_ptrsize = ' 64 ' if ( ' 64 ' in run_cpu or run_cpu = = " s390x " ) else ' 32 ' <nl> run_ptrauth = ' ptrauth ' if run_cpu = = ' arm64e ' else ' noptrauth ' <nl> run_endian = ' little ' if run_cpu ! = ' s390x ' else ' big ' <nl> if swift_execution_tests_extra_flags : <nl> <nl> platform_module_dir = make_path ( test_resource_dir , config . target_sdk_name ) <nl> if run_vendor ! 
= ' apple ' : <nl> - platform_module_dir = make_path ( platform_module_dir , run_cpu ) <nl> + platform_module_dir = make_path ( platform_module_dir , target_arch ) <nl> <nl> platform_dylib_dir = platform_module_dir <nl> if run_os = = ' maccatalyst ' and config . darwin_maccatalyst_build_flavor = = " ios - like " : <nl> config . substitutions . append ( ( ' % xcode - extra - frameworks - dir ' , extra_frameworks_dir <nl> config . substitutions . append ( ( ' % target - swiftmodule - name ' , target_specific_module_triple + ' . swiftmodule ' ) ) <nl> config . substitutions . append ( ( ' % target - swiftdoc - name ' , target_specific_module_triple + ' . swiftdoc ' ) ) <nl> config . substitutions . append ( ( ' % target - swiftsourceinfo - name ' , target_specific_module_triple + ' . swiftsourceinfo ' ) ) <nl> + config . substitutions . append ( ( ' % target - swiftinterface - name ' , target_specific_module_triple + ' . swiftinterface ' ) ) <nl> <nl> config . substitutions . append ( ( ' % target - object - format ' , config . target_object_format ) ) <nl> config . substitutions . append ( ( ' % { target - shared - library - prefix } ' , config . target_shared_library_prefix ) ) <nl>
[ test ] Handle architecture aliasing for OpenBSD .
apple/swift
c8ba43744401f32dafbc960a7636b3e671875801
2020-06-13T01:06:37Z
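The commit above replaces hard-coded `%target-cpu` file names in RUN lines with lit substitutions such as `%target-swiftmodule-name` and the newly added `%target-swiftinterface-name`, so the OpenBSD `amd64`/`x86_64` aliasing is resolved once in test/lit.cfg instead of in every test. Below is a minimal Python sketch of how lit-style substitutions expand a RUN line; it is illustrative only, not lit's actual implementation, and the triple value is an assumed example.

# Minimal sketch of lit-style substitution expansion (not lit's real code).
# The triple is an assumed example; in test/lit.cfg it is derived from
# run_cpu/run_os, with OpenBSD's 'amd64' kept as target_arch while run_cpu
# is aliased to 'x86_64'.
target_specific_module_triple = 'x86_64-unknown-openbsd'

substitutions = [
    ('%target-swiftmodule-name', target_specific_module_triple + '.swiftmodule'),
    ('%target-swiftdoc-name', target_specific_module_triple + '.swiftdoc'),
    ('%target-swiftinterface-name', target_specific_module_triple + '.swiftinterface'),
]

def expand(run_line):
    # Apply longer keys first so a key that is a prefix of another key
    # cannot partially rewrite it.
    for key, value in sorted(substitutions, key=lambda kv: len(kv[0]), reverse=True):
        run_line = run_line.replace(key, value)
    return run_line

print(expand('touch %t/TargetLibrary.swiftmodule/%target-swiftmodule-name'))
# touch %t/TargetLibrary.swiftmodule/x86_64-unknown-openbsd.swiftmodule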
mmm a / atom / browser / resources / mac / Info . plist <nl> ppp b / atom / browser / resources / mac / Info . plist <nl> <nl> < key > CFBundleIconFile < / key > <nl> < string > electron . icns < / string > <nl> < key > CFBundleVersion < / key > <nl> - < string > 1 . 8 . 2 < / string > <nl> + < string > 0 . 0 . 0 < / string > <nl> < key > CFBundleShortVersionString < / key > <nl> - < string > 1 . 8 . 2 < / string > <nl> + < string > 0 . 0 . 0 < / string > <nl> < key > LSApplicationCategoryType < / key > <nl> < string > public . app - category . developer - tools < / string > <nl> < key > LSMinimumSystemVersion < / key > <nl> mmm a / atom / browser / resources / win / atom . rc <nl> ppp b / atom / browser / resources / win / atom . rc <nl> END <nl> / / <nl> <nl> VS_VERSION_INFO VERSIONINFO <nl> - FILEVERSION 1 , 8 , 2 , 2 <nl> - PRODUCTVERSION 1 , 8 , 2 , 2 <nl> + FILEVERSION 0 , 0 , 0 , 0 <nl> + PRODUCTVERSION 0 , 0 , 0 , 0 <nl> FILEFLAGSMASK 0x3fL <nl> # ifdef _DEBUG <nl> FILEFLAGS 0x1L <nl> BEGIN <nl> BEGIN <nl> VALUE " CompanyName " , " GitHub , Inc . " <nl> VALUE " FileDescription " , " Electron " <nl> - VALUE " FileVersion " , " 1 . 8 . 2 " <nl> + VALUE " FileVersion " , " 0 . 0 . 0 " <nl> VALUE " InternalName " , " electron . exe " <nl> VALUE " LegalCopyright " , " Copyright ( C ) 2015 GitHub , Inc . All rights reserved . " <nl> VALUE " OriginalFilename " , " electron . exe " <nl> VALUE " ProductName " , " Electron " <nl> - VALUE " ProductVersion " , " 1 . 8 . 2 " <nl> + VALUE " ProductVersion " , " 0 . 0 . 0 " <nl> VALUE " SquirrelAwareVersion " , " 1 " <nl> END <nl> END <nl> mmm a / atom / common / atom_version . h <nl> ppp b / atom / common / atom_version . h <nl> <nl> # ifndef ATOM_COMMON_ATOM_VERSION_H_ <nl> # define ATOM_COMMON_ATOM_VERSION_H_ <nl> <nl> - # define ATOM_MAJOR_VERSION 1 <nl> - # define ATOM_MINOR_VERSION 8 <nl> - # define ATOM_PATCH_VERSION 2 <nl> - # define ATOM_PRE_RELEASE_VERSION - beta . 2 <nl> + # define ATOM_MAJOR_VERSION 0 <nl> + # define ATOM_MINOR_VERSION 0 <nl> + # define ATOM_PATCH_VERSION 0 <nl> + # define ATOM_PRE_RELEASE_VERSION - dev <nl> <nl> # ifndef ATOM_STRINGIFY <nl> # define ATOM_STRINGIFY ( n ) ATOM_STRINGIFY_HELPER ( n ) <nl> mmm a / electron . gyp <nl> ppp b / electron . gyp <nl> <nl> ' product_name % ' : ' Electron ' , <nl> ' company_name % ' : ' GitHub , Inc ' , <nl> ' company_abbr % ' : ' github ' , <nl> - ' version % ' : ' 1 . 8 . 2 - beta . 2 ' , <nl> + ' version % ' : ' 0 . 0 . 0 - dev ' , <nl> ' js2c_input_dir ' : ' < ( SHARED_INTERMEDIATE_DIR ) / js2c ' , <nl> } , <nl> ' includes ' : [ <nl>
Use version 0 . 0 . 0 - dev everywhere
electron/electron
52c0864cb855674b75669c222485333d36123694
2018-03-05T15:24:48Z
mmm a / api / envoy / config / filter / network / http_connection_manager / v2 / http_connection_manager . proto <nl> ppp b / api / envoy / config / filter / network / http_connection_manager / v2 / http_connection_manager . proto <nl> message HttpConnectionManager { <nl> / / control . <nl> bool represent_ipv4_remote_address_as_ipv4_mapped_ipv6 = 20 ; <nl> <nl> - / / [ # not - implemented - hide : ] <nl> / / The configuration for HTTP upgrades . <nl> / / For each upgrade type desired , an UpgradeConfig must be added . <nl> / / <nl> message HttpConnectionManager { <nl> / / The current implementation of upgrade headers does not handle <nl> / / multi - valued upgrade headers . Support for multi - valued headers may be <nl> / / added in the future if needed . <nl> + / / <nl> + / / . . warning : : <nl> + / / The current implementation of upgrade headers does not work with HTTP / 2 <nl> + / / upstreams . <nl> message UpgradeConfig { <nl> / / The case - insensitive name of this upgrade , e . g . " websocket " . <nl> / / For each upgrade type present in upgrade_configs , requests with <nl> message HttpConnectionManager { <nl> / / HTTP connections will be used for this upgrade type . <nl> repeated HttpFilter filters = 2 ; <nl> } ; <nl> - / / [ # not - implemented - hide : ] <nl> repeated UpgradeConfig upgrade_configs = 23 ; <nl> } <nl> <nl> mmm a / docs / root / intro / arch_overview / websocket . rst <nl> ppp b / docs / root / intro / arch_overview / websocket . rst <nl> <nl> . . _arch_overview_websocket : <nl> <nl> - WebSocket support <nl> - = = = = = = = = = = = = = = = = = <nl> + Envoy currently supports two modes of Upgrade behavior , the new generic upgrade mode , and <nl> + the old WebSocket - only TCP proxy mode . <nl> + <nl> + <nl> + New style Upgrade support <nl> + = = = = = = = = = = = = = = = = = = = = = = = = = <nl> + <nl> + The new style Upgrade support is intended mainly for WebSocket but may be used for non - WebSocket <nl> + upgrades as well . The new style of upgrades passes both the HTTP headers and the upgrade payload <nl> + through an HTTP filter chain . One may configure the <nl> + : ref : ` upgrade_configs < envoy_api_field_config . filter . network . http_connection_manager . v2 . HttpConnectionManager . upgrade_configs > ` <nl> + in one of two ways . If only the <nl> + ` upgrade_type < envoy_api_field_config . filter . network . http_connection_manager . v2 . HttpConnectionManager . UpgradeConfigs . upgrade_type > ` <nl> + is specified , the upgrade headers , any request and response body , and WebSocket payload will <nl> + all pass through the default HTTP filter chain . To avoid the use of HTTP - only filters for upgrade payload , <nl> + one can set up custom <nl> + ` filters < envoy_api_field_config . filter . network . http_connection_manager . v2 . HttpConnectionManager . UpgradeConfigs . upgrade_type > ` <nl> + for the given upgrade type , up to and including only using the router filter to send the WebSocket <nl> + data upstream . <nl> + <nl> + Old style WebSocket support <nl> + = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> <nl> Envoy supports upgrading a HTTP / 1 . 1 connection to a WebSocket connection . <nl> Connection upgrade will be allowed only if the downstream client <nl> retries , rate limits and shadowing are not supported for WebSocket routes . <nl> However , prefix rewriting , explicit and automatic host rewriting , traffic <nl> shifting and splitting are supported .
<nl> <nl> - Connection semantics <nl> mmmmmmmmmmmmmmmmmmmmm <nl> + Old style Connection semantics <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> <nl> Even though WebSocket upgrades occur over HTTP / 1 . 1 connections , WebSockets <nl> proxying works similarly to plain TCP proxy , i . e . , Envoy does not interpret <nl> mmm a / docs / root / intro / version_history . rst <nl> ppp b / docs / root / intro / version_history . rst <nl> Version history <nl> * health_check : added support for : ref : ` health check event logging < arch_overview_health_check_logging > ` . <nl> * http : better handling of HEAD requests . Now sending transfer - encoding : chunked rather than content - length : 0 . <nl> * http : response filters not applied to early error paths such as http_parser generated 400s . <nl> + * http : added generic <nl> + : ref : ` Upgrade support <nl> + < envoy_api_field_config . filter . network . http_connection_manager . v2 . HttpConnectionManager . upgrade_configs > ` <nl> * lua : added : ref : ` connection ( ) < config_http_filters_lua_connection_wrapper > ` wrapper and * ssl ( ) * API . <nl> * lua : added : ref : ` requestInfo ( ) < config_http_filters_lua_request_info_wrapper > ` wrapper and * protocol ( ) * API . <nl> * ratelimit : added support for : repo : ` api / envoy / service / ratelimit / v2 / rls . proto ` . <nl> mmm a / source / common / http / conn_manager_impl . cc <nl> ppp b / source / common / http / conn_manager_impl . cc <nl> const Network : : Connection * ConnectionManagerImpl : : ActiveStream : : connection ( ) { <nl> <nl> void ConnectionManagerImpl : : ActiveStream : : decodeHeaders ( HeaderMapPtr & & headers , bool end_stream ) { <nl> request_headers_ = std : : move ( headers ) ; <nl> - createFilterChain ( ) ; <nl> + const bool upgrade_rejected = createFilterChain ( ) = = false ; <nl> <nl> maybeEndDecode ( end_stream ) ; <nl> <nl> void ConnectionManagerImpl : : ActiveStream : : decodeHeaders ( HeaderMapPtr & & headers , <nl> connection_manager_ . stats_ . named_ . downstream_cx_http1_active_ . dec ( ) ; <nl> connection_manager_ . stats_ . named_ . downstream_cx_websocket_total_ . inc ( ) ; <nl> return ; <nl> - } else if ( websocket_requested ) { <nl> + } else if ( upgrade_rejected ) { <nl> / / Do not allow WebSocket upgrades if the route does not support it . <nl> connection_manager_ . stats_ . named_ . downstream_rq_ws_on_non_ws_route_ . inc ( ) ; <nl> sendLocalReply ( Grpc : : Common : : hasGrpcContentType ( * request_headers_ ) , Code : : Forbidden , " " , <nl> void ConnectionManagerImpl : : ActiveStream : : encodeHeaders ( ActiveStreamEncoderFilte <nl> <nl> if ( connection_manager_ . drain_state_ = = DrainState : : Closing & & <nl> connection_manager_ . codec_ - > protocol ( ) ! = Protocol : : Http2 ) { <nl> - headers . insertConnection ( ) . value ( ) . setReference ( Headers : : get ( ) . ConnectionValues . Close ) ; <nl> + / / If the connection manager is draining , send " Connection : Close " on HTTP / 1 . 1 connections . <nl> + / / Do not do this for H2 ( which drains via GOAWAY ) or Upgrade ( as the upgrade <nl> + / / payload is no longer HTTP / 1 . 1 ) <nl> + if ( headers . Connection ( ) = = nullptr | | headers . Connection ( ) - > value ( ) ! = " Upgrade " ) { <nl> + headers . insertConnection ( ) . value ( ) . setReference ( Headers : : get ( ) . ConnectionValues . Close ) ; <nl> + } <nl> + } <nl> <nl> if ( connection_manager_ . config_ .
tracingConfig ( ) ) { <nl> void ConnectionManagerImpl : : ActiveStream : : setBufferLimit ( uint32_t new_limit ) { <nl> } <nl> } <nl> <nl> - void ConnectionManagerImpl : : ActiveStream : : createFilterChain ( ) { <nl> + bool ConnectionManagerImpl : : ActiveStream : : createFilterChain ( ) { <nl> + bool upgrade_rejected = false ; <nl> + auto upgrade = request_headers_ - > Upgrade ( ) ; <nl> + if ( upgrade ! = nullptr ) { <nl> + if ( connection_manager_ . config_ . filterFactory ( ) . createUpgradeFilterChain ( <nl> + upgrade - > value ( ) . c_str ( ) , * this ) ) { <nl> + return true ; <nl> + } else { <nl> + upgrade_rejected = true ; <nl> + / / Fall through to the default filter chain . The function calling this <nl> + / / will send a local reply indicating that the upgrade failed . <nl> + } <nl> + } <nl> + <nl> connection_manager_ . config_ . filterFactory ( ) . createFilterChain ( * this ) ; <nl> + return ! upgrade_rejected ; <nl> } <nl> <nl> void ConnectionManagerImpl : : ActiveStreamFilterBase : : commonContinue ( ) { <nl> mmm a / source / common / http / conn_manager_impl . h <nl> ppp b / source / common / http / conn_manager_impl . h <nl> class ConnectionManagerImpl : Logger : : Loggable < Logger : : Id : : http > , <nl> / / Possibly increases buffer_limit_ to the value of limit . <nl> void setBufferLimit ( uint32_t limit ) ; <nl> / / Set up the Encoder / Decoder filter chain . <nl> - void createFilterChain ( ) ; <nl> + bool createFilterChain ( ) ; <nl> <nl> ConnectionManagerImpl & connection_manager_ ; <nl> Router : : ConfigConstSharedPtr snapped_route_config_ ; <nl> mmm a / source / common / http / conn_manager_utility . cc <nl> ppp b / source / common / http / conn_manager_utility . cc <nl> Network : : Address : : InstanceConstSharedPtr ConnectionManagerUtility : : mutateRequest <nl> request_headers . removeEnvoyInternalRequest ( ) ; <nl> request_headers . removeKeepAlive ( ) ; <nl> request_headers . removeProxyConnection ( ) ; <nl> - / / TODO ( alyssawilk ) handle this with current and new websocket here and below . <nl> request_headers . removeTransferEncoding ( ) ; <nl> <nl> / / If we are " using remote address " this means that we create / append to XFF with our immediate <nl> mmm a / source / common / http / http1 / codec_impl . cc <nl> ppp b / source / common / http / http1 / codec_impl . cc <nl> void StreamEncoderImpl : : encodeHeaders ( const HeaderMap & headers , bool end_stream ) <nl> Headers : : get ( ) . TransferEncoding . get ( ) . size ( ) , <nl> Headers : : get ( ) . TransferEncodingValues . Chunked . c_str ( ) , <nl> Headers : : get ( ) . TransferEncodingValues . Chunked . size ( ) ) ; <nl> + / / We do not apply chunk encoding for HTTP upgrades . <nl> + / / If there is a body in a WebSocket Upgrade response , the chunks will be <nl> + / / passed through via maybeDirectDispatch so we need to avoid appending <nl> + / / extra chunk boundaries . <nl> + / / <nl> / / When sending a response to a HEAD request Envoy may send an informational <nl> / / " Transfer - Encoding : chunked " header , but should not send a chunk encoded body . <nl> - chunk_encoding_ = ! is_response_to_head_request_ ; <nl> + chunk_encoding_ = ! Utility : : isUpgrade ( headers ) & & !
is_response_to_head_request_ ; <nl> } <nl> } <nl> <nl> http_parser_settings ConnectionImpl : : settings_ { <nl> return 0 ; <nl> } , <nl> [ ] ( http_parser * parser ) - > int { <nl> - static_cast < ConnectionImpl * > ( parser - > data ) - > onMessageComplete ( ) ; <nl> + static_cast < ConnectionImpl * > ( parser - > data ) - > onMessageCompleteBase ( ) ; <nl> return 0 ; <nl> } , <nl> nullptr , / / on_chunk_header <nl> void ConnectionImpl : : completeLastHeader ( ) { <nl> ASSERT ( current_header_value_ . empty ( ) ) ; <nl> } <nl> <nl> + bool ConnectionImpl : : maybeDirectDispatch ( Buffer : : Instance & data ) { <nl> + if ( ! handling_upgrade_ ) { <nl> + / / Only direct dispatch for Upgrade requests . <nl> + return false ; <nl> + } <nl> + <nl> + ssize_t total_parsed = 0 ; <nl> + uint64_t num_slices = data . getRawSlices ( nullptr , 0 ) ; <nl> + Buffer : : RawSlice slices [ num_slices ] ; <nl> + data . getRawSlices ( slices , num_slices ) ; <nl> + for ( Buffer : : RawSlice & slice : slices ) { <nl> + total_parsed + = slice . len_ ; <nl> + onBody ( static_cast < const char * > ( slice . mem_ ) , slice . len_ ) ; <nl> + } <nl> + ENVOY_CONN_LOG ( trace , " direct - dispatched { } bytes " , connection_ , total_parsed ) ; <nl> + data . drain ( total_parsed ) ; <nl> + return true ; <nl> + } <nl> + <nl> void ConnectionImpl : : dispatch ( Buffer : : Instance & data ) { <nl> ENVOY_CONN_LOG ( trace , " parsing { } bytes " , connection_ , data . length ( ) ) ; <nl> <nl> + if ( maybeDirectDispatch ( data ) ) { <nl> + return ; <nl> + } <nl> + <nl> / / Always unpause before dispatch . <nl> http_parser_pause ( & parser_ , 0 ) ; <nl> <nl> void ConnectionImpl : : dispatch ( Buffer : : Instance & data ) { <nl> <nl> ENVOY_CONN_LOG ( trace , " parsed { } bytes " , connection_ , total_parsed ) ; <nl> data . drain ( total_parsed ) ; <nl> + <nl> + / / If an upgrade has been handled and there is body data or early upgrade <nl> + / / payload to send on , send it on . <nl> + maybeDirectDispatch ( data ) ; <nl> } <nl> <nl> size_t ConnectionImpl : : dispatchSlice ( const char * slice , size_t len ) { <nl> int ConnectionImpl : : onHeadersCompleteBase ( ) { <nl> / / HTTP / 1 . 1 or not . <nl> protocol_ = Protocol : : Http10 ; <nl> } <nl> + if ( Utility : : isUpgrade ( * current_header_map_ ) ) { <nl> + ENVOY_CONN_LOG ( trace , " codec entering upgrade mode . " , connection_ ) ; <nl> + handling_upgrade_ = true ; <nl> + } <nl> <nl> int rc = onHeadersComplete ( std : : move ( current_header_map_ ) ) ; <nl> current_header_map_ . reset ( ) ; <nl> header_parsing_state_ = HeaderParsingState : : Done ; <nl> - return rc ; <nl> + <nl> + / / Returning 2 informs http_parser to not expect a body or further data on this connection . <nl> + return handling_upgrade_ ? 2 : rc ; <nl> + } <nl> + <nl> + void ConnectionImpl : : onMessageCompleteBase ( ) { <nl> + ENVOY_CONN_LOG ( trace , " message complete " , connection_ ) ; <nl> + if ( handling_upgrade_ ) { <nl> + / / If this is an upgrade request , swallow the onMessageComplete . The <nl> + / / upgrade payload will be treated as stream body . <nl> + ASSERT ( ! deferred_end_stream_headers_ ) ; <nl> + ENVOY_CONN_LOG ( trace , " Pausing parser due to upgrade . 
" , connection_ ) ; <nl> + http_parser_pause ( & parser_ , 1 ) ; <nl> + return ; <nl> + } <nl> + onMessageComplete ( ) ; <nl> } <nl> <nl> void ConnectionImpl : : onMessageBeginBase ( ) { <nl> int ServerConnectionImpl : : onHeadersComplete ( HeaderMapImplPtr & & headers ) { <nl> / / scenario where the higher layers stream through and implicitly switch to chunked transfer <nl> / / encoding because end stream with zero body length has not yet been indicated . <nl> if ( parser_ . flags & F_CHUNKED | | <nl> - ( parser_ . content_length > 0 & & parser_ . content_length ! = ULLONG_MAX ) ) { <nl> + ( parser_ . content_length > 0 & & parser_ . content_length ! = ULLONG_MAX ) | | handling_upgrade_ ) { <nl> active_request_ - > request_decoder_ - > decodeHeaders ( std : : move ( headers ) , false ) ; <nl> <nl> / / If the connection has been closed ( or is closing ) after decoding headers , pause the parser <nl> void ServerConnectionImpl : : onBody ( const char * data , size_t length ) { <nl> <nl> void ServerConnectionImpl : : onMessageComplete ( ) { <nl> if ( active_request_ ) { <nl> - ENVOY_CONN_LOG ( trace , " message complete " , connection_ ) ; <nl> Buffer : : OwnedImpl buffer ; <nl> active_request_ - > remote_complete_ = true ; <nl> <nl> void ClientConnectionImpl : : onBody ( const char * data , size_t length ) { <nl> } <nl> <nl> void ClientConnectionImpl : : onMessageComplete ( ) { <nl> - ENVOY_CONN_LOG ( trace , " message complete " , connection_ ) ; <nl> if ( ignore_message_complete_for_100_continue_ ) { <nl> ignore_message_complete_for_100_continue_ = false ; <nl> return ; <nl> mmm a / source / common / http / http1 / codec_impl . h <nl> ppp b / source / common / http / http1 / codec_impl . h <nl> class ConnectionImpl : public virtual Connection , protected Logger : : Loggable < Log <nl> uint32_t bufferLimit ( ) { return connection_ . bufferLimit ( ) ; } <nl> virtual bool supports_http_10 ( ) { return false ; } <nl> <nl> + bool maybeDirectDispatch ( Buffer : : Instance & data ) ; <nl> + <nl> protected : <nl> ConnectionImpl ( Network : : Connection & connection , http_parser_type type ) ; <nl> <nl> class ConnectionImpl : public virtual Connection , protected Logger : : Loggable < Log <nl> http_parser parser_ ; <nl> HeaderMapPtr deferred_end_stream_headers_ ; <nl> Http : : Code error_code_ { Http : : Code : : BadRequest } ; <nl> + bool handling_upgrade_ { } ; <nl> <nl> private : <nl> enum class HeaderParsingState { Field , Value , Done } ; <nl> class ConnectionImpl : public virtual Connection , protected Logger : : Loggable < Log <nl> / * * <nl> * Called when the request / response is complete . <nl> * / <nl> + void onMessageCompleteBase ( ) ; <nl> virtual void onMessageComplete ( ) PURE ; <nl> <nl> / * * <nl> mmm a / test / common / http / http1 / codec_impl_test . cc <nl> ppp b / test / common / http / http1 / codec_impl_test . cc <nl> TEST_F ( Http1ServerConnectionImplTest , RequestWithTrailers ) { <nl> EXPECT_EQ ( 0U , buffer . length ( ) ) ; <nl> } <nl> <nl> + TEST_F ( Http1ServerConnectionImplTest , UpgradeRequest ) { <nl> + initialize ( ) ; <nl> + <nl> + InSequence sequence ; <nl> + NiceMock < Http : : MockStreamDecoder > decoder ; <nl> + EXPECT_CALL ( callbacks_ , newStream ( _ ) ) . WillOnce ( ReturnRef ( decoder ) ) ; <nl> + <nl> + EXPECT_CALL ( decoder , decodeHeaders_ ( _ , false ) ) . Times ( 1 ) ; <nl> + Buffer : : OwnedImpl buffer ( <nl> + " POST / HTTP / 1 . 
1 \ r \ nConnection : upgrade \ r \ nUpgrade : foo \ r \ ncontent - length : 5 \ r \ n \ r \ n " ) ; <nl> + codec_ - > dispatch ( buffer ) ; <nl> + <nl> + Buffer : : OwnedImpl expected_data1 ( " 12345 " ) ; <nl> + Buffer : : OwnedImpl body ( " 12345 " ) ; <nl> + EXPECT_CALL ( decoder , decodeData ( BufferEqual ( & expected_data1 ) , false ) ) . Times ( 1 ) ; <nl> + codec_ - > dispatch ( body ) ; <nl> + <nl> + Buffer : : OwnedImpl expected_data2 ( " abcd " ) ; <nl> + Buffer : : OwnedImpl websocket_payload ( " abcd " ) ; <nl> + EXPECT_CALL ( decoder , decodeData ( BufferEqual ( & expected_data2 ) , false ) ) . Times ( 1 ) ; <nl> + codec_ - > dispatch ( websocket_payload ) ; <nl> + } <nl> + <nl> + TEST_F ( Http1ServerConnectionImplTest , UpgradeRequestWithEarlyData ) { <nl> + initialize ( ) ; <nl> + <nl> + InSequence sequence ; <nl> + NiceMock < Http : : MockStreamDecoder > decoder ; <nl> + EXPECT_CALL ( callbacks_ , newStream ( _ ) ) . WillOnce ( ReturnRef ( decoder ) ) ; <nl> + <nl> + Buffer : : OwnedImpl expected_data ( " 12345abcd " ) ; <nl> + EXPECT_CALL ( decoder , decodeHeaders_ ( _ , false ) ) . Times ( 1 ) ; <nl> + EXPECT_CALL ( decoder , decodeData ( BufferEqual ( & expected_data ) , false ) ) . Times ( 1 ) ; <nl> + Buffer : : OwnedImpl buffer ( " POST / HTTP / 1 . 1 \ r \ nConnection : upgrade \ r \ nUpgrade : " <nl> + " foo \ r \ ncontent - length : 5 \ r \ n \ r \ n12345abcd " ) ; <nl> + codec_ - > dispatch ( buffer ) ; <nl> + } <nl> + <nl> + TEST_F ( Http1ServerConnectionImplTest , UpgradeRequestWithTEChunked ) { <nl> + initialize ( ) ; <nl> + <nl> + InSequence sequence ; <nl> + NiceMock < Http : : MockStreamDecoder > decoder ; <nl> + EXPECT_CALL ( callbacks_ , newStream ( _ ) ) . WillOnce ( ReturnRef ( decoder ) ) ; <nl> + <nl> + / / Even with T - E chunked , the data should not be inspected for ( the not <nl> + / / present in this unit test ) chunks , but simply passed through . <nl> + Buffer : : OwnedImpl expected_data ( " 12345abcd " ) ; <nl> + EXPECT_CALL ( decoder , decodeHeaders_ ( _ , false ) ) . Times ( 1 ) ; <nl> + EXPECT_CALL ( decoder , decodeData ( BufferEqual ( & expected_data ) , false ) ) . Times ( 1 ) ; <nl> + Buffer : : OwnedImpl buffer ( " POST / HTTP / 1 . 1 \ r \ nConnection : upgrade \ r \ nUpgrade : " <nl> + " foo \ r \ ntransfer - encoding : chunked \ r \ n \ r \ n12345abcd " ) ; <nl> + codec_ - > dispatch ( buffer ) ; <nl> + } <nl> + <nl> + TEST_F ( Http1ServerConnectionImplTest , UpgradeRequestWithNoBody ) { <nl> + initialize ( ) ; <nl> + <nl> + InSequence sequence ; <nl> + NiceMock < Http : : MockStreamDecoder > decoder ; <nl> + EXPECT_CALL ( callbacks_ , newStream ( _ ) ) . WillOnce ( ReturnRef ( decoder ) ) ; <nl> + <nl> + / / Make sure we avoid the deferred_end_stream_headers_ optimization for <nl> + / / requests - with - no - body . <nl> + Buffer : : OwnedImpl expected_data ( " abcd " ) ; <nl> + EXPECT_CALL ( decoder , decodeHeaders_ ( _ , false ) ) . Times ( 1 ) ; <nl> + EXPECT_CALL ( decoder , decodeData ( BufferEqual ( & expected_data ) , false ) ) . Times ( 1 ) ; <nl> + Buffer : : OwnedImpl buffer ( <nl> + " GET / HTTP / 1 . 1 \ r \ nConnection : upgrade \ r \ nUpgrade : foo \ r \ ncontent - length : 0 \ r \ n \ r \ nabcd " ) ; <nl> + codec_ - > dispatch ( buffer ) ; <nl> + } <nl> + <nl> TEST_F ( Http1ServerConnectionImplTest , WatermarkTest ) { <nl> EXPECT_CALL ( connection_ , bufferLimit ( ) ) . Times ( 1 ) .
WillOnce ( Return ( 10 ) ) ; <nl> initialize ( ) ; <nl> TEST_F ( Http1ClientConnectionImplTest , GiantPath ) { <nl> codec_ - > dispatch ( response ) ; <nl> } <nl> <nl> + TEST_F ( Http1ClientConnectionImplTest , UpgradeResponse ) { <nl> + initialize ( ) ; <nl> + <nl> + InSequence s ; <nl> + <nl> + NiceMock < Http : : MockStreamDecoder > response_decoder ; <nl> + Http : : StreamEncoder & request_encoder = codec_ - > newStream ( response_decoder ) ; <nl> + TestHeaderMapImpl headers { { " : method " , " GET " } , { " : path " , " / " } , { " : authority " , " host " } } ; <nl> + request_encoder . encodeHeaders ( headers , true ) ; <nl> + <nl> + / / Send upgrade headers <nl> + EXPECT_CALL ( response_decoder , decodeHeaders_ ( _ , false ) ) ; <nl> + Buffer : : OwnedImpl response ( <nl> + " HTTP / 1 . 1 200 OK \ r \ nContent - Length : 5 \ r \ nConnection : upgrade \ r \ nUpgrade : websocket \ r \ n \ r \ n " ) ; <nl> + codec_ - > dispatch ( response ) ; <nl> + <nl> + / / Send body payload <nl> + Buffer : : OwnedImpl expected_data1 ( " 12345 " ) ; <nl> + Buffer : : OwnedImpl body ( " 12345 " ) ; <nl> + EXPECT_CALL ( response_decoder , decodeData ( BufferEqual ( & expected_data1 ) , false ) ) . Times ( 1 ) ; <nl> + codec_ - > dispatch ( body ) ; <nl> + <nl> + / / Send websocket payload <nl> + Buffer : : OwnedImpl expected_data2 ( " abcd " ) ; <nl> + Buffer : : OwnedImpl websocket_payload ( " abcd " ) ; <nl> + EXPECT_CALL ( response_decoder , decodeData ( BufferEqual ( & expected_data2 ) , false ) ) . Times ( 1 ) ; <nl> + codec_ - > dispatch ( websocket_payload ) ; <nl> + } <nl> + <nl> + / / Same data as above , but make sure directDispatch immediately hands off any <nl> + / / outstanding data . <nl> + TEST_F ( Http1ClientConnectionImplTest , UpgradeResponseWithEarlyData ) { <nl> + initialize ( ) ; <nl> + <nl> + InSequence s ; <nl> + <nl> + NiceMock < Http : : MockStreamDecoder > response_decoder ; <nl> + Http : : StreamEncoder & request_encoder = codec_ - > newStream ( response_decoder ) ; <nl> + TestHeaderMapImpl headers { { " : method " , " GET " } , { " : path " , " / " } , { " : authority " , " host " } } ; <nl> + request_encoder . encodeHeaders ( headers , true ) ; <nl> + <nl> + / / Send upgrade headers <nl> + EXPECT_CALL ( response_decoder , decodeHeaders_ ( _ , false ) ) ; <nl> + Buffer : : OwnedImpl expected_data ( " 12345abcd " ) ; <nl> + EXPECT_CALL ( response_decoder , decodeData ( BufferEqual ( & expected_data ) , false ) ) . Times ( 1 ) ; <nl> + Buffer : : OwnedImpl response ( " HTTP / 1 . 1 200 OK \ r \ nContent - Length : 5 \ r \ nConnection : " <nl> + " upgrade \ r \ nUpgrade : websocket \ r \ n \ r \ n12345abcd " ) ; <nl> + codec_ - > dispatch ( response ) ; <nl> + } <nl> + <nl> TEST_F ( Http1ClientConnectionImplTest , WatermarkTest ) { <nl> EXPECT_CALL ( connection_ , bufferLimit ( ) ) . Times ( 1 ) . 
WillOnce ( Return ( 10 ) ) ; <nl> initialize ( ) ; <nl> mmm a / test / integration / BUILD <nl> ppp b / test / integration / BUILD <nl> envoy_cc_test ( <nl> " : http_integration_lib " , <nl> " / / source / common / http : header_map_lib " , <nl> " / / source / extensions / access_loggers / file : config " , <nl> - " / / source / extensions / filters / http / cors : config " , <nl> - " / / source / extensions / filters / http / dynamo : config " , <nl> - " / / source / extensions / filters / http / grpc_http1_bridge : config " , <nl> - " / / source / extensions / filters / http / health_check : config " , <nl> + " / / source / extensions / filters / http / buffer : config " , <nl> " / / test / test_common : utility_lib " , <nl> ] , <nl> ) <nl> mmm a / test / integration / websocket_integration_test . cc <nl> ppp b / test / integration / websocket_integration_test . cc <nl> namespace { <nl> <nl> bool headersRead ( const std : : string & data ) { return data . find ( " \ r \ n \ r \ n " ) ! = std : : string : : npos ; } <nl> <nl> + static std : : string websocketTestParamsToString ( <nl> + const testing : : TestParamInfo < std : : tuple < Network : : Address : : IpVersion , bool > > params ) { <nl> + return absl : : StrCat ( std : : get < 0 > ( params . param ) = = Network : : Address : : IpVersion : : v4 ? " IPv4 " <nl> + : " IPv6 " , <nl> + " _ " , std : : get < 1 > ( params . param ) = = true ? " OldStyle " : " NewStyle " ) ; <nl> + } <nl> + <nl> } / / namespace <nl> <nl> INSTANTIATE_TEST_CASE_P ( IpVersions , WebsocketIntegrationTest , <nl> - testing : : ValuesIn ( TestEnvironment : : getIpVersionsForTest ( ) ) , <nl> - TestUtility : : ipTestParamsToString ) ; <nl> + testing : : Combine ( testing : : ValuesIn ( TestEnvironment : : getIpVersionsForTest ( ) ) , <nl> + testing : : Bool ( ) ) , <nl> + websocketTestParamsToString ) ; <nl> <nl> ConfigHelper : : HttpModifierFunction <nl> - setRouteUsingWebsocket ( const envoy : : api : : v2 : : route : : RouteAction : : WebSocketProxyConfig * ws_config ) { <nl> + setRouteUsingWebsocket ( const envoy : : api : : v2 : : route : : RouteAction : : WebSocketProxyConfig * ws_config , <nl> + bool old_style ) { <nl> + if ( ! old_style ) { <nl> + return [ ] ( envoy : : config : : filter : : network : : http_connection_manager : : v2 : : HttpConnectionManager & <nl> + hcm ) { hcm . add_upgrade_configs ( ) - > set_upgrade_type ( " websocket " ) ; } ; <nl> + } <nl> return <nl> [ ws_config ] ( <nl> envoy : : config : : filter : : network : : http_connection_manager : : v2 : : HttpConnectionManager & hcm ) { <nl> setRouteUsingWebsocket ( const envoy : : api : : v2 : : route : : RouteAction : : WebSocketProxyC <nl> } <nl> <nl> void WebsocketIntegrationTest : : initialize ( ) { <nl> - / / Set a less permissive default route so it does not pick up the / websocket query . <nl> - config_helper_ . setDefaultHostAndRoute ( " * " , " / asd " ) ; <nl> + if ( old_style_websockets_ ) { <nl> + / / Set a less permissive default route so it does not pick up the / websocket query . <nl> + config_helper_ . setDefaultHostAndRoute ( " * " , " / asd " ) ; <nl> + } <nl> HttpIntegrationTest : : initialize ( ) ; <nl> } <nl> <nl> void WebsocketIntegrationTest : : validateInitialUpstreamData ( const std : : string & received_data ) { <nl> - / / The request path gets rewritten from / websocket / test to / websocket . <nl> - / / The size of headers received by the destination is 228 bytes . <nl> - EXPECT_EQ ( received_data . 
size ( ) , 228 ) ; <nl> + if ( old_style_websockets_ ) { <nl> + / / The request path gets rewritten from / websocket / test to / websocket . <nl> + / / The size of headers received by the destination is 228 bytes . <nl> + EXPECT_EQ ( received_data . size ( ) , 228 ) ; <nl> + } <nl> / / In HTTP1 , the transfer - length is defined by use of the " chunked " transfer - coding , even if <nl> / / content - length header is present . No body websocket upgrade request send to upstream has <nl> / / content - length header and has no transfer - encoding header . <nl> void WebsocketIntegrationTest : : validateInitialUpstreamData ( const std : : string & re <nl> EXPECT_EQ ( received_data . find ( " transfer - encoding : " ) , std : : string : : npos ) ; <nl> } <nl> <nl> - void WebsocketIntegrationTest : : validateInitialDownstreamData ( const std : : string & received_data ) { <nl> - ASSERT_EQ ( received_data , upgrade_resp_str_ ) ; <nl> + void WebsocketIntegrationTest : : validateInitialDownstreamData ( const std : : string & received_data , <nl> + const std : : string & expected_data ) { <nl> + if ( old_style_websockets_ ) { <nl> + ASSERT_EQ ( expected_data , received_data ) ; <nl> + } else { <nl> + / / Strip out the date header since we ' re not going to generate an exact match . <nl> + std : : regex extra_request_headers ( " date : . * \ r \ nserver : envoy \ r \ n " ) ; <nl> + std : : string stripped_data = std : : regex_replace ( received_data , extra_request_headers , " " ) ; <nl> + EXPECT_EQ ( expected_data , stripped_data ) ; <nl> + } <nl> } <nl> <nl> void WebsocketIntegrationTest : : validateFinalDownstreamData ( const std : : string & received_data , <nl> const std : : string & expected_data ) { <nl> - EXPECT_EQ ( received_data , expected_data ) ; <nl> + if ( old_style_websockets_ ) { <nl> + EXPECT_EQ ( received_data , expected_data ) ; <nl> + } else { <nl> + / / Strip out the date header since we ' re not going to generate an exact match . <nl> + std : : regex extra_request_headers ( " date : . * \ r \ nserver : envoy \ r \ n " ) ; <nl> + std : : string stripped_data = std : : regex_replace ( received_data , extra_request_headers , " " ) ; <nl> + EXPECT_EQ ( expected_data , stripped_data ) ; <nl> + } <nl> } <nl> <nl> void WebsocketIntegrationTest : : validateFinalUpstreamData ( const std : : string & received_data , <nl> void WebsocketIntegrationTest : : validateFinalUpstreamData ( const std : : string & rece <nl> } <nl> <nl> TEST_P ( WebsocketIntegrationTest , WebSocketConnectionDownstreamDisconnect ) { <nl> - config_helper_ . addConfigModifier ( setRouteUsingWebsocket ( nullptr ) ) ; <nl> + config_helper_ . addConfigModifier ( setRouteUsingWebsocket ( nullptr , old_style_websockets_ ) ) ; <nl> initialize ( ) ; <nl> <nl> / / WebSocket upgrade , send some data and disconnect downstream <nl> TEST_P ( WebsocketIntegrationTest , WebSocketConnectionDownstreamDisconnect ) { <nl> tcp_client = makeTcpConnection ( lookupPort ( " http " ) ) ; <nl> / / Send websocket upgrade request <nl> tcp_client - > write ( upgrade_req_str_ ) ; <nl> - test_server_ - > waitForCounterGe ( " tcp . websocket . downstream_cx_total " , 1 ) ; <nl> + if ( old_style_websockets_ ) { <nl> + test_server_ - > waitForCounterGe ( " tcp . websocket . 
downstream_cx_total " , 1 ) ; <nl> + } <nl> fake_upstream_connection = fake_upstreams_ [ 0 ] - > waitForRawConnection ( ) ; <nl> const std : : string data = fake_upstream_connection - > waitForData ( & headersRead ) ; <nl> validateInitialUpstreamData ( data ) ; <nl> TEST_P ( WebsocketIntegrationTest , WebSocketConnectionDownstreamDisconnect ) { <nl> / / Accept websocket upgrade request <nl> fake_upstream_connection - > write ( upgrade_resp_str_ ) ; <nl> tcp_client - > waitForData ( " \ r \ n \ r \ n " , false ) ; <nl> - validateInitialDownstreamData ( tcp_client - > data ( ) ) ; <nl> + validateInitialDownstreamData ( tcp_client - > data ( ) , downstreamRespStr ( ) ) ; <nl> <nl> / / Standard TCP proxy semantics post upgrade <nl> tcp_client - > write ( " hello " ) ; <nl> TEST_P ( WebsocketIntegrationTest , WebSocketConnectionDownstreamDisconnect ) { <nl> <nl> / / downstream disconnect <nl> tcp_client - > close ( ) ; <nl> - fake_upstream_connection - > waitForData ( FakeRawConnection : : waitForInexactMatch ( " bye " ) ) ; <nl> + std : : string final_data = <nl> + fake_upstream_connection - > waitForData ( FakeRawConnection : : waitForInexactMatch ( " bye " ) ) ; <nl> fake_upstream_connection - > waitForDisconnect ( ) ; <nl> <nl> - validateFinalDownstreamData ( tcp_client - > data ( ) , upgrade_resp_str_ + " world " ) ; <nl> + validateFinalDownstreamData ( tcp_client - > data ( ) , downstreamRespStr ( ) + " world " ) ; <nl> + <nl> + if ( old_style_websockets_ ) { <nl> + return ; <nl> + } <nl> + <nl> + const std : : string upstream_payload = " GET / websocket / test HTTP / 1 . 1 \ r \ n " <nl> + " host : host \ r \ n " <nl> + " connection : keep - alive , Upgrade \ r \ n " <nl> + " upgrade : websocket \ r \ n " <nl> + " content - length : 0 \ r \ n " <nl> + " x - forwarded - proto : http \ r \ n " <nl> + " x - envoy - expected - rq - timeout - ms : 15000 \ r \ n \ r \ n " <nl> + " hellobye ! " ; <nl> + validateFinalUpstreamData ( final_data , upstream_payload ) ; <nl> } <nl> <nl> TEST_P ( WebsocketIntegrationTest , WebSocketConnectionUpstreamDisconnect ) { <nl> - config_helper_ . addConfigModifier ( setRouteUsingWebsocket ( nullptr ) ) ; <nl> + config_helper_ . addConfigModifier ( setRouteUsingWebsocket ( nullptr , old_style_websockets_ ) ) ; <nl> initialize ( ) ; <nl> <nl> / / WebSocket upgrade , send some data and disconnect upstream <nl> TEST_P ( WebsocketIntegrationTest , WebSocketConnectionUpstreamDisconnect ) { <nl> / / Accept websocket upgrade request <nl> fake_upstream_connection - > write ( upgrade_resp_str_ ) ; <nl> tcp_client - > waitForData ( " \ r \ n \ r \ n " , false ) ; <nl> - validateInitialDownstreamData ( tcp_client - > data ( ) ) ; <nl> + validateInitialDownstreamData ( tcp_client - > data ( ) , downstreamRespStr ( ) ) ; <nl> <nl> / / Standard TCP proxy semantics post upgrade <nl> tcp_client - > write ( " hello " ) ; <nl> TEST_P ( WebsocketIntegrationTest , WebSocketConnectionUpstreamDisconnect ) { <nl> tcp_client - > waitForDisconnect ( ) ; <nl> ASSERT ( ! fake_upstream_connection - > connected ( ) ) ; <nl> <nl> - validateFinalDownstreamData ( tcp_client - > data ( ) , upgrade_resp_str_ + " world " ) ; <nl> + validateFinalDownstreamData ( tcp_client - > data ( ) , downstreamRespStr ( ) + " world " ) ; <nl> } <nl> <nl> TEST_P ( WebsocketIntegrationTest , EarlyData ) { <nl> - config_helper_ . addConfigModifier ( setRouteUsingWebsocket ( nullptr ) ) ; <nl> + config_helper_ . 
addConfigModifier ( setRouteUsingWebsocket ( nullptr , old_style_websockets_ ) ) ; <nl> initialize ( ) ; <nl> <nl> / / WebSocket upgrade with early data ( HTTP body ) <nl> TEST_P ( WebsocketIntegrationTest , EarlyData ) { <nl> tcp_client - > waitForData ( early_data_resp_str , false ) ; <nl> tcp_client - > waitForDisconnect ( ) ; <nl> <nl> - validateFinalDownstreamData ( tcp_client - > data ( ) , upgrade_resp_str_ + " world " ) ; <nl> + validateFinalDownstreamData ( tcp_client - > data ( ) , downstreamRespStr ( ) + " world " ) ; <nl> } <nl> <nl> TEST_P ( WebsocketIntegrationTest , WebSocketConnectionIdleTimeout ) { <nl> + if ( ! old_style_websockets_ ) { <nl> + return ; <nl> + } <nl> + <nl> envoy : : api : : v2 : : route : : RouteAction : : WebSocketProxyConfig ws_config ; <nl> ws_config . mutable_idle_timeout ( ) - > set_nanos ( <nl> std : : chrono : : duration_cast < std : : chrono : : nanoseconds > ( std : : chrono : : milliseconds ( 100 ) ) . count ( ) ) ; <nl> * ws_config . mutable_stat_prefix ( ) = " my - stat - prefix " ; <nl> - config_helper_ . addConfigModifier ( setRouteUsingWebsocket ( & ws_config ) ) ; <nl> + config_helper_ . addConfigModifier ( setRouteUsingWebsocket ( & ws_config , old_style_websockets_ ) ) ; <nl> + if ( ! old_style_websockets_ ) { <nl> + config_helper_ . addConfigModifier ( <nl> + [ ] ( envoy : : config : : filter : : network : : http_connection_manager : : v2 : : HttpConnectionManager & <nl> + hcm ) { <nl> + auto * route = hcm . mutable_route_config ( ) - > mutable_virtual_hosts ( 0 ) - > mutable_routes ( 0 ) ; <nl> + auto * route_idle_timeout = route - > mutable_route ( ) - > mutable_timeout ( ) ; <nl> + auto nanos = <nl> + std : : chrono : : duration_cast < std : : chrono : : nanoseconds > ( std : : chrono : : milliseconds ( 100 ) ) <nl> + . count ( ) ; <nl> + route_idle_timeout - > set_nanos ( nanos ) ; <nl> + auto * idle_timeout = hcm . mutable_idle_timeout ( ) ; <nl> + idle_timeout - > set_nanos ( nanos ) ; <nl> + } ) ; <nl> + } <nl> initialize ( ) ; <nl> <nl> / / WebSocket upgrade , send some data and disconnect downstream <nl> TEST_P ( WebsocketIntegrationTest , WebSocketConnectionIdleTimeout ) { <nl> / / Accept websocket upgrade request <nl> fake_upstream_connection - > write ( upgrade_resp_str_ ) ; <nl> tcp_client - > waitForData ( " \ r \ n \ r \ n " , false ) ; <nl> - validateInitialDownstreamData ( tcp_client - > data ( ) ) ; <nl> + validateInitialDownstreamData ( tcp_client - > data ( ) , downstreamRespStr ( ) ) ; <nl> / / Standard TCP proxy semantics post upgrade <nl> tcp_client - > write ( " hello " ) ; <nl> tcp_client - > write ( " hello " ) ; <nl> fake_upstream_connection - > write ( " world " ) ; <nl> tcp_client - > waitForData ( " world " , false ) ; <nl> <nl> - test_server_ - > waitForCounterGe ( " tcp . my - stat - prefix . idle_timeout " , 1 ) ; <nl> + if ( old_style_websockets_ ) { <nl> + test_server_ - > waitForCounterGe ( " tcp . my - stat - prefix . idle_timeout " , 1 ) ; <nl> + } else { <nl> + test_server_ - > waitForCounterGe ( " http . downstream_cx_idle_timeout " , 1 ) ; <nl> + } <nl> tcp_client - > waitForDisconnect ( ) ; <nl> fake_upstream_connection - > waitForDisconnect ( ) ; <nl> } <nl> <nl> TEST_P ( WebsocketIntegrationTest , WebSocketLogging ) { <nl> + if ( ! old_style_websockets_ ) <nl> + return ; <nl> envoy : : api : : v2 : : route : : RouteAction : : WebSocketProxyConfig ws_config ; <nl> ws_config . 
mutable_idle_timeout ( ) - > set_nanos ( <nl> std : : chrono : : duration_cast < std : : chrono : : nanoseconds > ( std : : chrono : : milliseconds ( 100 ) ) . count ( ) ) ; <nl> * ws_config . mutable_stat_prefix ( ) = " my - stat - prefix " ; <nl> <nl> - config_helper_ . addConfigModifier ( setRouteUsingWebsocket ( & ws_config ) ) ; <nl> - <nl> + config_helper_ . addConfigModifier ( setRouteUsingWebsocket ( & ws_config , old_style_websockets_ ) ) ; <nl> std : : string expected_log_template = " bytes_sent = { 0 } " <nl> " bytes_received = { 1 } " <nl> " downstream_local_address = { 2 } " <nl> TEST_P ( WebsocketIntegrationTest , WebSocketLogging ) { <nl> ip_port_regex , ip_port_regex , ip_port_regex ) ) ) ; <nl> } <nl> <nl> + / / Technically not a websocket test , but verifies normal upgrades have parity <nl> + / / with websocket upgrades <nl> + TEST_P ( WebsocketIntegrationTest , NonWebsocketUpgrade ) { <nl> + if ( old_style_websockets_ ) { <nl> + return ; <nl> + } <nl> + config_helper_ . addConfigModifier ( <nl> + [ & ] ( envoy : : config : : filter : : network : : http_connection_manager : : v2 : : HttpConnectionManager & hcm ) <nl> + - > void { <nl> + auto * foo_upgrade = hcm . add_upgrade_configs ( ) ; <nl> + foo_upgrade - > set_upgrade_type ( " foo " ) ; <nl> + } ) ; <nl> + <nl> + config_helper_ . addConfigModifier ( setRouteUsingWebsocket ( nullptr , old_style_websockets_ ) ) ; <nl> + initialize ( ) ; <nl> + <nl> + const std : : string upgrade_req_str = " GET / HTTP / 1 . 1 \ r \ nHost : host \ r \ nConnection : " <nl> + " keep - alive , Upgrade \ r \ nUpgrade : foo \ r \ n \ r \ n " ; <nl> + const std : : string upgrade_resp_str = <nl> + " HTTP / 1 . 1 101 Switching Protocols \ r \ nConnection : Upgrade \ r \ nUpgrade : foo \ r \ n \ r \ n " ; <nl> + <nl> + / / Upgrade , send some data and disconnect downstream <nl> + IntegrationTcpClientPtr tcp_client ; <nl> + FakeRawConnectionPtr fake_upstream_connection ; <nl> + <nl> + tcp_client = makeTcpConnection ( lookupPort ( " http " ) ) ; <nl> + / / Send websocket upgrade request <nl> + / / The size of headers received by the destination is 228 bytes . <nl> + tcp_client - > write ( upgrade_req_str ) ; <nl> + if ( old_style_websockets_ ) { <nl> + test_server_ - > waitForCounterGe ( " tcp . websocket . downstream_cx_total " , 1 ) ; <nl> + } <nl> + fake_upstream_connection = fake_upstreams_ [ 0 ] - > waitForRawConnection ( ) ; <nl> + const std : : string data = fake_upstream_connection - > waitForData ( & headersRead ) ; <nl> + validateInitialUpstreamData ( data ) ; <nl> + <nl> + / / Accept websocket upgrade request <nl> + fake_upstream_connection - > write ( upgrade_resp_str ) ; <nl> + tcp_client - > waitForData ( " \ r \ n \ r \ n " , false ) ; <nl> + if ( old_style_websockets_ ) { <nl> + ASSERT_EQ ( tcp_client - > data ( ) , upgrade_resp_str ) ; <nl> + } <nl> + / / Standard TCP proxy semantics post upgrade <nl> + tcp_client - > write ( " hello " ) ; <nl> + <nl> + fake_upstream_connection - > waitForData ( FakeRawConnection : : waitForInexactMatch ( " hello " ) ) ; <nl> + fake_upstream_connection - > write ( " world " ) ; <nl> + tcp_client - > waitForData ( " world " , false ) ; <nl> + tcp_client - > write ( " bye !
" ) ; <nl> + <nl> + / / downstream disconnect <nl> + tcp_client - > close ( ) ; <nl> + std : : string final_data = <nl> + fake_upstream_connection - > waitForData ( FakeRawConnection : : waitForInexactMatch ( " bye " ) ) ; <nl> + fake_upstream_connection - > waitForDisconnect ( ) ; <nl> + <nl> + const std : : string modified_upgrade_resp_str = " HTTP / 1 . 1 101 Switching Protocols \ r \ nconnection : " <nl> + " Upgrade \ r \ nupgrade : foo \ r \ ncontent - length : " <nl> + " 0 \ r \ n \ r \ n " ; <nl> + validateFinalDownstreamData ( tcp_client - > data ( ) , modified_upgrade_resp_str + " world " ) ; <nl> + const std : : string upstream_payload = " GET / HTTP / 1 . 1 \ r \ n " <nl> + " host : host \ r \ n " <nl> + " connection : keep - alive , Upgrade \ r \ n " <nl> + " upgrade : foo \ r \ n " <nl> + " content - length : 0 \ r \ n " <nl> + " x - forwarded - proto : http \ r \ n " <nl> + " x - envoy - expected - rq - timeout - ms : 15000 \ r \ n \ r \ n " <nl> + " hellobye ! " ; <nl> + <nl> + std : : regex extra_response_headers ( " x - request - id : . * \ r \ n " ) ; <nl> + std : : string stripped_data = std : : regex_replace ( final_data , extra_response_headers , " " ) ; <nl> + EXPECT_EQ ( upstream_payload , stripped_data ) ; <nl> + } <nl> + <nl> + TEST_P ( WebsocketIntegrationTest , WebsocketCustomFilterChain ) { <nl> + config_helper_ . addConfigModifier ( setRouteUsingWebsocket ( nullptr , old_style_websockets_ ) ) ; <nl> + if ( old_style_websockets_ ) { <nl> + return ; <nl> + } <nl> + <nl> + / / Add a small buffer filter to the standard HTTP filter chain . Websocket <nl> + / / upgrades will use the HTTP filter chain so will also have small buffers . <nl> + config_helper_ . addFilter ( ConfigHelper : : SMALL_BUFFER_FILTER ) ; <nl> + <nl> + / / Add a second upgrade type which goes directly to the router filter . <nl> + config_helper_ . addConfigModifier ( <nl> + [ & ] ( envoy : : config : : filter : : network : : http_connection_manager : : v2 : : HttpConnectionManager & hcm ) <nl> + - > void { <nl> + auto * foo_upgrade = hcm . add_upgrade_configs ( ) ; <nl> + foo_upgrade - > set_upgrade_type ( " foo " ) ; <nl> + auto * filter_list_back = foo_upgrade - > add_filters ( ) ; <nl> + const std : : string json = <nl> + Json : : Factory : : loadFromYamlString ( " name : envoy . router " ) - > asJsonString ( ) ; <nl> + MessageUtil : : loadFromJson ( json , * filter_list_back ) ; <nl> + } ) ; <nl> + initialize ( ) ; <nl> + <nl> + / / Websocket upgrades are configured to disallow large payload . <nl> + const std : : string early_data_req_str ( 2048 , ' a ' ) ; <nl> + { <nl> + const std : : string upgrade_req_str = <nl> + fmt : : format ( " GET / websocket / test HTTP / 1 . 1 \ r \ nHost : host \ r \ nConnection : " <nl> + " keep - alive , Upgrade \ r \ nUpgrade : websocket \ r \ nContent - Length : { } \ r \ n \ r \ n " , <nl> + early_data_req_str . length ( ) ) ; <nl> + IntegrationTcpClientPtr tcp_client = makeTcpConnection ( lookupPort ( " http " ) ) ; <nl> + tcp_client - > write ( upgrade_req_str + early_data_req_str ) ; <nl> + tcp_client - > waitForData ( " \ r \ n \ r \ n " , false ) ; <nl> + EXPECT_NE ( tcp_client - > data ( ) . find ( " 413 " ) , std : : string : : npos ) ; <nl> + tcp_client - > waitForDisconnect ( true ) ; <nl> + } <nl> + <nl> + / / HTTP requests are configured to disallow large bodies . <nl> + { <nl> + const std : : string upgrade_req_str = fmt : : format ( " GET / HTTP / 1 . 
1 \ r \ nHost : host \ r \ nConnection : " <nl> + " keep - alive \ r \ nContent - Length : { } \ r \ n \ r \ n " , <nl> + early_data_req_str . length ( ) ) ; <nl> + IntegrationTcpClientPtr tcp_client = makeTcpConnection ( lookupPort ( " http " ) ) ; <nl> + tcp_client - > write ( upgrade_req_str + early_data_req_str ) ; <nl> + tcp_client - > waitForData ( " \ r \ n \ r \ n " , false ) ; <nl> + EXPECT_NE ( tcp_client - > data ( ) . find ( " 413 " ) , std : : string : : npos ) ; <nl> + tcp_client - > waitForDisconnect ( true ) ; <nl> + } <nl> + <nl> + / / Foo upgrades are configured without the buffer filter , so should explicitly <nl> + / / allow large payload . <nl> + { <nl> + const std : : string upgrade_req_str = <nl> + fmt : : format ( " GET / websocket / test HTTP / 1 . 1 \ r \ nHost : host \ r \ nConnection : " <nl> + " keep - alive , Upgrade \ r \ nUpgrade : foo \ r \ nContent - Length : { } \ r \ n \ r \ n " , <nl> + early_data_req_str . length ( ) ) ; <nl> + IntegrationTcpClientPtr tcp_client = makeTcpConnection ( lookupPort ( " http " ) ) ; <nl> + tcp_client - > write ( upgrade_req_str + early_data_req_str ) ; <nl> + FakeRawConnectionPtr fake_upstream_connection = fake_upstreams_ [ 0 ] - > waitForRawConnection ( ) ; <nl> + ASSERT_TRUE ( fake_upstream_connection ! = nullptr ) ; <nl> + / / Make sure the full payload arrives . <nl> + const std : : string data = fake_upstream_connection - > waitForData ( <nl> + FakeRawConnection : : waitForInexactMatch ( early_data_req_str . c_str ( ) ) ) ; <nl> + / / Tear down all the connections cleanly . <nl> + tcp_client - > close ( ) ; <nl> + fake_upstream_connection - > waitForDisconnect ( ) ; <nl> + } <nl> + } <nl> + <nl> TEST_P ( WebsocketIntegrationTest , BidirectionalChunkedData ) { <nl> - config_helper_ . addConfigModifier ( setRouteUsingWebsocket ( nullptr ) ) ; <nl> + config_helper_ . addConfigModifier ( setRouteUsingWebsocket ( nullptr , old_style_websockets_ ) ) ; <nl> initialize ( ) ; <nl> const std : : string upgrade_req_str = " GET / websocket / test HTTP / 1 . 1 \ r \ nHost : host \ r \ nconnection : " <nl> " keep - alive , Upgrade \ r \ nupgrade : Websocket \ r \ n " <nl> TEST_P ( WebsocketIntegrationTest , BidirectionalChunkedData ) { <nl> IntegrationTcpClientPtr tcp_client = makeTcpConnection ( lookupPort ( " http " ) ) ; <nl> tcp_client - > write ( upgrade_req_str ) ; <nl> FakeRawConnectionPtr fake_upstream_connection = fake_upstreams_ [ 0 ] - > waitForRawConnection ( ) ; <nl> - / / TODO ( alyssawilk ) We should be able to wait for SomeWebSocketPayload but it <nl> - / / is not flushed immediately . <nl> - const std : : string data = <nl> - fake_upstream_connection - > waitForData ( FakeRawConnection : : waitForInexactMatch ( " \ r \ n \ r \ n " ) ) ; <nl> + const std : : string data = fake_upstream_connection - > waitForData ( <nl> + FakeRawConnection : : waitForInexactMatch ( " SomeWebSocketPayload " ) ) ; <nl> <nl> / / Finish the upgrade . <nl> const std : : string upgrade_resp_str = <nl> " HTTP / 1 . 1 101 Switching Protocols \ r \ nconnection : Upgrade \ r \ nupgrade : Websocket \ r \ n " <nl> " transfer - encoding : chunked \ r \ n \ r \ n " <nl> - " 4 \ r \ nabcd \ r \ n0 \ r \ n " <nl> + " 4 \ r \ nabcd \ r \ n0 \ r \ n \ r \ n " <nl> " SomeWebsocketResponsePayload " ; <nl> fake_upstream_connection - > write ( upgrade_resp_str ) ; <nl> - tcp_client - > waitForData ( " Payload " , false ) ; <nl> + tcp_client - > waitForData ( " SomeWebsocketResponsePayload " , false ) ; <nl> <nl> / / Verify bidirectional data still works . 
<nl> tcp_client - > write ( " FinalClientPayload " ) ; <nl> TEST_P ( WebsocketIntegrationTest , BidirectionalChunkedData ) { <nl> tcp_client - > close ( ) ; <nl> fake_upstream_connection - > waitForDisconnect ( ) ; <nl> <nl> - / / TODO ( alyssawilk ) the current stack is stripping chunked encoding , then <nl> - / / adding back the chunked encoding header without actually chunk encoding . <nl> - / / Data about HTTP vs websocket data boundaries is therefore lost . Fix by <nl> - / / actually chunk encoding . <nl> - const std : : string old_style_modified_payload = " GET / websocket HTTP / 1 . 1 \ r \ n " <nl> - " host : host \ r \ n " <nl> - " connection : keep - alive , Upgrade \ r \ n " <nl> - " upgrade : Websocket \ r \ n " <nl> - " x - forwarded - proto : http \ r \ n " <nl> - " x - envoy - original - path : / websocket / test \ r \ n " <nl> - " transfer - encoding : chunked \ r \ n \ r \ n " <nl> - " 123SomeWebSocketPayloadFinalClientPayload " ; <nl> - validateFinalUpstreamData ( final_data , old_style_modified_payload ) ; <nl> + const std : : string modified_upstream_payload = <nl> + " GET / websocket / test HTTP / 1 . 1 \ r \ n " <nl> + " host : host \ r \ n " <nl> + " connection : keep - alive , Upgrade \ r \ n " <nl> + " upgrade : Websocket \ r \ n " <nl> + " x - forwarded - proto : http \ r \ n " <nl> + " x - envoy - expected - rq - timeout - ms : 15000 \ r \ n " <nl> + " transfer - encoding : chunked \ r \ n \ r \ n " <nl> + " 3 \ r \ n123 \ r \ n0 \ r \ n \ r \ nSomeWebSocketPayloadFinalClientPayload " ; <nl> + const std : : string old_style_modified_payload = <nl> + " GET / websocket HTTP / 1 . 1 \ r \ n " <nl> + " host : host \ r \ n " <nl> + " connection : keep - alive , Upgrade \ r \ n " <nl> + " upgrade : Websocket \ r \ n " <nl> + " x - forwarded - proto : http \ r \ n " <nl> + " x - envoy - original - path : / websocket / test \ r \ n " <nl> + " transfer - encoding : chunked \ r \ n \ r \ n " <nl> + " 3 \ r \ n123 \ r \ n0 \ r \ n \ r \ nSomeWebSocketPayloadFinalClientPayload " ; <nl> + validateFinalUpstreamData ( final_data , old_style_websockets_ ? old_style_modified_payload <nl> + : modified_upstream_payload ) ; <nl> <nl> const std : : string modified_downstream_payload = <nl> " HTTP / 1 . 1 101 Switching Protocols \ r \ nconnection : Upgrade \ r \ nupgrade : Websocket \ r \ n " <nl> " transfer - encoding : chunked \ r \ n \ r \ n " <nl> - " 4 \ r \ nabcd \ r \ n0 \ r \ n " <nl> + " 4 \ r \ nabcd \ r \ n0 \ r \ n \ r \ n " <nl> " SomeWebsocketResponsePayloadFinalServerPayload " ; <nl> validateFinalDownstreamData ( tcp_client - > data ( ) , modified_downstream_payload ) ; <nl> } <nl> mmm a / test / integration / websocket_integration_test . h <nl> ppp b / test / integration / websocket_integration_test . 
h <nl> <nl> <nl> namespace Envoy { <nl> <nl> - class WebsocketIntegrationTest : public HttpIntegrationTest , <nl> - public testing : : TestWithParam < Network : : Address : : IpVersion > { <nl> + class WebsocketIntegrationTest <nl> + : public HttpIntegrationTest , <nl> + public testing : : TestWithParam < std : : tuple < Network : : Address : : IpVersion , bool > > { <nl> public : <nl> void initialize ( ) override ; <nl> - WebsocketIntegrationTest ( ) : HttpIntegrationTest ( Http : : CodecClient : : Type : : HTTP1 , GetParam ( ) ) { } <nl> + WebsocketIntegrationTest ( ) <nl> + : HttpIntegrationTest ( Http : : CodecClient : : Type : : HTTP1 , std : : get < 0 > ( GetParam ( ) ) ) { } <nl> + bool old_style_websockets_ { std : : get < 1 > ( GetParam ( ) ) } ; <nl> <nl> protected : <nl> void validateInitialUpstreamData ( const std : : string & received_data ) ; <nl> - void validateInitialDownstreamData ( const std : : string & received_data ) ; <nl> + void validateInitialDownstreamData ( const std : : string & received_data , <nl> + const std : : string & expected_data ) ; <nl> void validateFinalDownstreamData ( const std : : string & received_data , <nl> const std : : string & expected_data ) ; <nl> void validateFinalUpstreamData ( const std : : string & received_data , <nl> const std : : string & expected_data ) ; <nl> <nl> + const std : : string & downstreamRespStr ( ) { <nl> + return old_style_websockets_ ? upgrade_resp_str_ : modified_upgrade_resp_str_ ; <nl> + } <nl> + <nl> const std : : string upgrade_req_str_ = " GET / websocket / test HTTP / 1 . 1 \ r \ nHost : host \ r \ nConnection : " <nl> " keep - alive , Upgrade \ r \ nUpgrade : websocket \ r \ n \ r \ n " ; <nl> const std : : string upgrade_resp_str_ = <nl> " HTTP / 1 . 1 101 Switching Protocols \ r \ nConnection : Upgrade \ r \ nUpgrade : websocket \ r \ n \ r \ n " ; <nl> + <nl> + const std : : string modified_upgrade_resp_str_ = " HTTP / 1 . 1 101 Switching Protocols \ r \ nconnection : " <nl> + " Upgrade \ r \ nupgrade : websocket \ r \ ncontent - length : " <nl> + " 0 \ r \ n \ r \ n " ; <nl> } ; <nl> <nl> } / / namespace Envoy <nl>
http : new style WebSockets , where headers and data are processed by the filter chain . ( )
envoyproxy/envoy
95c3e1343de707edee58defbec03ba87c9e969de
2018-07-12T16:28:32Z
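The corrected expectations in the record above hinge on chunked-transfer framing: a chunked body ends with a zero-size chunk line plus one more CRLF closing the (empty) trailer section, which is why the payloads gain the extra CRLF after "0\r\n". Below is a minimal standalone C++ sketch of that framing; the helper name is illustrative and nothing here is an Envoy API.

    #include <cstdio>
    #include <iostream>
    #include <string>

    // Frames a payload as a single HTTP/1.1 chunk followed by the terminating
    // zero-size chunk. "0\r\n" plus the CRLF that ends the empty trailer
    // section gives the "0\r\n\r\n" the corrected test expectations use.
    std::string chunkEncode(const std::string& payload) {
      char size_hex[32];
      std::snprintf(size_hex, sizeof(size_hex), "%zx", payload.size());
      return std::string(size_hex) + "\r\n" + payload + "\r\n0\r\n\r\n";
    }

    int main() {
      std::cout << chunkEncode("abcd");  // emits 4\r\nabcd\r\n0\r\n\r\n
      return 0;
    }

Dropping that final CRLF, as the old expectations did, leaves a receiver waiting for the trailer section to end, so any bytes that follow are ambiguous between trailers and upgraded-protocol payload.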
mmm a / buildscripts / resmokeconfig / suites / replica_sets_initsync_jscore_passthrough . yml <nl> ppp b / buildscripts / resmokeconfig / suites / replica_sets_initsync_jscore_passthrough . yml <nl> selector : <nl> - jstests / core / txns / no_writes_to_config_transactions_with_prepared_transaction . js <nl> - jstests / core / txns / prepare_conflict . js <nl> - jstests / core / txns / prepare_prepared_transaction . js <nl> + - jstests / core / txns / prepared_transactions_do_not_block_non_conflicting_ddl . js <nl> - jstests / core / txns / statement_ids_accepted . js <nl> # TODO ( SERVER - 35865 ) : Unblacklist when we also correctly write and apply ' commitTransaction ' <nl> # oplog entries , besides SERVER - 36492 . <nl> mmm a / buildscripts / resmokeconfig / suites / replica_sets_initsync_static_jscore_passthrough . yml <nl> ppp b / buildscripts / resmokeconfig / suites / replica_sets_initsync_static_jscore_passthrough . yml <nl> selector : <nl> - jstests / core / txns / no_writes_to_config_transactions_with_prepared_transaction . js <nl> - jstests / core / txns / prepare_conflict . js <nl> - jstests / core / txns / prepare_prepared_transaction . js <nl> + - jstests / core / txns / prepared_transactions_do_not_block_non_conflicting_ddl . js <nl> - jstests / core / txns / statement_ids_accepted . js <nl> # TODO ( SERVER - 35865 ) : Unblacklist when we also correctly write and apply ' commitTransaction ' <nl> # oplog entries , besides SERVER - 36492 . <nl> new file mode 100644 <nl> index 000000000000 . . a5bf806987d1 <nl> mmm / dev / null <nl> ppp b / jstests / core / txns / prepared_transactions_do_not_block_non_conflicting_ddl . js <nl> <nl> + / / Test that prepared transactions don ' t block DDL operations on the non - conflicting collections . <nl> + / / @ tags : [ uses_transactions , uses_prepare_transaction ] <nl> + ( function ( ) { <nl> + " use strict " ; <nl> + <nl> + load ( " jstests / core / txns / libs / prepare_helpers . js " ) ; <nl> + const dbName = " prepared_transactions_do_not_block_non_conflicting_ddl " ; <nl> + const collName = " transactions_collection " ; <nl> + const otherDBName = " prepared_transactions_do_not_block_non_conflicting_ddl_other " ; <nl> + const otherCollName = " transactions_collection_other " ; <nl> + const testDB = db . getSiblingDB ( dbName ) ; <nl> + const otherDB = db . getSiblingDB ( otherDBName ) ; <nl> + <nl> + const session = testDB . getMongo ( ) . startSession ( { causalConsistency : false } ) ; <nl> + const sessionDB = session . getDatabase ( dbName ) ; <nl> + const sessionColl = sessionDB [ collName ] ; <nl> + <nl> + / / Setup . <nl> + testDB . dropDatabase ( ) ; <nl> + otherDB . dropDatabase ( ) ; <nl> + assert . commandWorked ( sessionColl . insert ( { _id : 1 , x : 0 } ) ) ; <nl> + <nl> + / * * <nl> + * Tests that DDL operations on non - conflicting namespaces don ' t block on transactions . <nl> + * / <nl> + function testSuccess ( cmdDBName , ddlCmd ) { <nl> + session . startTransaction ( ) ; <nl> + assert . commandWorked ( sessionColl . update ( { _id : 1 } , { $ inc : { x : 1 } } ) ) ; <nl> + const prepareTimestamp = PrepareHelpers . prepareTransaction ( session ) ; <nl> + assert . commandWorked ( testDB . getSiblingDB ( cmdDBName ) . runCommand ( ddlCmd ) ) ; <nl> + assert . commandWorked ( <nl> + PrepareHelpers . commitTransactionAfterPrepareTS ( session , prepareTimestamp ) ) ; <nl> + } <nl> + <nl> + jsTest . log ( " Test ' create ' . 
" ) ; <nl> + const createCmd = { create : collName } ; <nl> + testSuccess ( otherDBName , createCmd ) ; <nl> + <nl> + jsTest . log ( " Test ' createIndexes ' . " ) ; <nl> + const createIndexesCmd = { createIndexes : collName , indexes : [ { key : { x : 1 } , name : " x_1 " } ] } ; <nl> + testSuccess ( otherDBName , createIndexesCmd ) ; <nl> + <nl> + jsTest . log ( " Test ' dropIndexes ' . " ) ; <nl> + const dropIndexesCmd = { dropIndexes : collName , index : " x_1 " } ; <nl> + testSuccess ( otherDBName , dropIndexesCmd ) ; <nl> + <nl> + sessionColl . createIndex ( { multiKeyField : 1 } ) ; <nl> + jsTest . log ( " Test ' insert ' that enables multi - key index on the same collection . " ) ; <nl> + const insertAndSetMultiKeyCmd = { insert : collName , documents : [ { multiKeyField : [ 1 , 2 ] } ] } ; <nl> + testSuccess ( dbName , insertAndSetMultiKeyCmd ) ; <nl> + <nl> + jsTest . log ( " Test ' drop ' . " ) ; <nl> + const dropCmd = { drop : collName } ; <nl> + testSuccess ( otherDBName , dropCmd ) ; <nl> + <nl> + jsTest . log ( " Test ' renameCollection ' . " ) ; <nl> + assert . commandWorked ( otherDB . getCollection ( collName ) . insert ( { x : " doc - for - rename - collection " } ) ) ; <nl> + otherDB . runCommand ( { drop : otherCollName } ) ; <nl> + const renameCollectionCmd = { <nl> + renameCollection : otherDBName + " . " + collName , <nl> + to : otherDBName + " . " + otherCollName <nl> + } ; <nl> + testSuccess ( " admin " , renameCollectionCmd ) ; <nl> + <nl> + session . endSession ( ) ; <nl> + } ( ) ) ; <nl> mmm a / src / mongo / db / concurrency / lock_manager . cpp <nl> ppp b / src / mongo / db / concurrency / lock_manager . cpp <nl> void LockRequest : : initNew ( Locker * locker , LockGrantNotification * notify ) { <nl> partitioned = false ; <nl> mode = MODE_NONE ; <nl> convertMode = MODE_NONE ; <nl> + unlockPending = 0 ; <nl> } <nl> <nl> <nl> mmm a / src / mongo / db / concurrency / lock_state . cpp <nl> ppp b / src / mongo / db / concurrency / lock_state . cpp <nl> void LockerImpl : : endWriteUnitOfWork ( ) { <nl> } <nl> } <nl> <nl> + bool LockerImpl : : releaseWriteUnitOfWork ( LockSnapshot * stateOut ) { <nl> + / / Only the global WUOW can be released . <nl> + invariant ( _wuowNestingLevel = = 1 ) ; <nl> + - - _wuowNestingLevel ; <nl> + invariant ( ! isGlobalLockedRecursively ( ) ) ; <nl> + <nl> + / / All locks should be pending to unlock . <nl> + invariant ( _requests . size ( ) = = _numResourcesToUnlockAtEndUnitOfWork ) ; <nl> + for ( auto it = _requests . begin ( ) ; it ; it . next ( ) ) { <nl> + / / No converted lock so we don ' t need to unlock more than once . <nl> + invariant ( it - > unlockPending = = 1 ) ; <nl> + } <nl> + _numResourcesToUnlockAtEndUnitOfWork = 0 ; <nl> + <nl> + return saveLockStateAndUnlock ( stateOut ) ; <nl> + } <nl> + <nl> + void LockerImpl : : restoreWriteUnitOfWork ( OperationContext * opCtx , <nl> + const LockSnapshot & stateToRestore ) { <nl> + if ( stateToRestore . globalMode ! = MODE_NONE ) { <nl> + restoreLockState ( opCtx , stateToRestore ) ; <nl> + } <nl> + <nl> + invariant ( _numResourcesToUnlockAtEndUnitOfWork = = 0 ) ; <nl> + for ( auto it = _requests . begin ( ) ; it ; it . next ( ) ) { <nl> + invariant ( _shouldDelayUnlock ( it . key ( ) , ( it - > mode ) ) ) ; <nl> + invariant ( it - > unlockPending = = 0 ) ; <nl> + it - > unlockPending + + ; <nl> + } <nl> + _numResourcesToUnlockAtEndUnitOfWork = static_cast < unsigned > ( _requests . 
size ( ) ) ; <nl> + <nl> + beginWriteUnitOfWork ( ) ; <nl> + } <nl> + <nl> LockResult LockerImpl : : lock ( OperationContext * opCtx , <nl> ResourceId resId , <nl> LockMode mode , <nl> mmm a / src / mongo / db / concurrency / lock_state . h <nl> ppp b / src / mongo / db / concurrency / lock_state . h <nl> class LockerImpl : public Locker { <nl> virtual LockResult lockRSTLBegin ( OperationContext * opCtx ) ; <nl> virtual LockResult lockRSTLComplete ( OperationContext * opCtx , Date_t deadline ) ; <nl> <nl> - virtual void beginWriteUnitOfWork ( ) ; <nl> - virtual void endWriteUnitOfWork ( ) ; <nl> + virtual void beginWriteUnitOfWork ( ) override ; <nl> + virtual void endWriteUnitOfWork ( ) override ; <nl> <nl> virtual bool inAWriteUnitOfWork ( ) const { <nl> return _wuowNestingLevel > 0 ; <nl> class LockerImpl : public Locker { <nl> restoreLockState ( nullptr , stateToRestore ) ; <nl> } <nl> <nl> + bool releaseWriteUnitOfWork ( LockSnapshot * stateOut ) override ; <nl> + void restoreWriteUnitOfWork ( OperationContext * opCtx , <nl> + const LockSnapshot & stateToRestore ) override ; <nl> + <nl> virtual void releaseTicket ( ) ; <nl> virtual void reacquireTicket ( OperationContext * opCtx ) ; <nl> <nl> mmm a / src / mongo / db / concurrency / lock_state_test . cpp <nl> ppp b / src / mongo / db / concurrency / lock_state_test . cpp <nl> TEST ( LockerImpl , saveAndRestoreDBAndCollection ) { <nl> ASSERT ( locker . unlockGlobal ( ) ) ; <nl> } <nl> <nl> + TEST ( LockerImpl , releaseWriteUnitOfWork ) { <nl> + Locker : : LockSnapshot lockInfo ; <nl> + <nl> + LockerImpl locker ; <nl> + <nl> + const ResourceId resIdDatabase ( RESOURCE_DATABASE , " TestDB " _sd ) ; <nl> + const ResourceId resIdCollection ( RESOURCE_COLLECTION , " TestDB . collection " _sd ) ; <nl> + <nl> + locker . beginWriteUnitOfWork ( ) ; <nl> + / / Lock some stuff . <nl> + locker . lockGlobal ( MODE_IX ) ; <nl> + ASSERT_EQUALS ( LOCK_OK , locker . lock ( resIdDatabase , MODE_IX ) ) ; <nl> + ASSERT_EQUALS ( LOCK_OK , locker . lock ( resIdCollection , MODE_X ) ) ; <nl> + / / Unlock them so that they will be pending to unlock . <nl> + ASSERT_FALSE ( locker . unlock ( resIdCollection ) ) ; <nl> + ASSERT_FALSE ( locker . unlock ( resIdDatabase ) ) ; <nl> + ASSERT_FALSE ( locker . unlockGlobal ( ) ) ; <nl> + <nl> + ASSERT ( locker . releaseWriteUnitOfWork ( & lockInfo ) ) ; <nl> + <nl> + / / Things shouldn ' t be locked anymore . <nl> + ASSERT_EQUALS ( MODE_NONE , locker . getLockMode ( resIdDatabase ) ) ; <nl> + ASSERT_EQUALS ( MODE_NONE , locker . getLockMode ( resIdCollection ) ) ; <nl> + ASSERT_FALSE ( locker . isLocked ( ) ) ; <nl> + <nl> + / / Destructor should succeed since the locker ' s state should be empty . <nl> + } <nl> + <nl> + TEST ( LockerImpl , restoreWriteUnitOfWork ) { <nl> + Locker : : LockSnapshot lockInfo ; <nl> + <nl> + LockerImpl locker ; <nl> + <nl> + const ResourceId resIdDatabase ( RESOURCE_DATABASE , " TestDB " _sd ) ; <nl> + const ResourceId resIdCollection ( RESOURCE_COLLECTION , " TestDB . collection " _sd ) ; <nl> + <nl> + locker . beginWriteUnitOfWork ( ) ; <nl> + / / Lock some stuff . <nl> + locker . lockGlobal ( MODE_IX ) ; <nl> + ASSERT_EQUALS ( LOCK_OK , locker . lock ( resIdDatabase , MODE_IX ) ) ; <nl> + ASSERT_EQUALS ( LOCK_OK , locker . lock ( resIdCollection , MODE_X ) ) ; <nl> + / / Unlock them so that they will be pending to unlock . <nl> + ASSERT_FALSE ( locker . unlock ( resIdCollection ) ) ; <nl> + ASSERT_FALSE ( locker . unlock ( resIdDatabase ) ) ; <nl> + ASSERT_FALSE ( locker . 
unlockGlobal ( ) ) ; <nl> + <nl> + ASSERT ( locker . releaseWriteUnitOfWork ( & lockInfo ) ) ; <nl> + <nl> + / / Things shouldn ' t be locked anymore . <nl> + ASSERT_EQUALS ( MODE_NONE , locker . getLockMode ( resIdDatabase ) ) ; <nl> + ASSERT_EQUALS ( MODE_NONE , locker . getLockMode ( resIdCollection ) ) ; <nl> + ASSERT_FALSE ( locker . isLocked ( ) ) ; <nl> + <nl> + / / Restore lock state . <nl> + locker . restoreWriteUnitOfWork ( nullptr , lockInfo ) ; <nl> + <nl> + / / Make sure things were re - locked . <nl> + ASSERT_EQUALS ( MODE_IX , locker . getLockMode ( resIdDatabase ) ) ; <nl> + ASSERT_EQUALS ( MODE_X , locker . getLockMode ( resIdCollection ) ) ; <nl> + ASSERT ( locker . isLocked ( ) ) ; <nl> + <nl> + locker . endWriteUnitOfWork ( ) ; <nl> + <nl> + ASSERT_EQUALS ( MODE_NONE , locker . getLockMode ( resIdDatabase ) ) ; <nl> + ASSERT_EQUALS ( MODE_NONE , locker . getLockMode ( resIdCollection ) ) ; <nl> + ASSERT_FALSE ( locker . isLocked ( ) ) ; <nl> + } <nl> + <nl> + TEST ( LockerImpl , releaseAndRestoreReadOnlyWriteUnitOfWork ) { <nl> + Locker : : LockSnapshot lockInfo ; <nl> + <nl> + LockerImpl locker ; <nl> + <nl> + const ResourceId resIdDatabase ( RESOURCE_DATABASE , " TestDB " _sd ) ; <nl> + const ResourceId resIdCollection ( RESOURCE_COLLECTION , " TestDB . collection " _sd ) ; <nl> + <nl> + / / Snapshot transactions delay shared locks as well . <nl> + locker . setSharedLocksShouldTwoPhaseLock ( true ) ; <nl> + <nl> + locker . beginWriteUnitOfWork ( ) ; <nl> + / / Lock some stuff in IS mode . <nl> + locker . lockGlobal ( MODE_IS ) ; <nl> + ASSERT_EQUALS ( LOCK_OK , locker . lock ( resIdDatabase , MODE_IS ) ) ; <nl> + ASSERT_EQUALS ( LOCK_OK , locker . lock ( resIdCollection , MODE_IS ) ) ; <nl> + / / Unlock them . <nl> + ASSERT_FALSE ( locker . unlock ( resIdCollection ) ) ; <nl> + ASSERT_FALSE ( locker . unlock ( resIdDatabase ) ) ; <nl> + ASSERT_FALSE ( locker . unlockGlobal ( ) ) ; <nl> + ASSERT_EQ ( 3u , locker . numResourcesToUnlockAtEndUnitOfWorkForTest ( ) ) ; <nl> + <nl> + / / Things shouldn ' t be locked anymore . <nl> + ASSERT_TRUE ( locker . releaseWriteUnitOfWork ( & lockInfo ) ) ; <nl> + <nl> + ASSERT_EQUALS ( MODE_NONE , locker . getLockMode ( resIdDatabase ) ) ; <nl> + ASSERT_EQUALS ( MODE_NONE , locker . getLockMode ( resIdCollection ) ) ; <nl> + ASSERT_FALSE ( locker . isLocked ( ) ) ; <nl> + <nl> + / / Restore lock state . <nl> + locker . restoreWriteUnitOfWork ( nullptr , lockInfo ) ; <nl> + <nl> + ASSERT_EQUALS ( MODE_IS , locker . getLockMode ( resIdDatabase ) ) ; <nl> + ASSERT_EQUALS ( MODE_IS , locker . getLockMode ( resIdCollection ) ) ; <nl> + ASSERT_TRUE ( locker . isLocked ( ) ) ; <nl> + <nl> + locker . endWriteUnitOfWork ( ) ; <nl> + <nl> + ASSERT_EQUALS ( MODE_NONE , locker . getLockMode ( resIdDatabase ) ) ; <nl> + ASSERT_EQUALS ( MODE_NONE , locker . getLockMode ( resIdCollection ) ) ; <nl> + ASSERT_FALSE ( locker . isLocked ( ) ) ; <nl> + } <nl> + <nl> + TEST ( LockerImpl , releaseAndRestoreEmptyWriteUnitOfWork ) { <nl> + Locker : : LockSnapshot lockInfo ; <nl> + LockerImpl locker ; <nl> + <nl> + / / Snapshot transactions delay shared locks as well . <nl> + locker . setSharedLocksShouldTwoPhaseLock ( true ) ; <nl> + <nl> + locker . beginWriteUnitOfWork ( ) ; <nl> + <nl> + / / Nothing to yield . <nl> + ASSERT_FALSE ( locker . releaseWriteUnitOfWork ( & lockInfo ) ) ; <nl> + ASSERT_FALSE ( locker . isLocked ( ) ) ; <nl> + <nl> + / / Restore lock state . <nl> + locker . 
restoreWriteUnitOfWork ( nullptr , lockInfo ) ; <nl> + ASSERT_FALSE ( locker . isLocked ( ) ) ; <nl> + <nl> + locker . endWriteUnitOfWork ( ) ; <nl> + ASSERT_FALSE ( locker . isLocked ( ) ) ; <nl> + } <nl> + <nl> TEST ( LockerImpl , DefaultLocker ) { <nl> const ResourceId resId ( RESOURCE_DATABASE , " TestDB " _sd ) ; <nl> <nl> mmm a / src / mongo / db / concurrency / locker . h <nl> ppp b / src / mongo / db / concurrency / locker . h <nl> class Locker { <nl> <nl> / * * <nl> * beginWriteUnitOfWork / endWriteUnitOfWork are called at the start and end of WriteUnitOfWorks . <nl> - * They can be used to implement two - phase locking . Each call to begin should be matched with an <nl> - * eventual call to end . <nl> + * They can be used to implement two - phase locking . Each call to begin or restore should be <nl> + * matched with an eventual call to end or release . <nl> * <nl> * endWriteUnitOfWork , if not called in a nested WUOW , will release all two - phase locking held <nl> * lock resources . <nl> class Locker { <nl> virtual void restoreLockState ( OperationContext * opCtx , const LockSnapshot & stateToRestore ) = 0 ; <nl> virtual void restoreLockState ( const LockSnapshot & stateToRestore ) = 0 ; <nl> <nl> + / * * <nl> + * releaseWriteUnitOfWork opts out two - phase locking and yield the locks after a WUOW <nl> + * has been released . restoreWriteUnitOfWork reaquires the locks and resume the two - phase <nl> + * locking behavior of WUOW . <nl> + * / <nl> + virtual bool releaseWriteUnitOfWork ( LockSnapshot * stateOut ) = 0 ; <nl> + virtual void restoreWriteUnitOfWork ( OperationContext * opCtx , <nl> + const LockSnapshot & stateToRestore ) = 0 ; <nl> + <nl> / * * <nl> * Releases the ticket associated with the Locker . This allows locks to be held without <nl> * contributing to reader / writer throttling . <nl> mmm a / src / mongo / db / concurrency / locker_noop . h <nl> ppp b / src / mongo / db / concurrency / locker_noop . h <nl> class LockerNoop : public Locker { <nl> MONGO_UNREACHABLE ; <nl> } <nl> <nl> - virtual void beginWriteUnitOfWork ( ) { } <nl> + virtual void beginWriteUnitOfWork ( ) override { } <nl> <nl> - virtual void endWriteUnitOfWork ( ) { } <nl> + virtual void endWriteUnitOfWork ( ) override { } <nl> <nl> virtual bool inAWriteUnitOfWork ( ) const { <nl> return false ; <nl> class LockerNoop : public Locker { <nl> MONGO_UNREACHABLE ; <nl> } <nl> <nl> + bool releaseWriteUnitOfWork ( LockSnapshot * stateOut ) override { <nl> + MONGO_UNREACHABLE ; <nl> + } <nl> + <nl> + void restoreWriteUnitOfWork ( OperationContext * opCtx , <nl> + const LockSnapshot & stateToRestore ) override { <nl> + MONGO_UNREACHABLE ; <nl> + } ; <nl> + <nl> virtual void releaseTicket ( ) { <nl> MONGO_UNREACHABLE ; <nl> } <nl> mmm a / src / mongo / db / transaction_participant . cpp <nl> ppp b / src / mongo / db / transaction_participant . cpp <nl> TransactionParticipant : : TxnResources : : TxnResources ( OperationContext * opCtx , bool <nl> } <nl> _locker - > unsetThreadId ( ) ; <nl> <nl> + / / On secondaries , we yield the locks for transactions . <nl> + if ( ! opCtx - > writesAreReplicated ( ) ) { <nl> + _lockSnapshot = std : : make_unique < Locker : : LockSnapshot > ( ) ; <nl> + _locker - > releaseWriteUnitOfWork ( _lockSnapshot . get ( ) ) ; <nl> + } <nl> + <nl> / / This thread must still respect the transaction lock timeout , since it can prevent the <nl> / / transaction from making progress . <nl> auto maxTransactionLockMillis = maxTransactionLockRequestTimeoutMillis . 
load ( ) ; <nl> TransactionParticipant : : TxnResources : : ~ TxnResources ( ) { <nl> / / when starting a new transaction before completing an old one . So we should <nl> / / be at WUOW nesting level 1 ( only the top level WriteUnitOfWork ) . <nl> _recoveryUnit - > abortUnitOfWork ( ) ; <nl> - _locker - > endWriteUnitOfWork ( ) ; <nl> + / / If locks are not yielded , release them . <nl> + if ( ! _lockSnapshot ) { <nl> + _locker - > endWriteUnitOfWork ( ) ; <nl> + } <nl> invariant ( ! _locker - > inAWriteUnitOfWork ( ) ) ; <nl> } <nl> } <nl> <nl> void TransactionParticipant : : TxnResources : : release ( OperationContext * opCtx ) { <nl> / / Perform operations that can fail the release before marking the TxnResources as released . <nl> + <nl> + / / Restore locks if they are yielded . <nl> + if ( _lockSnapshot ) { <nl> + invariant ( ! _locker - > isLocked ( ) ) ; <nl> + / / opCtx is passed in to enable the restoration to be interrupted . <nl> + _locker - > restoreWriteUnitOfWork ( opCtx , * _lockSnapshot ) ; <nl> + _lockSnapshot . reset ( nullptr ) ; <nl> + } <nl> _locker - > reacquireTicket ( opCtx ) ; <nl> <nl> invariant ( ! _released ) ; <nl> _released = true ; <nl> <nl> - / / We intentionally do not capture the return value of swapLockState ( ) , which is just an empty <nl> - / / locker . At the end of the operation , if the transaction is not complete , we will stash the <nl> - / / operation context ' s locker and replace it with a new empty locker . <nl> - <nl> / / It is necessary to lock the client to change the Locker on the OperationContext . <nl> stdx : : lock_guard < Client > lk ( * opCtx - > getClient ( ) ) ; <nl> invariant ( opCtx - > lockState ( ) - > getClientState ( ) = = Locker : : ClientState : : kInactive ) ; <nl> + / / We intentionally do not capture the return value of swapLockState ( ) , which is just an empty <nl> + / / locker . At the end of the operation , if the transaction is not complete , we will stash the <nl> + / / operation context ' s locker and replace it with a new empty locker . <nl> opCtx - > swapLockState ( std : : move ( _locker ) ) ; <nl> opCtx - > lockState ( ) - > updateThreadIdToCurrentThread ( ) ; <nl> <nl> void TransactionParticipant : : unstashTransactionResources ( OperationContext * opCtx <nl> : SpeculativeTransactionOpTime : : kLastApplied ) ; <nl> } <nl> <nl> + / / All locks of transactions must be acquired inside the global WUOW so that we can <nl> + / / yield and restore all locks on state transition . Otherwise , we ' d have to remember <nl> + / / which locks are managed by WUOW . <nl> + invariant ( ! opCtx - > lockState ( ) - > isLocked ( ) ) ; <nl> + <nl> / / Stashed transaction resources do not exist for this in - progress multi - document <nl> / / transaction . Set up the transaction resources on the opCtx . <nl> opCtx - > setWriteUnitOfWork ( std : : make_unique < WriteUnitOfWork > ( opCtx ) ) ; <nl> mmm a / src / mongo / db / transaction_participant . h <nl> ppp b / src / mongo / db / transaction_participant . h <nl> class TransactionParticipant { <nl> private : <nl> bool _released = false ; <nl> std : : unique_ptr < Locker > _locker ; <nl> + std : : unique_ptr < Locker : : LockSnapshot > _lockSnapshot ; <nl> std : : unique_ptr < RecoveryUnit > _recoveryUnit ; <nl> repl : : ReadConcernArgs _readConcernArgs ; <nl> WriteUnitOfWork : : RecoveryUnitState _ruState ; <nl>
SERVER - 37199 Yield locks of transactions in secondary application .
mongodb/mongo
55e72b015e2aa7297c00db29e4d93451ea61a7ca
2018-12-03T23:21:37Z
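The releaseWriteUnitOfWork / restoreWriteUnitOfWork pair added above lets a secondary park every two-phase lock of a prepared transaction in a LockSnapshot and reacquire it later, as TxnResources now does around stash and unstash. A minimal RAII sketch of that call pattern follows, assuming the Locker interface from the diff; LockYieldGuard itself is hypothetical and would only compile inside the mongod source tree.

    // Hypothetical guard: yields all two-phase locks of the single top-level
    // WUOW on construction and restores them on destruction. Mirrors the
    // TxnResources usage in the diff; only valid at WUOW nesting level 1.
    class LockYieldGuard {
    public:
        LockYieldGuard(OperationContext* opCtx, Locker* locker)
            : _opCtx(opCtx), _locker(locker) {
            // Returns false when the WUOW held nothing to yield; restore is
            // still required afterwards to rebalance begin/end bookkeeping.
            _locker->releaseWriteUnitOfWork(&_snapshot);
        }
        ~LockYieldGuard() {
            // Reacquires the saved locks (interruptible via opCtx) and
            // re-enters two-phase locking for the WUOW.
            _locker->restoreWriteUnitOfWork(_opCtx, _snapshot);
        }
    private:
        OperationContext* _opCtx;
        Locker* _locker;
        Locker::LockSnapshot _snapshot;
    };

The design point, per the invariant added to unstashTransactionResources, is that every transaction lock lives inside the global WUOW, so a single snapshot is enough to yield and restore the whole set.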
mmm a / src / mongo / tools / stat . cpp <nl> ppp b / src / mongo / tools / stat . cpp <nl> namespace mongo { <nl> out < < " vsize \ t - virtual size of process in megabytes \ n " ; <nl> out < < " res \ t - resident size of process in megabytes \ n " ; <nl> out < < " faults \ t - # of pages faults per sec \ n " ; <nl> - out < < " locked \ t - percent of time in global write lock \ n " ; <nl> + out < < " locked \ t - locked db and lock time for db with most lock use \ n " ; <nl> out < < " idx miss \ t - percent of btree page misses ( sampled ) \ n " ; <nl> out < < " qr | qw \ t - queue lengths for clients waiting ( read | write ) \ n " ; <nl> out < < " ar | aw \ t - active clients ( read | write ) \ n " ; <nl> namespace mongo { <nl> if ( ! conn . connect ( state - > host , errmsg ) ) <nl> state - > error = errmsg ; <nl> long long cycleNumber = 0 ; <nl> - <nl> + <nl> if ( ! ( state - > username . empty ( ) & & state - > password . empty ( ) ) ) <nl> conn . auth ( " admin " , state - > username , state - > password , errmsg ) ; <nl> <nl> namespace mongo { <nl> <nl> int runMany ( ) { <nl> StateMap threads ; <nl> - <nl> + <nl> { <nl> string orig = getParam ( " host " ) ; <nl> bool showPorts = false ; <nl> int main ( int argc , char * * argv ) { <nl> mongo : : Stat stat ; <nl> return stat . main ( argc , argv ) ; <nl> } <nl> - <nl>
SERVER - 4628 amending mongostat help text with new field
mongodb/mongo
a8901ef1ddffcfd9dccc9be96aacd927f81052dc
2012-07-05T18:56:03Z
mmm a / docs / configuration / listeners / runtime . rst <nl> ppp b / docs / configuration / listeners / runtime . rst <nl> <nl> Runtime <nl> = = = = = = = <nl> <nl> - Listeners support the followiung runtime settings : <nl> + Listeners support the following runtime settings : <nl> <nl> ssl . alt_alpn <nl> What % of requests use the configured : ref : ` alt_alpn < config_listener_ssl_context_alt_alpn > ` <nl> mmm a / docs / intro / deployment_types / double_proxy . rst <nl> ppp b / docs / intro / deployment_types / double_proxy . rst <nl> connections running in the main data center . <nl> In the above diagram , the front Envoy proxy running in region 1 authenticates itself with the front <nl> Envoy proxy running in region 2 via TLS mutual authentication and pinned certificates . This allows <nl> the front Envoy instances running in region 2 to trust elements of the incoming requests that <nl> - ordinarily would not be trustable ( such as the x - forwaded - for HTTP header ) . <nl> + ordinarily would not be trustable ( such as the x - forwarded - for HTTP header ) . <nl> <nl> Configuration template <nl> ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ <nl>
docs : spelling fix ( )
envoyproxy/envoy
5da46198fc1c9b9ce3d6a5fe7e132cda3fdafe25
2017-07-06T22:49:45Z
mmm a / src / python / grpcio / grpc / _cython / _cygrpc / channel . pyx . pxi <nl> ppp b / src / python / grpcio / grpc / _cython / _cygrpc / channel . pyx . pxi <nl> cdef class Channel : <nl> method , host , Timespec deadline not None ) : <nl> if queue . is_shutting_down : <nl> raise ValueError ( " queue must not be shutting down or shutdown " ) <nl> - cdef char * method_c_string = method <nl> - cdef char * host_c_string = NULL <nl> + cdef Slice method_slice = Slice . from_bytes ( method ) <nl> + cdef Slice host_slice <nl> + cdef grpc_slice * host_c_slice = NULL <nl> if host is not None : <nl> - host_c_string = host <nl> + host_slice = Slice . from_bytes ( host ) <nl> + host_c_slice = & host_slice . c_slice <nl> + else : <nl> + host_slice = Slice ( ) <nl> cdef Call operation_call = Call ( ) <nl> - operation_call . references = [ self , method , host , queue ] <nl> + operation_call . references = [ self , method_slice , host_slice , queue ] <nl> cdef grpc_call * parent_call = NULL <nl> if parent is not None : <nl> parent_call = parent . c_call <nl> with nogil : <nl> operation_call . c_call = grpc_channel_create_call ( <nl> self . c_channel , parent_call , flags , <nl> - queue . c_completion_queue , method_c_string , host_c_string , <nl> + queue . c_completion_queue , method_slice . c_slice , host_c_slice , <nl> deadline . c_time , NULL ) <nl> return operation_call <nl> <nl> mmm a / src / python / grpcio / grpc / _cython / _cygrpc / grpc . pxi <nl> ppp b / src / python / grpcio / grpc / _cython / _cygrpc / grpc . pxi <nl> cdef extern from " grpc / byte_buffer_reader . h " : <nl> pass <nl> <nl> <nl> + cdef extern from " grpc / impl / codegen / exec_ctx_fwd . h " : <nl> + <nl> + struct grpc_exec_ctx : <nl> + # We don ' t care about the internals <nl> + pass <nl> + <nl> + <nl> cdef extern from " grpc / grpc . h " : <nl> <nl> ctypedef struct grpc_slice : <nl> cdef extern from " grpc / grpc . h " : <nl> <nl> grpc_slice grpc_slice_ref ( grpc_slice s ) nogil <nl> void grpc_slice_unref ( grpc_slice s ) nogil <nl> + grpc_slice grpc_empty_slice ( ) nogil <nl> grpc_slice grpc_slice_new ( void * p , size_t len , void ( * destroy ) ( void * ) ) nogil <nl> grpc_slice grpc_slice_new_with_len ( <nl> void * p , size_t len , void ( * destroy ) ( void * , size_t ) ) nogil <nl> cdef extern from " grpc / grpc . h " : <nl> <nl> ctypedef struct grpc_arg_pointer_vtable : <nl> void * ( * copy ) ( void * ) <nl> - void ( * destroy ) ( void * ) <nl> + void ( * destroy ) ( grpc_exec_ctx * , void * ) <nl> int ( * cmp ) ( void * , void * ) <nl> <nl> ctypedef struct grpc_arg_value_pointer : <nl> cdef extern from " grpc / grpc . h " : <nl> GRPC_CHANNEL_SHUTDOWN <nl> <nl> ctypedef struct grpc_metadata : <nl> - const char * key <nl> - const char * value <nl> - size_t value_length <nl> + grpc_slice key <nl> + grpc_slice value <nl> # ignore the ' internal_data . obfuscated ' fields . <nl> <nl> ctypedef enum grpc_completion_type : <nl> cdef extern from " grpc / grpc . h " : <nl> void grpc_metadata_array_destroy ( grpc_metadata_array * array ) nogil <nl> <nl> ctypedef struct grpc_call_details : <nl> - char * method <nl> - size_t method_capacity <nl> - char * host <nl> - size_t host_capacity <nl> + grpc_slice method <nl> + grpc_slice host <nl> gpr_timespec deadline <nl> <nl> void grpc_call_details_init ( grpc_call_details * details ) nogil <nl> cdef extern from " grpc / grpc . 
h " : <nl> size_t trailing_metadata_count <nl> grpc_metadata * trailing_metadata <nl> grpc_status_code status <nl> - const char * status_details <nl> + grpc_slice * status_details <nl> <nl> ctypedef struct grpc_op_data_recv_status_on_client : <nl> grpc_metadata_array * trailing_metadata <nl> grpc_status_code * status <nl> - char * * status_details <nl> - size_t * status_details_capacity <nl> + grpc_slice * status_details <nl> <nl> ctypedef struct grpc_op_data_recv_close_on_server : <nl> int * cancelled <nl> cdef extern from " grpc / grpc . h " : <nl> const grpc_channel_args * args , <nl> void * reserved ) nogil <nl> grpc_call * grpc_channel_create_call ( <nl> - grpc_channel * channel , grpc_call * parent_call , uint32_t propagation_mask , <nl> - grpc_completion_queue * completion_queue , const char * method , <nl> - const char * host , gpr_timespec deadline , void * reserved ) nogil <nl> + grpc_channel * channel , grpc_call * parent_call , uint32_t propagation_mask , <nl> + grpc_completion_queue * completion_queue , grpc_slice method , <nl> + const grpc_slice * host , gpr_timespec deadline , void * reserved ) nogil <nl> grpc_connectivity_state grpc_channel_check_connectivity_state ( <nl> grpc_channel * channel , int try_to_connect ) nogil <nl> void grpc_channel_watch_connectivity_state ( <nl> cdef extern from " grpc / compression . h " : <nl> grpc_compression_algorithm default_compression_algorithm <nl> <nl> int grpc_compression_algorithm_parse ( <nl> - const char * name , size_t name_length , <nl> - grpc_compression_algorithm * algorithm ) nogil <nl> + grpc_slice value , grpc_compression_algorithm * algorithm ) nogil <nl> int grpc_compression_algorithm_name ( grpc_compression_algorithm algorithm , <nl> char * * name ) nogil <nl> grpc_compression_algorithm grpc_compression_algorithm_for_level ( <nl> mmm a / src / python / grpcio / grpc / _cython / _cygrpc / records . pxd . pxi <nl> ppp b / src / python / grpcio / grpc / _cython / _cygrpc / records . pxd . pxi <nl> cdef class Event : <nl> cdef readonly Operations batch_operations <nl> <nl> <nl> + cdef class Slice : <nl> + <nl> + cdef grpc_slice c_slice <nl> + <nl> + cdef void _assign_slice ( self , grpc_slice new_slice ) nogil <nl> + @ staticmethod <nl> + cdef Slice from_slice ( grpc_slice slice ) <nl> + <nl> + <nl> cdef class ByteBuffer : <nl> <nl> cdef grpc_byte_buffer * c_byte_buffer <nl> cdef class ChannelArgs : <nl> cdef class Metadatum : <nl> <nl> cdef grpc_metadata c_metadata <nl> - cdef object _key , _value <nl> + cdef Slice _key , <nl> + cdef Slice _value <nl> <nl> <nl> cdef class Metadata : <nl> cdef class Operation : <nl> cdef ByteBuffer _received_message <nl> cdef Metadata _received_metadata <nl> cdef grpc_status_code _received_status_code <nl> - cdef char * _received_status_details <nl> - cdef size_t _received_status_details_capacity <nl> + cdef Slice _received_status_details <nl> cdef int _received_cancelled <nl> cdef readonly bint is_valid <nl> cdef object references <nl> mmm a / src / python / grpcio / grpc / _cython / _cygrpc / records . pyx . pxi <nl> ppp b / src / python / grpcio / grpc / _cython / _cygrpc / records . pyx . pxi <nl> cdef class CallDetails : <nl> <nl> @ property <nl> def method ( self ) : <nl> - if self . c_details . method ! = NULL : <nl> - return < bytes > self . c_details . method <nl> - else : <nl> - return None <nl> + return Slice . from_slice ( self . c_details . method ) . bytes ( ) <nl> <nl> @ property <nl> def host ( self ) : <nl> - if self . c_details . host ! 
= NULL : <nl> - return < bytes > self . c_details . host <nl> - else : <nl> - return None <nl> + return Slice . from_slice ( self . c_details . host ) . bytes ( ) <nl> <nl> @ property <nl> def deadline ( self ) : <nl> cdef class Event : <nl> self . is_new_request = is_new_request <nl> <nl> <nl> + cdef class Slice : <nl> + <nl> + def __cinit__ ( self ) : <nl> + with nogil : <nl> + grpc_init ( ) <nl> + self . c_slice = grpc_empty_slice ( ) <nl> + <nl> + cdef void _assign_slice ( self , grpc_slice new_slice ) nogil : <nl> + grpc_slice_unref ( self . c_slice ) <nl> + self . c_slice = new_slice <nl> + <nl> + @ staticmethod <nl> + def from_bytes ( bytes data ) : <nl> + cdef Slice self = Slice ( ) <nl> + self . _assign_slice ( grpc_slice_from_copied_buffer ( data , len ( data ) ) ) <nl> + return self <nl> + <nl> + @ staticmethod <nl> + cdef Slice from_slice ( grpc_slice slice ) : <nl> + cdef Slice self = Slice ( ) <nl> + grpc_slice_ref ( slice ) <nl> + self . _assign_slice ( slice ) <nl> + return self <nl> + <nl> + def bytes ( self ) : <nl> + with nogil : <nl> + pointer = grpc_slice_start_ptr ( self . c_slice ) <nl> + length = grpc_slice_length ( self . c_slice ) <nl> + return ( < char * > pointer ) [ : length ] <nl> + <nl> + def __dealloc__ ( self ) : <nl> + with nogil : <nl> + grpc_slice_unref ( self . c_slice ) <nl> + grpc_shutdown ( ) <nl> + <nl> + <nl> cdef class ByteBuffer : <nl> <nl> def __cinit__ ( self , bytes data ) : <nl> cdef void * copy_ptr ( void * ptr ) : <nl> return ptr <nl> <nl> <nl> - cdef void destroy_ptr ( void * ptr ) : <nl> + cdef void destroy_ptr ( grpc_exec_ctx * ctx , void * ptr ) : <nl> pass <nl> <nl> <nl> cdef class ChannelArgs : <nl> <nl> cdef class Metadatum : <nl> <nl> + # TODO ( atash ) this should just accept Slice objects . <nl> def __cinit__ ( self , bytes key , bytes value ) : <nl> - self . _key = key <nl> - self . _value = value <nl> - self . c_metadata . key = self . _key <nl> - self . c_metadata . value = self . _value <nl> - self . c_metadata . value_length = len ( self . _value ) <nl> + self . _key = Slice . from_bytes ( key ) <nl> + self . _value = Slice . from_bytes ( value ) <nl> + self . c_metadata . key = self . _key . c_slice <nl> + self . c_metadata . value = self . _value . c_slice <nl> <nl> @ property <nl> def key ( self ) : <nl> - return < bytes > self . c_metadata . key <nl> + return self . _key . bytes ( ) <nl> <nl> @ property <nl> def value ( self ) : <nl> - return < bytes > self . c_metadata . value [ : self . c_metadata . value_length ] <nl> + return self . _value . bytes ( ) <nl> <nl> def __len__ ( self ) : <nl> return 2 <nl> cdef class Metadata : <nl> <nl> def __getitem__ ( self , size_t i ) : <nl> return Metadatum ( <nl> - key = < bytes > self . c_metadata_array . metadata [ i ] . key , <nl> - value = < bytes > self . c_metadata_array . metadata [ i ] . value [ <nl> - : self . c_metadata_array . metadata [ i ] . value_length ] ) <nl> + key = Slice . from_slice ( self . c_metadata_array . metadata [ i ] . key ) . bytes ( ) , <nl> + value = Slice . from_slice ( self . c_metadata_array . metadata [ i ] . value ) . bytes ( ) ) <nl> <nl> def __iter__ ( self ) : <nl> return _MetadataIterator ( self ) <nl> cdef class Operation : <nl> def __cinit__ ( self ) : <nl> grpc_init ( ) <nl> self . references = [ ] <nl> - self . _received_status_details = NULL <nl> - self . _received_status_details_capacity = 0 <nl> + self . _received_status_details = Slice ( ) <nl> self . 
is_valid = False <nl> <nl> @ property <nl> cdef class Operation : <nl> def received_status_details ( self ) : <nl> if self . c_op . type ! = GRPC_OP_RECV_STATUS_ON_CLIENT : <nl> raise TypeError ( " self must be an operation receiving status details " ) <nl> - if self . _received_status_details : <nl> - return self . _received_status_details <nl> - else : <nl> - return None <nl> + return self . _received_status_details . bytes ( ) <nl> <nl> @ property <nl> def received_status_details_or_none ( self ) : <nl> if self . c_op . type ! = GRPC_OP_RECV_STATUS_ON_CLIENT : <nl> return None <nl> - if self . _received_status_details : <nl> - return self . _received_status_details <nl> - else : <nl> - return None <nl> + return self . _received_status_details . bytes ( ) <nl> <nl> @ property <nl> def received_cancelled ( self ) : <nl> cdef class Operation : <nl> return False if self . _received_cancelled = = 0 else True <nl> <nl> def __dealloc__ ( self ) : <nl> - # We * almost * don ' t need to do anything ; most of the objects are handled by <nl> - # Python . The remaining one ( s ) are primitive fields filled in by GRPC core . <nl> - # This means that we need to clean up after receive_status_on_client . <nl> - if self . c_op . type = = GRPC_OP_RECV_STATUS_ON_CLIENT : <nl> - gpr_free ( self . _received_status_details ) <nl> grpc_shutdown ( ) <nl> <nl> def operation_send_initial_metadata ( Metadata metadata , int flags ) : <nl> def operation_send_status_from_server ( <nl> op . c_op . data . send_status_from_server . trailing_metadata = ( <nl> metadata . c_metadata_array . metadata ) <nl> op . c_op . data . send_status_from_server . status = code <nl> - op . c_op . data . send_status_from_server . status_details = details <nl> + cdef Slice details_slice = Slice . from_bytes ( details ) <nl> + op . c_op . data . send_status_from_server . status_details = & details_slice . c_slice <nl> op . references . append ( metadata ) <nl> - op . references . append ( details ) <nl> + op . references . append ( details_slice ) <nl> op . is_valid = True <nl> return op <nl> <nl> def operation_receive_status_on_client ( int flags ) : <nl> op . c_op . data . receive_status_on_client . status = ( <nl> & op . _received_status_code ) <nl> op . c_op . data . receive_status_on_client . status_details = ( <nl> - & op . _received_status_details ) <nl> - op . c_op . data . receive_status_on_client . status_details_capacity = ( <nl> - & op . _received_status_details_capacity ) <nl> + & op . _received_status_details . c_slice ) <nl> op . is_valid = True <nl> return op <nl> <nl>
Begin patching Cython
grpc/grpc
990983110f0c2e948ab23289f994876f1e0f565f
2016-12-08T03:37:01Z
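The Cython Slice class introduced above is a thin owner of a ref-counted grpc_slice, and the same lifetime rules apply when using the C core directly. Here is a standalone C++ sketch of that pattern, assuming the post-rename slice API in <grpc/slice.h>.

    #include <grpc/grpc.h>
    #include <grpc/slice.h>
    #include <string>

    int main() {
      grpc_init();
      // from_copied_buffer returns a fresh refcounted copy of the bytes,
      // just as Slice.from_bytes() does in the .pyx above.
      grpc_slice s = grpc_slice_from_copied_buffer("hello", 5);
      // Reading the bytes back, the C equivalent of Slice.bytes().
      std::string copy(reinterpret_cast<const char*>(GRPC_SLICE_START_PTR(s)),
                       GRPC_SLICE_LENGTH(s));
      grpc_slice_unref(s);  // every reference must be unref'd exactly once
      grpc_shutdown();
      return copy == "hello" ? 0 : 1;
    }

Note how the wrapper's __cinit__/__dealloc__ bracket the slice with grpc_init/grpc_shutdown and a final grpc_slice_unref, which is exactly the balance this sketch keeps by hand.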
mmm a / src / mongo / bson / bson - inl . h <nl> ppp b / src / mongo / bson / bson - inl . h <nl> namespace mongo { <nl> return true ; <nl> } <nl> <nl> + template < > inline bool BSONElement : : coerce < double > ( double * out ) const { <nl> + if ( ! isNumber ( ) ) <nl> + return false ; <nl> + * out = numberDouble ( ) ; <nl> + return true ; <nl> + } <nl> + <nl> + template < > inline bool BSONElement : : coerce < bool > ( bool * out ) const { <nl> + * out = trueValue ( ) ; <nl> + return true ; <nl> + } <nl> + <nl> template < > inline bool BSONElement : : coerce < std : : vector < std : : string > > ( std : : vector < std : : string > * out ) const { <nl> if ( type ( ) ! = mongo : : Array ) <nl> return false ; <nl>
BSONElement : : coerce for double and bool
mongodb/mongo
9291e8f4f875fc66ea53b6c365714cfe02a7c490
2012-12-21T05:01:55Z
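Callers use the new specializations like the existing ones: coerce<T> returns false rather than throwing when the element cannot be converted. A hedged usage sketch follows; it compiles only against the mongo source tree, and the document shape is illustrative.

    // Assuming a document like { x : 3.5 , flag : 1 }.
    bool readSettings(const mongo::BSONObj& obj) {
        double x;
        bool flag;
        if (!obj["x"].coerce<double>(&x))
            return false;                 // "x" missing or not numeric
        obj["flag"].coerce<bool>(&flag);  // always true: backed by trueValue()
        // flag ends up true here, since trueValue() treats nonzero numbers
        // as true.
        return true;
    }

Per the diff, the bool specialization never fails, so its return value carries no information; only the numeric coercions need checking.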
mmm a / lib / AST / Builtins . cpp <nl> ppp b / lib / AST / Builtins . cpp <nl> createGenericParam ( ASTContext & ctx , const char * name , unsigned index ) { <nl> <nl> / / / Create a generic parameter list with multiple generic parameters . <nl> static GenericParamList * getGenericParams ( ASTContext & ctx , <nl> - unsigned numParameters , <nl> - bool isAnyObject ) { <nl> + unsigned numParameters ) { <nl> assert ( numParameters < = llvm : : array_lengthof ( GenericParamNames ) ) ; <nl> <nl> - SmallVector < GenericTypeParamDecl * , 2 > genericParams ; <nl> + SmallVector < GenericTypeParamDecl * , 2 > genericParams ; <nl> for ( unsigned i = 0 ; i ! = numParameters ; + + i ) <nl> genericParams . push_back ( createGenericParam ( ctx , GenericParamNames [ i ] , i ) ) ; <nl> <nl> - <nl> - if ( isAnyObject ) { <nl> - CanType ao = ctx . getAnyObjectType ( ) ; <nl> - SmallVector < RequirementRepr , 1 > req ; <nl> - req . push_back ( RequirementRepr : : getTypeConstraint ( TypeLoc : : withoutLoc ( genericParams [ 0 ] - > getInterfaceType ( ) ) , SourceLoc ( ) , <nl> - TypeLoc : : withoutLoc ( ao ) ) ) ; <nl> - <nl> - auto paramList = GenericParamList : : create ( ctx , SourceLoc ( ) , genericParams , <nl> - SourceLoc ( ) , req , SourceLoc ( ) ) ; <nl> - return paramList ; <nl> - } <nl> - <nl> auto paramList = GenericParamList : : create ( ctx , SourceLoc ( ) , genericParams , <nl> SourceLoc ( ) ) ; <nl> return paramList ; <nl> namespace { <nl> <nl> public : <nl> BuiltinFunctionBuilder ( ASTContext & ctx , unsigned numGenericParams = 1 , <nl> - bool isAnyObject = false ) <nl> + bool wantsAdditionalAnyObjectRequirement = false ) <nl> : Context ( ctx ) { <nl> - TheGenericParamList = getGenericParams ( ctx , numGenericParams , isAnyObject ) ; <nl> + TheGenericParamList = getGenericParams ( ctx , numGenericParams ) ; <nl> + if ( wantsAdditionalAnyObjectRequirement ) { <nl> + Requirement req ( RequirementKind : : Conformance , <nl> + TheGenericParamList - > getParams ( ) [ 0 ] - > getInterfaceType ( ) , <nl> + ctx . getAnyObjectType ( ) ) ; <nl> + addedRequirements . push_back ( req ) ; <nl> + } <nl> for ( auto gp : TheGenericParamList - > getParams ( ) ) { <nl> genericParamTypes . push_back ( <nl> gp - > getDeclaredInterfaceType ( ) - > castTo < GenericTypeParamType > ( ) ) ; <nl>
[ NFC ] Teach BuiltinFunctionBuilder to Build Requirements
apple/swift
68d2d824b76b936cb488f4ea79d5f1c72031b8dc
2020-06-11T23:18:23Z
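The refactor swaps a parsed RequirementRepr spliced into the GenericParamList for a semantic Requirement that the builder records and applies when the generic signature is formed; the constraint itself, T : AnyObject on the first parameter, is unchanged. The core of the new pattern, lifted from the diff (Swift compiler internals, not a standalone program):

    // Record "T0 : AnyObject" as a semantic requirement instead of a
    // parsed-syntax one attached to the parameter list.
    Requirement req(RequirementKind::Conformance,
                    TheGenericParamList->getParams()[0]->getInterfaceType(),
                    ctx.getAnyObjectType());
    addedRequirements.push_back(req);  // applied when the signature is built

Working with semantic Requirement values avoids round-tripping the constraint through source-level requirement syntax, which is why getGenericParams no longer needs its isAnyObject parameter.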
new file mode 100644 <nl> index 00000000000 . . f1e99ee8cac <nl> mmm / dev / null <nl> ppp b / . gitattributes <nl> <nl> + * . cpp eol = lf <nl> + * . h eol = lf <nl> + * . py eol = lf <nl> + * . hpp eol = lf <nl> mmm a / scene / 2d / sprite . cpp <nl> ppp b / scene / 2d / sprite . cpp <nl> void Sprite : : set_region_rect ( const Rect2 & p_region_rect ) { <nl> if ( region & & changed ) { <nl> update ( ) ; <nl> item_rect_changed ( ) ; <nl> + _change_notify ( " region_rect " ) ; <nl> } <nl> } <nl> <nl> mmm a / tools / editor / editor_node . cpp <nl> ppp b / tools / editor / editor_node . cpp <nl> <nl> <nl> / / plugins <nl> # include " plugins / sprite_frames_editor_plugin . h " <nl> + # include " plugins / sprite_region_editor_plugin . h " <nl> # include " plugins / canvas_item_editor_plugin . h " <nl> # include " plugins / spatial_editor_plugin . h " <nl> # include " plugins / sample_editor_plugin . h " <nl> EditorNode : : EditorNode ( ) { <nl> add_editor_plugin ( memnew ( TileSetEditorPlugin ( this ) ) ) ; <nl> add_editor_plugin ( memnew ( TileMapEditorPlugin ( this ) ) ) ; <nl> add_editor_plugin ( memnew ( SpriteFramesEditorPlugin ( this ) ) ) ; <nl> + add_editor_plugin ( memnew ( SpriteRegionEditorPlugin ( this ) ) ) ; <nl> add_editor_plugin ( memnew ( Particles2DEditorPlugin ( this ) ) ) ; <nl> add_editor_plugin ( memnew ( Path2DEditorPlugin ( this ) ) ) ; <nl> add_editor_plugin ( memnew ( PathEditorPlugin ( this ) ) ) ; <nl> new file mode 100644 <nl> index 00000000000 . . dcdd86c9b51 <nl> Binary files / dev / null and b / tools / editor / icons / icon_grid . png differ <nl> new file mode 100644 <nl> index 00000000000 . . 824607f2ccd <nl> Binary files / dev / null and b / tools / editor / icons / icon_region_edit . png differ <nl> mmm a / tools / editor / plugins / polygon_2d_editor_plugin . cpp <nl> ppp b / tools / editor / plugins / polygon_2d_editor_plugin . cpp <nl> void Polygon2DEditor : : _notification ( int p_what ) { <nl> uv_button [ UV_MODE_ROTATE ] - > set_icon ( get_icon ( " ToolRotate " , " EditorIcons " ) ) ; <nl> uv_button [ UV_MODE_SCALE ] - > set_icon ( get_icon ( " ToolScale " , " EditorIcons " ) ) ; <nl> <nl> + b_snap_grid - > set_icon ( get_icon ( " Grid " , " EditorIcons " ) ) ; <nl> + b_snap_enable - > set_icon ( get_icon ( " Snap " , " EditorIcons " ) ) ; <nl> + uv_icon_zoom - > set_texture ( get_icon ( " Zoom " , " EditorIcons " ) ) ; <nl> <nl> } break ; <nl> case NOTIFICATION_FIXED_PROCESS : { <nl> void Polygon2DEditor : : _menu_option ( int p_option ) { <nl> } <nl> } <nl> <nl> + void Polygon2DEditor : : _set_use_snap ( bool p_use ) <nl> + { <nl> + use_snap = p_use ; <nl> + } <nl> + <nl> + void Polygon2DEditor : : _set_show_grid ( bool p_show ) <nl> + { <nl> + snap_show_grid = p_show ; <nl> + uv_edit_draw - > update ( ) ; <nl> + } <nl> + <nl> + void Polygon2DEditor : : _set_snap_off_x ( float p_val ) <nl> + { <nl> + snap_offset . x = p_val ; <nl> + uv_edit_draw - > update ( ) ; <nl> + } <nl> + <nl> + void Polygon2DEditor : : _set_snap_off_y ( float p_val ) <nl> + { <nl> + snap_offset . y = p_val ; <nl> + uv_edit_draw - > update ( ) ; <nl> + } <nl> + <nl> + void Polygon2DEditor : : _set_snap_step_x ( float p_val ) <nl> + { <nl> + snap_step . x = p_val ; <nl> + uv_edit_draw - > update ( ) ; <nl> + } <nl> + <nl> + void Polygon2DEditor : : _set_snap_step_y ( float p_val ) <nl> + { <nl> + snap_step . 
y = p_val ; <nl> + uv_edit_draw - > update ( ) ; <nl> + } <nl> + <nl> void Polygon2DEditor : : _wip_close ( ) { <nl> <nl> undo_redo - > create_action ( " Create Poly " ) ; <nl> void Polygon2DEditor : : _uv_input ( const InputEvent & p_input ) { <nl> <nl> Vector2 tuv = mtx . xform ( uv_prev [ i ] ) ; <nl> if ( tuv . distance_to ( Vector2 ( mb . x , mb . y ) ) < 8 ) { <nl> - <nl> + uv_drag_from = tuv ; <nl> uv_drag_index = i ; <nl> } <nl> } <nl> void Polygon2DEditor : : _uv_input ( const InputEvent & p_input ) { <nl> <nl> } else if ( uv_drag ) { <nl> <nl> - Vector2 uv_drag_to ( mm . x , mm . y ) ; <nl> + Vector2 uv_drag_to = snap_point ( Vector2 ( mm . x , mm . y ) ) ; <nl> Vector2 drag = mtx . affine_inverse ( ) . xform ( uv_drag_to ) - mtx . affine_inverse ( ) . xform ( uv_drag_from ) ; <nl> <nl> <nl> void Polygon2DEditor : : _uv_draw ( ) { <nl> uv_edit_draw - > draw_texture ( base_tex , Point2 ( ) ) ; <nl> VS : : get_singleton ( ) - > canvas_item_add_set_transform ( uv_edit_draw - > get_canvas_item ( ) , Matrix32 ( ) ) ; <nl> <nl> + if ( snap_show_grid ) { <nl> + Size2 s = uv_edit_draw - > get_size ( ) ; <nl> + int last_cell ; <nl> + <nl> + if ( snap_step . x ! = 0 ) { <nl> + for ( int i = 0 ; i < s . width ; i + + ) { <nl> + int cell = Math : : fast_ftoi ( Math : : floor ( ( mtx . affine_inverse ( ) . xform ( Vector2 ( i , 0 ) ) . x - snap_offset . x ) / snap_step . x ) ) ; <nl> + if ( i = = 0 ) <nl> + last_cell = cell ; <nl> + if ( last_cell ! = cell ) <nl> + uv_edit_draw - > draw_line ( Point2 ( i , 0 ) , Point2 ( i , s . height ) , Color ( 0 . 3 , 0 . 7 , 1 , 0 . 3 ) ) ; <nl> + last_cell = cell ; <nl> + } <nl> + } <nl> + <nl> + if ( snap_step . y ! = 0 ) { <nl> + for ( int i = 0 ; i < s . height ; i + + ) { <nl> + int cell = Math : : fast_ftoi ( Math : : floor ( ( mtx . affine_inverse ( ) . xform ( Vector2 ( 0 , i ) ) . y - snap_offset . y ) / snap_step . y ) ) ; <nl> + if ( i = = 0 ) <nl> + last_cell = cell ; <nl> + if ( last_cell ! = cell ) <nl> + uv_edit_draw - > draw_line ( Point2 ( 0 , i ) , Point2 ( s . width , i ) , Color ( 0 . 3 , 0 . 7 , 1 , 0 . 3 ) ) ; <nl> + last_cell = cell ; <nl> + } <nl> + } <nl> + } <nl> + <nl> DVector < Vector2 > uvs = node - > get_uv ( ) ; <nl> Ref < Texture > handle = get_icon ( " EditorHandle " , " EditorIcons " ) ; <nl> <nl> void Polygon2DEditor : : _bind_methods ( ) { <nl> ObjectTypeDB : : bind_method ( _MD ( " _uv_input " ) , & Polygon2DEditor : : _uv_input ) ; <nl> ObjectTypeDB : : bind_method ( _MD ( " _uv_scroll_changed " ) , & Polygon2DEditor : : _uv_scroll_changed ) ; <nl> ObjectTypeDB : : bind_method ( _MD ( " _node_removed " ) , & Polygon2DEditor : : _node_removed ) ; <nl> + ObjectTypeDB : : bind_method ( _MD ( " _set_use_snap " ) , & Polygon2DEditor : : _set_use_snap ) ; <nl> + ObjectTypeDB : : bind_method ( _MD ( " _set_show_grid " ) , & Polygon2DEditor : : _set_show_grid ) ; <nl> + ObjectTypeDB : : bind_method ( _MD ( " _set_snap_off_x " ) , & Polygon2DEditor : : _set_snap_off_x ) ; <nl> + ObjectTypeDB : : bind_method ( _MD ( " _set_snap_off_y " ) , & Polygon2DEditor : : _set_snap_off_y ) ; <nl> + ObjectTypeDB : : bind_method ( _MD ( " _set_snap_step_x " ) , & Polygon2DEditor : : _set_snap_step_x ) ; <nl> + ObjectTypeDB : : bind_method ( _MD ( " _set_snap_step_y " ) , & Polygon2DEditor : : _set_snap_step_y ) ; <nl> + <nl> <nl> + } <nl> <nl> + inline float _snap_scalar ( float p_offset , float p_step , float p_target ) { <nl> + return p_step ! = 0 ? 
Math : : stepify ( p_target - p_offset , p_step ) + p_offset : p_target ; <nl> + } <nl> + <nl> + Vector2 Polygon2DEditor : : snap_point ( Vector2 p_target ) const { <nl> + if ( use_snap ) { <nl> + p_target . x = _snap_scalar ( snap_offset . x * uv_draw_zoom - uv_draw_ofs . x , snap_step . x * uv_draw_zoom , p_target . x ) ; <nl> + p_target . y = _snap_scalar ( snap_offset . y * uv_draw_zoom - uv_draw_ofs . y , snap_step . y * uv_draw_zoom , p_target . y ) ; <nl> + } <nl> + <nl> + return p_target ; <nl> } <nl> <nl> Polygon2DEditor : : Polygon2DEditor ( EditorNode * p_editor ) { <nl> Polygon2DEditor : : Polygon2DEditor ( EditorNode * p_editor ) { <nl> editor = p_editor ; <nl> undo_redo = editor - > get_undo_redo ( ) ; <nl> <nl> + snap_step = Vector2 ( 10 , 10 ) ; <nl> + use_snap = false ; <nl> + snap_show_grid = false ; <nl> + <nl> add_child ( memnew ( VSeparator ) ) ; <nl> button_create = memnew ( ToolButton ) ; <nl> add_child ( button_create ) ; <nl> Polygon2DEditor : : Polygon2DEditor ( EditorNode * p_editor ) { <nl> uv_menu - > get_popup ( ) - > add_separator ( ) ; <nl> uv_menu - > get_popup ( ) - > add_item ( " Clear UV " , UVEDIT_UV_CLEAR ) ; <nl> uv_menu - > get_popup ( ) - > connect ( " item_pressed " , this , " _menu_option " ) ; <nl> + <nl> + uv_mode_hb - > add_child ( memnew ( VSeparator ) ) ; <nl> + <nl> + b_snap_enable = memnew ( ToolButton ) ; <nl> + uv_mode_hb - > add_child ( b_snap_enable ) ; <nl> + b_snap_enable - > set_text ( " Snap " ) ; <nl> + b_snap_enable - > set_focus_mode ( FOCUS_NONE ) ; <nl> + b_snap_enable - > set_toggle_mode ( true ) ; <nl> + b_snap_enable - > set_pressed ( use_snap ) ; <nl> + b_snap_enable - > set_tooltip ( " Enable Snap " ) ; <nl> + b_snap_enable - > connect ( " toggled " , this , " _set_use_snap " ) ; <nl> + <nl> + b_snap_grid = memnew ( ToolButton ) ; <nl> + uv_mode_hb - > add_child ( b_snap_grid ) ; <nl> + b_snap_grid - > set_text ( " Grid " ) ; <nl> + b_snap_grid - > set_focus_mode ( FOCUS_NONE ) ; <nl> + b_snap_grid - > set_toggle_mode ( true ) ; <nl> + b_snap_grid - > set_pressed ( snap_show_grid ) ; <nl> + b_snap_grid - > set_tooltip ( " Show Grid " ) ; <nl> + b_snap_grid - > connect ( " toggled " , this , " _set_show_grid " ) ; <nl> + <nl> + uv_mode_hb - > add_child ( memnew ( VSeparator ) ) ; <nl> + uv_mode_hb - > add_child ( memnew ( Label ( " Grid Offset : " ) ) ) ; <nl> + <nl> + SpinBox * sb_off_x = memnew ( SpinBox ) ; <nl> + sb_off_x - > set_min ( - 256 ) ; <nl> + sb_off_x - > set_max ( 256 ) ; <nl> + sb_off_x - > set_step ( 1 ) ; <nl> + sb_off_x - > set_val ( snap_offset . x ) ; <nl> + sb_off_x - > set_suffix ( " px " ) ; <nl> + sb_off_x - > connect ( " value_changed " , this , " _set_snap_off_x " ) ; <nl> + uv_mode_hb - > add_child ( sb_off_x ) ; <nl> + <nl> + SpinBox * sb_off_y = memnew ( SpinBox ) ; <nl> + sb_off_y - > set_min ( - 256 ) ; <nl> + sb_off_y - > set_max ( 256 ) ; <nl> + sb_off_y - > set_step ( 1 ) ; <nl> + sb_off_y - > set_val ( snap_offset . y ) ; <nl> + sb_off_y - > set_suffix ( " px " ) ; <nl> + sb_off_y - > connect ( " value_changed " , this , " _set_snap_off_y " ) ; <nl> + uv_mode_hb - > add_child ( sb_off_y ) ; <nl> + <nl> + uv_mode_hb - > add_child ( memnew ( VSeparator ) ) ; <nl> + uv_mode_hb - > add_child ( memnew ( Label ( " Grid Step : " ) ) ) ; <nl> + <nl> + SpinBox * sb_step_x = memnew ( SpinBox ) ; <nl> + sb_step_x - > set_min ( - 256 ) ; <nl> + sb_step_x - > set_max ( 256 ) ; <nl> + sb_step_x - > set_step ( 1 ) ; <nl> + sb_step_x - > set_val ( snap_step . 
x ) ; <nl> + sb_step_x - > set_suffix ( " px " ) ; <nl> + sb_step_x - > connect ( " value_changed " , this , " _set_snap_step_x " ) ; <nl> + uv_mode_hb - > add_child ( sb_step_x ) ; <nl> + <nl> + SpinBox * sb_step_y = memnew ( SpinBox ) ; <nl> + sb_step_y - > set_min ( - 256 ) ; <nl> + sb_step_y - > set_max ( 256 ) ; <nl> + sb_step_y - > set_step ( 1 ) ; <nl> + sb_step_y - > set_val ( snap_step . y ) ; <nl> + sb_step_y - > set_suffix ( " px " ) ; <nl> + sb_step_y - > connect ( " value_changed " , this , " _set_snap_step_y " ) ; <nl> + uv_mode_hb - > add_child ( sb_step_y ) ; <nl> + <nl> uv_mode_hb - > add_child ( memnew ( VSeparator ) ) ; <nl> uv_icon_zoom = memnew ( TextureFrame ) ; <nl> - uv_main_hb - > add_child ( uv_icon_zoom ) ; <nl> + uv_mode_hb - > add_child ( uv_icon_zoom ) ; <nl> uv_zoom = memnew ( HSlider ) ; <nl> uv_zoom - > set_min ( 0 . 01 ) ; <nl> uv_zoom - > set_max ( 4 ) ; <nl> mmm a / tools / editor / plugins / polygon_2d_editor_plugin . h <nl> ppp b / tools / editor / plugins / polygon_2d_editor_plugin . h <nl> class Polygon2DEditor : public HBoxContainer { <nl> UVMode uv_mode ; <nl> AcceptDialog * uv_edit ; <nl> ToolButton * uv_button [ 4 ] ; <nl> + ToolButton * b_snap_enable ; <nl> + ToolButton * b_snap_grid ; <nl> Control * uv_edit_draw ; <nl> HSlider * uv_zoom ; <nl> SpinBox * uv_zoom_value ; <nl> class Polygon2DEditor : public HBoxContainer { <nl> Vector < Vector2 > wip ; <nl> bool wip_active ; <nl> <nl> + bool use_snap ; <nl> + bool snap_show_grid ; <nl> + Vector2 snap_offset ; <nl> + Vector2 snap_step ; <nl> + <nl> void _uv_scroll_changed ( float ) ; <nl> void _uv_input ( const InputEvent & p_input ) ; <nl> void _uv_draw ( ) ; <nl> class Polygon2DEditor : public HBoxContainer { <nl> void _canvas_draw ( ) ; <nl> void _menu_option ( int p_option ) ; <nl> <nl> + void _set_use_snap ( bool p_use ) ; <nl> + void _set_show_grid ( bool p_show ) ; <nl> + void _set_snap_off_x ( float p_val ) ; <nl> + void _set_snap_off_y ( float p_val ) ; <nl> + void _set_snap_step_x ( float p_val ) ; <nl> + void _set_snap_step_y ( float p_val ) ; <nl> + <nl> protected : <nl> void _notification ( int p_what ) ; <nl> void _node_removed ( Node * p_node ) ; <nl> static void _bind_methods ( ) ; <nl> + <nl> + Vector2 snap_point ( Vector2 p_target ) const ; <nl> + <nl> public : <nl> <nl> bool forward_input_event ( const InputEvent & p_event ) ; <nl> new file mode 100644 <nl> index 00000000000 . . 35c53cf5626 <nl> mmm / dev / null <nl> ppp b / tools / editor / plugins / sprite_region_editor_plugin . cpp <nl> <nl> + / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + / * sprite_region_editor_plugin . cpp * / <nl> + / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + / * This file is part of : * / <nl> + / * GODOT ENGINE * / <nl> + / * http : / / www . godotengine . org * / <nl> + / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + / * Copyright ( c ) 2007 - 2015 Juan Linietsky , Ariel Manzur . 
* / <nl> + / * * / <nl> + / * Author : Mariano Suligoy * / <nl> + / * * / <nl> + / * Permission is hereby granted , free of charge , to any person obtaining * / <nl> + / * a copy of this software and associated documentation files ( the * / <nl> + / * " Software " ) , to deal in the Software without restriction , including * / <nl> + / * without limitation the rights to use , copy , modify , merge , publish , * / <nl> + / * distribute , sublicense , and / or sell copies of the Software , and to * / <nl> + / * permit persons to whom the Software is furnished to do so , subject to * / <nl> + / * the following conditions : * / <nl> + / * * / <nl> + / * The above copyright notice and this permission notice shall be * / <nl> + / * included in all copies or substantial portions of the Software . * / <nl> + / * * / <nl> + / * THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , * / <nl> + / * EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * / <nl> + / * MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . * / <nl> + / * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * / <nl> + / * CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , * / <nl> + / * TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE * / <nl> + / * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . * / <nl> + / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + <nl> + # include " sprite_region_editor_plugin . h " <nl> + # include " scene / gui / check_box . h " <nl> + # include " os / input . h " <nl> + # include " os / keyboard . h " <nl> + <nl> + void SpriteRegionEditor : : _region_draw ( ) <nl> + { <nl> + Ref < Texture > base_tex = node - > get_texture ( ) ; <nl> + if ( base_tex . is_null ( ) ) <nl> + return ; <nl> + <nl> + Matrix32 mtx ; <nl> + mtx . elements [ 2 ] = - draw_ofs ; <nl> + mtx . scale_basis ( Vector2 ( draw_zoom , draw_zoom ) ) ; <nl> + <nl> + VS : : get_singleton ( ) - > canvas_item_set_clip ( edit_draw - > get_canvas_item ( ) , true ) ; <nl> + VS : : get_singleton ( ) - > canvas_item_add_set_transform ( edit_draw - > get_canvas_item ( ) , mtx ) ; <nl> + edit_draw - > draw_texture ( base_tex , Point2 ( ) ) ; <nl> + VS : : get_singleton ( ) - > canvas_item_add_set_transform ( edit_draw - > get_canvas_item ( ) , Matrix32 ( ) ) ; <nl> + <nl> + if ( snap_show_grid ) { <nl> + Size2 s = edit_draw - > get_size ( ) ; <nl> + int last_cell ; <nl> + <nl> + if ( snap_step . x ! = 0 ) { <nl> + for ( int i = 0 ; i < s . width ; i + + ) { <nl> + int cell = Math : : fast_ftoi ( Math : : floor ( ( mtx . affine_inverse ( ) . xform ( Vector2 ( i , 0 ) ) . x - snap_offset . x ) / snap_step . x ) ) ; <nl> + if ( i = = 0 ) <nl> + last_cell = cell ; <nl> + if ( last_cell ! = cell ) <nl> + edit_draw - > draw_line ( Point2 ( i , 0 ) , Point2 ( i , s . height ) , Color ( 0 . 3 , 0 . 7 , 1 , 0 . 3 ) ) ; <nl> + last_cell = cell ; <nl> + } <nl> + } <nl> + <nl> + if ( snap_step . y ! = 0 ) { <nl> + for ( int i = 0 ; i < s . height ; i + + ) { <nl> + int cell = Math : : fast_ftoi ( Math : : floor ( ( mtx . affine_inverse ( ) . xform ( Vector2 ( 0 , i ) ) . y - snap_offset . y ) / snap_step . y ) ) ; <nl> + if ( i = = 0 ) <nl> + last_cell = cell ; <nl> + if ( last_cell ! = cell ) <nl> + edit_draw - > draw_line ( Point2 ( 0 , i ) , Point2 ( s . width , i ) , Color ( 0 . 3 , 0 . 7 , 1 , 0 . 
3 ) ) ; <nl> + last_cell = cell ; <nl> + } <nl> + } <nl> + } <nl> + <nl> + Ref < Texture > select_handle = get_icon ( " EditorHandle " , " EditorIcons " ) ; <nl> + <nl> + Rect2 scroll_rect ( Point2 ( ) , mtx . basis_xform ( base_tex - > get_size ( ) ) ) ; <nl> + scroll_rect . expand_to ( mtx . basis_xform ( edit_draw - > get_size ( ) ) ) ; <nl> + <nl> + Vector2 endpoints [ 4 ] = { <nl> + mtx . basis_xform ( rect . pos ) , <nl> + mtx . basis_xform ( rect . pos + Vector2 ( rect . size . x , 0 ) ) , <nl> + mtx . basis_xform ( rect . pos + rect . size ) , <nl> + mtx . basis_xform ( rect . pos + Vector2 ( 0 , rect . size . y ) ) <nl> + } ; <nl> + <nl> + for ( int i = 0 ; i < 4 ; i + + ) { <nl> + <nl> + int prev = ( i + 3 ) % 4 ; <nl> + int next = ( i + 1 ) % 4 ; <nl> + <nl> + Vector2 ofs = ( ( endpoints [ i ] - endpoints [ prev ] ) . normalized ( ) + ( ( endpoints [ i ] - endpoints [ next ] ) . normalized ( ) ) ) . normalized ( ) ; <nl> + ofs * = 1 . 4144 * ( select_handle - > get_size ( ) . width / 2 ) ; <nl> + <nl> + edit_draw - > draw_line ( endpoints [ i ] - draw_ofs , endpoints [ next ] - draw_ofs , Color ( 0 . 9 , 0 . 5 , 0 . 5 ) , 2 ) ; <nl> + <nl> + edit_draw - > draw_texture ( select_handle , ( endpoints [ i ] + ofs - ( select_handle - > get_size ( ) / 2 ) ) . floor ( ) - draw_ofs ) ; <nl> + <nl> + ofs = ( endpoints [ next ] - endpoints [ i ] ) / 2 ; <nl> + ofs + = ( endpoints [ next ] - endpoints [ i ] ) . tangent ( ) . normalized ( ) * ( select_handle - > get_size ( ) . width / 2 ) ; <nl> + <nl> + edit_draw - > draw_texture ( select_handle , ( endpoints [ i ] + ofs - ( select_handle - > get_size ( ) / 2 ) ) . floor ( ) - draw_ofs ) ; <nl> + <nl> + scroll_rect . expand_to ( endpoints [ i ] ) ; <nl> + } <nl> + <nl> + scroll_rect = scroll_rect . grow ( 200 ) ; <nl> + updating_scroll = true ; <nl> + hscroll - > set_min ( scroll_rect . pos . x ) ; <nl> + hscroll - > set_max ( scroll_rect . pos . x + scroll_rect . size . x ) ; <nl> + hscroll - > set_page ( edit_draw - > get_size ( ) . x ) ; <nl> + hscroll - > set_val ( draw_ofs . x ) ; <nl> + hscroll - > set_step ( 0 . 001 ) ; <nl> + <nl> + vscroll - > set_min ( scroll_rect . pos . y ) ; <nl> + vscroll - > set_max ( scroll_rect . pos . y + scroll_rect . size . y ) ; <nl> + vscroll - > set_page ( edit_draw - > get_size ( ) . y ) ; <nl> + vscroll - > set_val ( draw_ofs . y ) ; <nl> + vscroll - > set_step ( 0 . 001 ) ; <nl> + updating_scroll = false ; <nl> + } <nl> + <nl> + void SpriteRegionEditor : : _region_input ( const InputEvent & p_input ) <nl> + { <nl> + Matrix32 mtx ; <nl> + mtx . elements [ 2 ] = - draw_ofs ; <nl> + mtx . scale_basis ( Vector2 ( draw_zoom , draw_zoom ) ) ; <nl> + <nl> + Vector2 endpoints [ 8 ] = { <nl> + mtx . xform ( rect . pos ) + Vector2 ( - 4 , - 4 ) , <nl> + mtx . xform ( rect . pos + Vector2 ( rect . size . x / 2 , 0 ) ) + Vector2 ( 0 , - 4 ) , <nl> + mtx . xform ( rect . pos + Vector2 ( rect . size . x , 0 ) ) + Vector2 ( 4 , - 4 ) , <nl> + mtx . xform ( rect . pos + Vector2 ( rect . size . x , rect . size . y / 2 ) ) + Vector2 ( 4 , 0 ) , <nl> + mtx . xform ( rect . pos + rect . size ) + Vector2 ( 4 , 4 ) , <nl> + mtx . xform ( rect . pos + Vector2 ( rect . size . x / 2 , rect . size . y ) ) + Vector2 ( 0 , 4 ) , <nl> + mtx . xform ( rect . pos + Vector2 ( 0 , rect . size . y ) ) + Vector2 ( - 4 , 4 ) , <nl> + mtx . xform ( rect . pos + Vector2 ( 0 , rect . size . y / 2 ) ) + Vector2 ( - 4 , 0 ) <nl> + } ; <nl> + <nl> + if ( p_input . 
type = = InputEvent : : MOUSE_BUTTON ) { <nl> + <nl> + <nl> + const InputEventMouseButton & mb = p_input . mouse_button ; <nl> + <nl> + if ( mb . button_index = = BUTTON_LEFT ) { <nl> + <nl> + <nl> + if ( mb . pressed ) { <nl> + <nl> + drag_from = mtx . affine_inverse ( ) . xform ( Vector2 ( mb . x , mb . y ) ) ; <nl> + drag_from = snap_point ( drag_from ) ; <nl> + drag = true ; <nl> + rect_prev = node - > get_region_rect ( ) ; <nl> + <nl> + drag_index = - 1 ; <nl> + for ( int i = 0 ; i < 8 ; i + + ) { <nl> + <nl> + Vector2 tuv = endpoints [ i ] ; <nl> + if ( tuv . distance_to ( Vector2 ( mb . x , mb . y ) ) < 8 ) { <nl> + drag_index = i ; <nl> + creating = false ; <nl> + } <nl> + } <nl> + <nl> + if ( drag_index = = - 1 ) { <nl> + creating = true ; <nl> + rect = Rect2 ( drag_from , Size2 ( ) ) ; <nl> + } <nl> + <nl> + } else if ( drag ) { <nl> + <nl> + undo_redo - > create_action ( " Set region_rect " ) ; <nl> + undo_redo - > add_do_method ( node , " set_region_rect " , node - > get_region_rect ( ) ) ; <nl> + undo_redo - > add_undo_method ( node , " set_region_rect " , rect_prev ) ; <nl> + undo_redo - > add_do_method ( edit_draw , " update " ) ; <nl> + undo_redo - > add_undo_method ( edit_draw , " update " ) ; <nl> + undo_redo - > commit_action ( ) ; <nl> + <nl> + drag = false ; <nl> + } <nl> + <nl> + } else if ( mb . button_index = = BUTTON_RIGHT & & mb . pressed ) { <nl> + <nl> + if ( drag ) { <nl> + drag = false ; <nl> + node - > set_region_rect ( rect_prev ) ; <nl> + rect = rect_prev ; <nl> + edit_draw - > update ( ) ; <nl> + } <nl> + <nl> + } else if ( mb . button_index = = BUTTON_WHEEL_UP & & mb . pressed ) { <nl> + <nl> + zoom - > set_val ( zoom - > get_val ( ) / 0 . 9 ) ; <nl> + } else if ( mb . button_index = = BUTTON_WHEEL_DOWN & & mb . pressed ) { <nl> + <nl> + zoom - > set_val ( zoom - > get_val ( ) * 0 . 9 ) ; <nl> + } <nl> + <nl> + } else if ( p_input . type = = InputEvent : : MOUSE_MOTION ) { <nl> + <nl> + const InputEventMouseMotion & mm = p_input . mouse_motion ; <nl> + <nl> + if ( mm . button_mask & BUTTON_MASK_MIDDLE | | Input : : get_singleton ( ) - > is_key_pressed ( KEY_SPACE ) ) { <nl> + <nl> + Vector2 draged ( mm . relative_x , mm . relative_y ) ; <nl> + hscroll - > set_val ( hscroll - > get_val ( ) - draged . x ) ; <nl> + vscroll - > set_val ( vscroll - > get_val ( ) - draged . y ) ; <nl> + <nl> + } else if ( drag ) { <nl> + <nl> + Vector2 new_pos = mtx . affine_inverse ( ) . xform ( Vector2 ( mm . x , mm . y ) ) ; <nl> + new_pos = snap_point ( new_pos ) ; <nl> + <nl> + if ( creating ) { <nl> + rect = Rect2 ( drag_from , Size2 ( ) ) ; <nl> + rect . expand_to ( new_pos ) ; <nl> + node - > set_region_rect ( rect ) ; <nl> + edit_draw - > update ( ) ; <nl> + return ; <nl> + } <nl> + <nl> + switch ( drag_index ) { <nl> + case 0 : { <nl> + Vector2 p = rect_prev . pos + rect_prev . size ; <nl> + rect = Rect2 ( p , Size2 ( ) ) ; <nl> + rect . expand_to ( new_pos ) ; <nl> + node - > set_region_rect ( rect ) ; <nl> + } break ; <nl> + case 1 : { <nl> + Vector2 p = rect_prev . pos + Vector2 ( 0 , rect_prev . size . y ) ; <nl> + rect = Rect2 ( p , Size2 ( rect_prev . size . x , 0 ) ) ; <nl> + rect . expand_to ( new_pos ) ; <nl> + node - > set_region_rect ( rect ) ; <nl> + } break ; <nl> + case 2 : { <nl> + Vector2 p = rect_prev . pos + Vector2 ( 0 , rect_prev . size . y ) ; <nl> + rect = Rect2 ( p , Size2 ( ) ) ; <nl> + rect . expand_to ( new_pos ) ; <nl> + node - > set_region_rect ( rect ) ; <nl> + } break ; <nl> + case 3 : { <nl> + Vector2 p = rect_prev . 
pos ; <nl> + rect = Rect2 ( p , Size2 ( 0 , rect_prev . size . y ) ) ; <nl> + rect . expand_to ( new_pos ) ; <nl> + node - > set_region_rect ( rect ) ; <nl> + } break ; <nl> + case 4 : { <nl> + Vector2 p = rect_prev . pos ; <nl> + rect = Rect2 ( p , Size2 ( ) ) ; <nl> + rect . expand_to ( new_pos ) ; <nl> + node - > set_region_rect ( rect ) ; <nl> + } break ; <nl> + case 5 : { <nl> + Vector2 p = rect_prev . pos ; <nl> + rect = Rect2 ( p , Size2 ( rect_prev . size . x , 0 ) ) ; <nl> + rect . expand_to ( new_pos ) ; <nl> + node - > set_region_rect ( rect ) ; <nl> + } break ; <nl> + case 6 : { <nl> + Vector2 p = rect_prev . pos + Vector2 ( rect_prev . size . x , 0 ) ; <nl> + rect = Rect2 ( p , Size2 ( ) ) ; <nl> + rect . expand_to ( new_pos ) ; <nl> + node - > set_region_rect ( rect ) ; <nl> + } break ; <nl> + case 7 : { <nl> + Vector2 p = rect_prev . pos + Vector2 ( rect_prev . size . x , 0 ) ; <nl> + rect = Rect2 ( p , Size2 ( 0 , rect_prev . size . y ) ) ; <nl> + rect . expand_to ( new_pos ) ; <nl> + node - > set_region_rect ( rect ) ; <nl> + } break ; <nl> + <nl> + } <nl> + edit_draw - > update ( ) ; <nl> + } <nl> + <nl> + } <nl> + } <nl> + <nl> + void SpriteRegionEditor : : _scroll_changed ( float ) <nl> + { <nl> + if ( updating_scroll ) <nl> + return ; <nl> + <nl> + draw_ofs . x = hscroll - > get_val ( ) ; <nl> + draw_ofs . y = vscroll - > get_val ( ) ; <nl> + draw_zoom = zoom - > get_val ( ) ; <nl> + print_line ( " _scroll_changed " ) ; <nl> + edit_draw - > update ( ) ; <nl> + } <nl> + <nl> + void SpriteRegionEditor : : _set_use_snap ( bool p_use ) <nl> + { <nl> + use_snap = p_use ; <nl> + } <nl> + <nl> + void SpriteRegionEditor : : _set_show_grid ( bool p_show ) <nl> + { <nl> + snap_show_grid = p_show ; <nl> + edit_draw - > update ( ) ; <nl> + } <nl> + <nl> + void SpriteRegionEditor : : _set_snap_off_x ( float p_val ) <nl> + { <nl> + snap_offset . x = p_val ; <nl> + edit_draw - > update ( ) ; <nl> + } <nl> + <nl> + void SpriteRegionEditor : : _set_snap_off_y ( float p_val ) <nl> + { <nl> + snap_offset . y = p_val ; <nl> + edit_draw - > update ( ) ; <nl> + } <nl> + <nl> + void SpriteRegionEditor : : _set_snap_step_x ( float p_val ) <nl> + { <nl> + snap_step . x = p_val ; <nl> + edit_draw - > update ( ) ; <nl> + } <nl> + <nl> + void SpriteRegionEditor : : _set_snap_step_y ( float p_val ) <nl> + { <nl> + snap_step . 
y = p_val ; <nl> + edit_draw - > update ( ) ; <nl> + } <nl> + <nl> + void SpriteRegionEditor : : _notification ( int p_what ) <nl> + { <nl> + switch ( p_what ) { <nl> + <nl> + case NOTIFICATION_READY : { <nl> + edit_node - > set_icon ( get_icon ( " RegionEdit " , " EditorIcons " ) ) ; <nl> + b_snap_grid - > set_icon ( get_icon ( " Grid " , " EditorIcons " ) ) ; <nl> + b_snap_enable - > set_icon ( get_icon ( " Snap " , " EditorIcons " ) ) ; <nl> + icon_zoom - > set_texture ( get_icon ( " Zoom " , " EditorIcons " ) ) ; <nl> + } break ; <nl> + } <nl> + } <nl> + <nl> + void SpriteRegionEditor : : _node_removed ( Node * p_node ) <nl> + { <nl> + if ( p_node = = node ) { <nl> + node = NULL ; <nl> + hide ( ) ; <nl> + } <nl> + } <nl> + <nl> + void SpriteRegionEditor : : _bind_methods ( ) <nl> + { <nl> + ObjectTypeDB : : bind_method ( _MD ( " _edit_node " ) , & SpriteRegionEditor : : _edit_node ) ; <nl> + ObjectTypeDB : : bind_method ( _MD ( " _region_draw " ) , & SpriteRegionEditor : : _region_draw ) ; <nl> + ObjectTypeDB : : bind_method ( _MD ( " _region_input " ) , & SpriteRegionEditor : : _region_input ) ; <nl> + ObjectTypeDB : : bind_method ( _MD ( " _scroll_changed " ) , & SpriteRegionEditor : : _scroll_changed ) ; <nl> + ObjectTypeDB : : bind_method ( _MD ( " _node_removed " ) , & SpriteRegionEditor : : _node_removed ) ; <nl> + ObjectTypeDB : : bind_method ( _MD ( " _set_use_snap " ) , & SpriteRegionEditor : : _set_use_snap ) ; <nl> + ObjectTypeDB : : bind_method ( _MD ( " _set_show_grid " ) , & SpriteRegionEditor : : _set_show_grid ) ; <nl> + ObjectTypeDB : : bind_method ( _MD ( " _set_snap_off_x " ) , & SpriteRegionEditor : : _set_snap_off_x ) ; <nl> + ObjectTypeDB : : bind_method ( _MD ( " _set_snap_off_y " ) , & SpriteRegionEditor : : _set_snap_off_y ) ; <nl> + ObjectTypeDB : : bind_method ( _MD ( " _set_snap_step_x " ) , & SpriteRegionEditor : : _set_snap_step_x ) ; <nl> + ObjectTypeDB : : bind_method ( _MD ( " _set_snap_step_y " ) , & SpriteRegionEditor : : _set_snap_step_y ) ; <nl> + } <nl> + <nl> + void SpriteRegionEditor : : edit ( Node * p_sprite ) <nl> + { <nl> + if ( p_sprite ) { <nl> + node = p_sprite - > cast_to < Sprite > ( ) ; <nl> + node - > connect ( " exit_tree " , this , " _node_removed " , varray ( ) , CONNECT_ONESHOT ) ; <nl> + } else { <nl> + if ( node ) <nl> + node - > disconnect ( " exit_tree " , this , " _node_removed " ) ; <nl> + node = NULL ; <nl> + } <nl> + <nl> + } <nl> + void SpriteRegionEditor : : _edit_node ( ) <nl> + { <nl> + if ( node - > get_texture ( ) . is_null ( ) ) { <nl> + <nl> + error - > set_text ( " No texture in this sprite . \ nSet a texture to be able to edit Region . " ) ; <nl> + error - > popup_centered_minsize ( ) ; <nl> + return ; <nl> + } <nl> + <nl> + rect = node - > get_region_rect ( ) ; <nl> + dlg_editor - > popup_centered_ratio ( 0 . 85 ) ; <nl> + } <nl> + <nl> + inline float _snap_scalar ( float p_offset , float p_step , float p_target ) { <nl> + return p_step ! = 0 ? Math : : stepify ( p_target - p_offset , p_step ) + p_offset : p_target ; <nl> + } <nl> + <nl> + Vector2 SpriteRegionEditor : : snap_point ( Vector2 p_target ) const { <nl> + if ( use_snap ) { <nl> + p_target . x = _snap_scalar ( snap_offset . x , snap_step . x , p_target . x ) ; <nl> + p_target . y = _snap_scalar ( snap_offset . y , snap_step . y , p_target . y ) ; <nl> + } <nl> + p_target = p_target . 
snapped ( Size2 ( 1 , 1 ) ) ; <nl> + <nl> + return p_target ; <nl> + } <nl> + <nl> + SpriteRegionEditor : : SpriteRegionEditor ( EditorNode * p_editor ) <nl> + { <nl> + node = NULL ; <nl> + editor = p_editor ; <nl> + undo_redo = editor - > get_undo_redo ( ) ; <nl> + <nl> + snap_step = Vector2 ( 10 , 10 ) ; <nl> + use_snap = false ; <nl> + snap_show_grid = false ; <nl> + <nl> + add_child ( memnew ( VSeparator ) ) ; <nl> + edit_node = memnew ( ToolButton ) ; <nl> + add_child ( edit_node ) ; <nl> + edit_node - > connect ( " pressed " , this , " _edit_node " ) ; <nl> + <nl> + dlg_editor = memnew ( AcceptDialog ) ; <nl> + add_child ( dlg_editor ) ; <nl> + dlg_editor - > set_title ( " Sprite Region Editor " ) ; <nl> + dlg_editor - > set_self_opacity ( 0 . 9 ) ; <nl> + <nl> + VBoxContainer * main_vb = memnew ( VBoxContainer ) ; <nl> + dlg_editor - > add_child ( main_vb ) ; <nl> + dlg_editor - > set_child_rect ( main_vb ) ; <nl> + HBoxContainer * hb_tools = memnew ( HBoxContainer ) ; <nl> + main_vb - > add_child ( hb_tools ) ; <nl> + <nl> + b_snap_enable = memnew ( ToolButton ) ; <nl> + hb_tools - > add_child ( b_snap_enable ) ; <nl> + b_snap_enable - > set_text ( " Snap " ) ; <nl> + b_snap_enable - > set_focus_mode ( FOCUS_NONE ) ; <nl> + b_snap_enable - > set_toggle_mode ( true ) ; <nl> + b_snap_enable - > set_pressed ( use_snap ) ; <nl> + b_snap_enable - > set_tooltip ( " Enable Snap " ) ; <nl> + b_snap_enable - > connect ( " toggled " , this , " _set_use_snap " ) ; <nl> + <nl> + b_snap_grid = memnew ( ToolButton ) ; <nl> + hb_tools - > add_child ( b_snap_grid ) ; <nl> + b_snap_grid - > set_text ( " Grid " ) ; <nl> + b_snap_grid - > set_focus_mode ( FOCUS_NONE ) ; <nl> + b_snap_grid - > set_toggle_mode ( true ) ; <nl> + b_snap_grid - > set_pressed ( snap_show_grid ) ; <nl> + b_snap_grid - > set_tooltip ( " Show Grid " ) ; <nl> + b_snap_grid - > connect ( " toggled " , this , " _set_show_grid " ) ; <nl> + <nl> + hb_tools - > add_child ( memnew ( VSeparator ) ) ; <nl> + hb_tools - > add_child ( memnew ( Label ( " Grid Offset : " ) ) ) ; <nl> + <nl> + SpinBox * sb_off_x = memnew ( SpinBox ) ; <nl> + sb_off_x - > set_min ( - 256 ) ; <nl> + sb_off_x - > set_max ( 256 ) ; <nl> + sb_off_x - > set_step ( 1 ) ; <nl> + sb_off_x - > set_val ( snap_offset . x ) ; <nl> + sb_off_x - > set_suffix ( " px " ) ; <nl> + sb_off_x - > connect ( " value_changed " , this , " _set_snap_off_x " ) ; <nl> + hb_tools - > add_child ( sb_off_x ) ; <nl> + <nl> + SpinBox * sb_off_y = memnew ( SpinBox ) ; <nl> + sb_off_y - > set_min ( - 256 ) ; <nl> + sb_off_y - > set_max ( 256 ) ; <nl> + sb_off_y - > set_step ( 1 ) ; <nl> + sb_off_y - > set_val ( snap_offset . y ) ; <nl> + sb_off_y - > set_suffix ( " px " ) ; <nl> + sb_off_y - > connect ( " value_changed " , this , " _set_snap_off_y " ) ; <nl> + hb_tools - > add_child ( sb_off_y ) ; <nl> + <nl> + hb_tools - > add_child ( memnew ( VSeparator ) ) ; <nl> + hb_tools - > add_child ( memnew ( Label ( " Grid Step : " ) ) ) ; <nl> + <nl> + SpinBox * sb_step_x = memnew ( SpinBox ) ; <nl> + sb_step_x - > set_min ( - 256 ) ; <nl> + sb_step_x - > set_max ( 256 ) ; <nl> + sb_step_x - > set_step ( 1 ) ; <nl> + sb_step_x - > set_val ( snap_step . 
x ) ; <nl> + sb_step_x - > set_suffix ( " px " ) ; <nl> + sb_step_x - > connect ( " value_changed " , this , " _set_snap_step_x " ) ; <nl> + hb_tools - > add_child ( sb_step_x ) ; <nl> + <nl> + SpinBox * sb_step_y = memnew ( SpinBox ) ; <nl> + sb_step_y - > set_min ( - 256 ) ; <nl> + sb_step_y - > set_max ( 256 ) ; <nl> + sb_step_y - > set_step ( 1 ) ; <nl> + sb_step_y - > set_val ( snap_step . y ) ; <nl> + sb_step_y - > set_suffix ( " px " ) ; <nl> + sb_step_y - > connect ( " value_changed " , this , " _set_snap_step_y " ) ; <nl> + hb_tools - > add_child ( sb_step_y ) ; <nl> + <nl> + / / MARIANOGNU : : TODO : Add more tools ? <nl> + <nl> + HBoxContainer * main_hb = memnew ( HBoxContainer ) ; <nl> + main_vb - > add_child ( main_hb ) ; <nl> + edit_draw = memnew ( Control ) ; <nl> + main_hb - > add_child ( edit_draw ) ; <nl> + main_hb - > set_v_size_flags ( SIZE_EXPAND_FILL ) ; <nl> + edit_draw - > set_h_size_flags ( SIZE_EXPAND_FILL ) ; <nl> + <nl> + <nl> + hb_tools - > add_child ( memnew ( VSeparator ) ) ; <nl> + icon_zoom = memnew ( TextureFrame ) ; <nl> + hb_tools - > add_child ( icon_zoom ) ; <nl> + <nl> + zoom = memnew ( HSlider ) ; <nl> + zoom - > set_min ( 0 . 01 ) ; <nl> + zoom - > set_max ( 4 ) ; <nl> + zoom - > set_val ( 1 ) ; <nl> + zoom - > set_step ( 0 . 01 ) ; <nl> + hb_tools - > add_child ( zoom ) ; <nl> + zoom - > set_custom_minimum_size ( Size2 ( 200 , 0 ) ) ; <nl> + zoom_value = memnew ( SpinBox ) ; <nl> + zoom - > share ( zoom_value ) ; <nl> + zoom_value - > set_custom_minimum_size ( Size2 ( 50 , 0 ) ) ; <nl> + hb_tools - > add_child ( zoom_value ) ; <nl> + zoom - > connect ( " value_changed " , this , " _scroll_changed " ) ; <nl> + <nl> + <nl> + <nl> + vscroll = memnew ( VScrollBar ) ; <nl> + main_hb - > add_child ( vscroll ) ; <nl> + vscroll - > connect ( " value_changed " , this , " _scroll_changed " ) ; <nl> + hscroll = memnew ( HScrollBar ) ; <nl> + main_vb - > add_child ( hscroll ) ; <nl> + hscroll - > connect ( " value_changed " , this , " _scroll_changed " ) ; <nl> + <nl> + edit_draw - > connect ( " draw " , this , " _region_draw " ) ; <nl> + edit_draw - > connect ( " input_event " , this , " _region_input " ) ; <nl> + draw_zoom = 1 . 0 ; <nl> + updating_scroll = false ; <nl> + <nl> + error = memnew ( AcceptDialog ) ; <nl> + add_child ( error ) ; <nl> + <nl> + } <nl> + <nl> + void SpriteRegionEditorPlugin : : edit ( Object * p_node ) <nl> + { <nl> + region_editor - > edit ( p_node - > cast_to < Node > ( ) ) ; <nl> + } <nl> + <nl> + bool SpriteRegionEditorPlugin : : handles ( Object * p_node ) const <nl> + { <nl> + return p_node - > is_type ( " Sprite " ) ; <nl> + } <nl> + <nl> + void SpriteRegionEditorPlugin : : make_visible ( bool p_visible ) <nl> + { <nl> + if ( p_visible ) { <nl> + region_editor - > show ( ) ; <nl> + } else { <nl> + region_editor - > hide ( ) ; <nl> + region_editor - > edit ( NULL ) ; <nl> + } <nl> + } <nl> + <nl> + SpriteRegionEditorPlugin : : SpriteRegionEditorPlugin ( EditorNode * p_node ) <nl> + { <nl> + editor = p_node ; <nl> + region_editor = memnew ( SpriteRegionEditor ( p_node ) ) ; <nl> + CanvasItemEditor : : get_singleton ( ) - > add_control_to_menu_panel ( region_editor ) ; <nl> + <nl> + region_editor - > hide ( ) ; <nl> + } <nl> + <nl> new file mode 100644 <nl> index 00000000000 . . cf69395f40b <nl> mmm / dev / null <nl> ppp b / tools / editor / plugins / sprite_region_editor_plugin . 
h <nl> <nl> + / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + / * sprite_region_editor_plugin . h * / <nl> + / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + / * This file is part of : * / <nl> + / * GODOT ENGINE * / <nl> + / * http : / / www . godotengine . org * / <nl> + / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + / * Copyright ( c ) 2007 - 2015 Juan Linietsky , Ariel Manzur . * / <nl> + / * * / <nl> + / * Author : Mariano Suligoy * / <nl> + / * * / <nl> + / * Permission is hereby granted , free of charge , to any person obtaining * / <nl> + / * a copy of this software and associated documentation files ( the * / <nl> + / * " Software " ) , to deal in the Software without restriction , including * / <nl> + / * without limitation the rights to use , copy , modify , merge , publish , * / <nl> + / * distribute , sublicense , and / or sell copies of the Software , and to * / <nl> + / * permit persons to whom the Software is furnished to do so , subject to * / <nl> + / * the following conditions : * / <nl> + / * * / <nl> + / * The above copyright notice and this permission notice shall be * / <nl> + / * included in all copies or substantial portions of the Software . * / <nl> + / * * / <nl> + / * THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , * / <nl> + / * EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * / <nl> + / * MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . * / <nl> + / * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * / <nl> + / * CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , * / <nl> + / * TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE * / <nl> + / * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . * / <nl> + / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + <nl> + # ifndef SPRITE_REGION_EDITOR_PLUGIN_H <nl> + # define SPRITE_REGION_EDITOR_PLUGIN_H <nl> + <nl> + # include " canvas_item_editor_plugin . h " <nl> + # include " tools / editor / editor_plugin . h " <nl> + # include " tools / editor / editor_node . h " <nl> + # include " scene / 2d / sprite . 
h " <nl> + <nl> + class SpriteRegionEditor : public HBoxContainer { <nl> + <nl> + OBJ_TYPE ( SpriteRegionEditor , HBoxContainer ) ; <nl> + <nl> + ToolButton * edit_node ; <nl> + / / Button * use_region ; <nl> + ToolButton * b_snap_enable ; <nl> + ToolButton * b_snap_grid ; <nl> + TextureFrame * icon_zoom ; <nl> + HSlider * zoom ; <nl> + SpinBox * zoom_value ; <nl> + Control * edit_draw ; <nl> + <nl> + VScrollBar * vscroll ; <nl> + HScrollBar * hscroll ; <nl> + <nl> + Sprite * node ; <nl> + EditorNode * editor ; <nl> + AcceptDialog * dlg_editor ; <nl> + UndoRedo * undo_redo ; <nl> + <nl> + Vector2 draw_ofs ; <nl> + float draw_zoom ; <nl> + bool updating_scroll ; <nl> + <nl> + bool use_snap ; <nl> + bool snap_show_grid ; <nl> + Vector2 snap_offset ; <nl> + Vector2 snap_step ; <nl> + <nl> + Rect2 rect ; <nl> + Rect2 rect_prev ; <nl> + bool drag ; <nl> + bool creating ; <nl> + Vector2 drag_from ; <nl> + int drag_index ; <nl> + <nl> + AcceptDialog * error ; <nl> + <nl> + void _set_use_snap ( bool p_use ) ; <nl> + void _set_show_grid ( bool p_show ) ; <nl> + void _set_snap_off_x ( float p_val ) ; <nl> + void _set_snap_off_y ( float p_val ) ; <nl> + void _set_snap_step_x ( float p_val ) ; <nl> + void _set_snap_step_y ( float p_val ) ; <nl> + <nl> + protected : <nl> + <nl> + void _notification ( int p_what ) ; <nl> + void _node_removed ( Node * p_node ) ; <nl> + static void _bind_methods ( ) ; <nl> + <nl> + Vector2 snap_point ( Vector2 p_target ) const ; <nl> + <nl> + public : <nl> + <nl> + void edit ( ) ; <nl> + void _edit_node ( ) ; <nl> + void _region_draw ( ) ; <nl> + void _region_input ( const InputEvent & p_input ) ; <nl> + void _scroll_changed ( float ) ; <nl> + <nl> + void edit ( Node * p_sprite ) ; <nl> + SpriteRegionEditor ( EditorNode * p_editor ) ; <nl> + <nl> + } ; <nl> + <nl> + class SpriteRegionEditorPlugin : public EditorPlugin <nl> + { <nl> + <nl> + OBJ_TYPE ( SpriteRegionEditorPlugin , EditorPlugin ) ; <nl> + <nl> + SpriteRegionEditor * region_editor ; <nl> + EditorNode * editor ; <nl> + public : <nl> + <nl> + virtual String get_name ( ) const { return " Sprite " ; } <nl> + bool has_main_screen ( ) const { return false ; } <nl> + virtual void edit ( Object * p_node ) ; <nl> + virtual bool handles ( Object * p_node ) const ; <nl> + virtual void make_visible ( bool p_visible ) ; <nl> + <nl> + SpriteRegionEditorPlugin ( EditorNode * p_node ) ; <nl> + } ; <nl> + <nl> + # endif / / SPRITE_REGION_EDITOR_PLUGIN_H <nl>
Merge pull request from MarianoGnu / sprite - edit
godotengine/godot
8154dff3d00634d2dd5f0baced2e8ce3dedc4fef
2015-09-28T12:32:42Z
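The Godot record above threads grid snapping through the editors via one scalar rule, _snap_scalar, applied once per axis. A self-contained restatement of that rule, with stepify spelled out the way Godot's Math::stepify behaves (round to the nearest multiple); the helper names here are illustrative, not the engine's:

    #include <cmath>

    // Round x to the nearest multiple of s, as Godot's Math::stepify does.
    inline float stepify(float x, float s) { return std::round(x / s) * s; }

    // Snap one coordinate onto a grid of spacing `step` anchored at `offset`.
    // A zero step disables snapping on that axis, matching the editor code.
    inline float snap_scalar(float offset, float step, float target) {
      return step != 0 ? stepify(target - offset, step) + offset : target;
    }

    // Example: snap_scalar(4, 10, 17) == 14, since grid lines sit at ..., 4, 14, 24, ...

The two-dimensional snap_point in the commit is this helper run once for x and once for y behind the use_snap toggle, followed by rounding to whole pixels.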
mmm a / contrib / Python / cntk / ops / __init__ . py <nl> ppp b / contrib / Python / cntk / ops / __init__ . py <nl> def transpose_dimensions ( x , axis1 , axis2 , name = None ) : <nl> : class : ` cntk . graph . ComputationNode ` <nl> " " " <nl> from cntk . ops . cntk2 import TransposeDimensions <nl> - return TransposeDimensions ( x , axis1 , axis2 , name = name ) <nl> + cntk_axis1 = axis1 + 1 <nl> + cntk_axis2 = axis2 + 1 <nl> + return TransposeDimensions ( x , cntk_axis1 , cntk_axis2 , name = name ) <nl> <nl> def slice ( x , begin_index , end_index , axis = 0 , name = None ) : <nl> ' ' ' <nl> mmm a / contrib / Python / cntk / ops / tests / reshaping_test . py <nl> ppp b / contrib / Python / cntk / ops / tests / reshaping_test . py <nl> <nl> import cntk as C <nl> <nl> RESHAPE_TEST_CASES = [ <nl> - # ( inputShape , outputShape , expectedOutputShape ) <nl> + # ( input_shape , output_shape , expected_output_shape ) <nl> ( [ 2 , 3 ] , [ 3 , 2 ] , [ 3 , 2 ] ) , <nl> ( [ 2 , 3 ] , [ 6 , 1 ] , [ 6 , 1 ] ) , <nl> ( [ 2 , 3 ] , [ 6 , 1 ] , [ 6 , 1 ] ) , <nl> ( [ 6 , 1 ] , [ 2 , 3 ] , [ 2 , 3 ] ) , <nl> ( [ 2 , 3 , 5 ] , [ 5 , 6 ] , [ 5 , 6 ] ) , <nl> - # now we test the feature that we can set one dimension of the outputShape to 0 meaning that it ' s value is inferred <nl> + # now we test the feature that we can set one dimension of the output_shape to 0 meaning that it ' s value is inferred <nl> ( [ 2 , 3 , 5 ] , [ 0 , 6 ] , [ 5 , 6 ] ) , <nl> ( [ 2 , 3 , 5 ] , [ 5 , 0 ] , [ 5 , 6 ] ) , <nl> ] <nl> <nl> - @ pytest . mark . parametrize ( " inputShape , outputShape , expectedOutputShape " , RESHAPE_TEST_CASES ) <nl> - def test_op_reshape ( inputShape , outputShape , expectedOutputShape , device_id , precision ) : <nl> + @ pytest . mark . parametrize ( " input_shape , output_shape , expected_output_shape " , RESHAPE_TEST_CASES ) <nl> + def test_op_reshape ( input_shape , output_shape , expected_output_shape , device_id , precision ) : <nl> # Forward pass test <nl> # = = = = = = = = = = = = = = = = = = <nl> # we compute the expected output for the forward pass <nl> def test_op_reshape ( inputShape , outputShape , expectedOutputShape , device_id , pre <nl> # the first for sequences ( length = 1 , since we have dynamic_axis = ' ' ) <nl> # the second for batch of one sample <nl> <nl> - num_tensor_elements = np . multiply . reduce ( inputShape ) <nl> - input_tensor = np . arange ( num_tensor_elements ) . reshape ( inputShape ) <nl> + num_tensor_elements = np . multiply . reduce ( input_shape ) <nl> + input_tensor = np . arange ( num_tensor_elements ) . reshape ( input_shape ) <nl> <nl> - expected_tensor = input_tensor . reshape ( expectedOutputShape , order = ' F ' ) <nl> + expected_tensor = input_tensor . reshape ( expected_output_shape , order = ' F ' ) <nl> <nl> a = I ( [ input_tensor ] ) <nl> <nl> # reshape into output shape <nl> - reshaped_input = C . reshape ( a , outputShape ) <nl> + <nl> + reshaped_input = C . reshape ( a , output_shape ) <nl> <nl> unittest_helper ( reshaped_input , None , [ [ expected_tensor ] ] , device_id = device_id , <nl> precision = precision , clean_up = True , backward_pass = False ) <nl> def test_op_reshape ( inputShape , outputShape , expectedOutputShape , device_id , pre <nl> a = I ( [ input_tensor ] ) <nl> <nl> # reshape into output shape <nl> - reshaped_input = C . reshape ( a , outputShape ) <nl> + reshaped_input = C . 
reshape ( a , output_shape ) <nl> <nl> some_factor = 100 <nl> weight = expected_tensor * some_factor <nl> def test_op_slice_overload ( device_id , precision ) : <nl> <nl> with pytest . raises ( IndexError ) : <nl> result = a [ 1 , object ( ) , 2 ] <nl> + <nl> + <nl> + TRANSPOSE_DIMS_TEST_CASES = [ <nl> + # ( input_shape , axis1 , axis2 , expected_output_shape ) <nl> + ( [ 2 , 3 ] , 0 , 1 , [ 3 , 2 ] ) , <nl> + ( [ 2 , 3 ] , 1 , 0 , [ 3 , 2 ] ) , <nl> + ( [ 2 , 3 , 5 ] , 0 , 2 , [ 5 , 3 , 2 ] ) , <nl> + ( [ 2 , 2 , 2 ] , 0 , 1 , [ 2 , 2 , 2 ] ) , <nl> + ] <nl> + <nl> + @ pytest . mark . parametrize ( " input_shape , axis1 , axis2 , expected_output_shape " , TRANSPOSE_DIMS_TEST_CASES ) <nl> + def test_op_transpose_dimensions ( input_shape , axis1 , axis2 , expected_output_shape , device_id , precision ) : <nl> + # Forward pass test <nl> + # = = = = = = = = = = = = = = = = = = <nl> + # we compute the expected output for the forward pass <nl> + # we need two surrounding brackets <nl> + # the first for sequences ( length = 1 , since we have dynamic_axis = ' ' ) <nl> + # the second for batch of one sample <nl> + <nl> + num_tensor_elements = np . multiply . reduce ( input_shape ) <nl> + input_tensor = np . arange ( num_tensor_elements ) . reshape ( input_shape ) <nl> + <nl> + permutated_axes = np . arange ( len ( input_shape ) ) <nl> + axis1_idx = permutated_axes [ axis1 ] <nl> + permutated_axes [ axis1 ] = permutated_axes [ axis2 ] <nl> + permutated_axes [ axis2 ] = axis1_idx <nl> + expected_tensor = input_tensor . transpose ( * permutated_axes ) <nl> + <nl> + a = I ( [ input_tensor ] ) <nl> + <nl> + # swap two axes <nl> + reshaped_input = C . transpose_dimensions ( a , axis1 , axis2 ) <nl> + <nl> + unittest_helper ( reshaped_input , None , [ [ expected_tensor ] ] , device_id = device_id , <nl> + precision = precision , clean_up = True , backward_pass = False ) <nl> + <nl> + # Backward pass test <nl> + # = = = = = = = = = = = = = = = = = = <nl> + # Reshaping is just moving the input values to different indexes of the result tensor . <nl> + # If we would compute the gradients on the unmodified tensor , reshape would get 1 for all inputs . <nl> + # For testing the gradients we want to have different gradients for each input index otherwise we can ' t <nl> + # test if they get wrongly permuted during test . To this end we multiply the reshaping result with some weight tensor . <nl> + # For convienience choose ' 100 * expected_tensor ' as weight . <nl> + # The expected gradient is identical to this weight tensor reshaped according the input shape . <nl> + <nl> + a = I ( [ input_tensor ] ) <nl> + <nl> + # swap two axes <nl> + reshaped_input = C . transpose_dimensions ( a , axis1 , axis2 ) <nl> + <nl> + some_factor = 100 <nl> + weight = expected_tensor <nl> + output = reshaped_input * weight <nl> + <nl> + expected_gradient = weight . transpose ( * permutated_axes ) <nl> + <nl> + unittest_helper ( output , None , [ [ input_tensor ] ] , device_id = device_id , <nl> + precision = precision , clean_up = False , backward_pass = True , input_node = a ) <nl> + <nl> mmm a / contrib / Python / cntk / tests / test_utils . py <nl> ppp b / contrib / Python / cntk / tests / test_utils . py <nl> def unittest_helper ( root_node , input_numpy , expected , device_id = - 1 , precision = " f <nl> ctx . precision = precision <nl> assert not ctx . input_nodes <nl> result = ctx . 
eval ( root_node , input_numpy , backward_pass , input_node ) <nl> - <nl> + <nl> assert len ( result ) = = len ( expected ) <nl> for res , exp in zip ( result , expected ) : <nl> + print ( res ) <nl> + print ( exp ) <nl> + print ( ' = = = = ' ) <nl> assert np . allclose ( res , exp , atol = TOLERANCE_ABSOLUTE ) <nl> assert res . shape = = AA ( exp ) . shape <nl>
add first version of transpose unit tests
microsoft/CNTK
15d21d1f9edc61bc98c91043b61ab5307601013b
2016-05-25T14:12:34Z
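The transpose test above derives its expected tensor by swapping two slots of an identity permutation before calling numpy's transpose, while the wrapper bumps both axes by one because CNTK's TransposeDimensions counts axes from 1. The same permutation construction in a standalone C++ sketch (names are mine, not CNTK's):

    #include <algorithm>
    #include <numeric>
    #include <vector>

    // Build the axis permutation that exchanges axis1 and axis2 and leaves
    // every other axis in place, mirroring what the test feeds to transpose().
    std::vector<int> swap_axes(int rank, int axis1, int axis2) {
      std::vector<int> perm(rank);
      std::iota(perm.begin(), perm.end(), 0);  // identity permutation 0..rank-1
      std::swap(perm[axis1], perm[axis2]);     // exchange the two requested axes
      return perm;
    }

    // swap_axes(3, 0, 2) yields {2, 1, 0}: shape [2, 3, 5] becomes [5, 3, 2],
    // matching the ([2, 3, 5], 0, 2, [5, 3, 2]) test case.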
mmm a / atom / browser / native_window_views . cc <nl> ppp b / atom / browser / native_window_views . cc <nl> NativeWindowViews : : NativeWindowViews ( content : : WebContents * web_contents , <nl> window_ ( new views : : Widget ) , <nl> web_view_ ( inspectable_web_contents ( ) - > GetView ( ) - > GetView ( ) ) , <nl> menu_bar_autohide_ ( false ) , <nl> - menu_bar_show_ ( false ) , <nl> + menu_bar_visible_ ( false ) , <nl> menu_bar_alt_pressed_ ( false ) , <nl> keyboard_event_handler_ ( new views : : UnhandledKeyboardEventHandler ) , <nl> use_content_size_ ( false ) , <nl> gfx : : Size NativeWindowViews : : GetContentSize ( ) { <nl> <nl> gfx : : Size content_size = <nl> window_ - > non_client_view ( ) - > frame_view ( ) - > GetBoundsForClientView ( ) . size ( ) ; <nl> - if ( menu_bar_ & & menu_bar_show_ ) <nl> + if ( menu_bar_ & & menu_bar_visible_ ) <nl> content_size . set_height ( content_size . height ( ) - kMenuBarHeight ) ; <nl> return content_size ; <nl> } <nl> void NativeWindowViews : : OnWidgetActivationChanged ( <nl> GetWebContents ( ) - > Focus ( ) ; <nl> <nl> / / Hide menu bar when window is blured . <nl> - if ( ! active & & menu_bar_autohide_ & & menu_bar_show_ ) { <nl> + if ( ! active & & menu_bar_autohide_ & & menu_bar_visible_ ) { <nl> SetMenuBarVisibility ( false ) ; <nl> Layout ( ) ; <nl> } <nl> views : : NonClientFrameView * NativeWindowViews : : CreateNonClientFrameView ( <nl> <nl> void NativeWindowViews : : HandleMouseDown ( ) { <nl> / / Hide menu bar when web view is clicked . <nl> - if ( menu_bar_autohide_ & & menu_bar_show_ ) { <nl> + if ( menu_bar_autohide_ & & menu_bar_visible_ ) { <nl> SetMenuBarVisibility ( false ) ; <nl> Layout ( ) ; <nl> } <nl> void NativeWindowViews : : HandleKeyboardEvent ( <nl> event . modifiers = = 0 & & menu_bar_alt_pressed_ ) { <nl> / / When a single Alt is released right after a Alt is pressed : <nl> menu_bar_alt_pressed_ = false ; <nl> - SetMenuBarVisibility ( ! menu_bar_show_ ) ; <nl> + SetMenuBarVisibility ( ! menu_bar_visible_ ) ; <nl> Layout ( ) ; <nl> } else { <nl> / / When any other keys except single Alt have been pressed / released : <nl> gfx : : Rect NativeWindowViews : : ContentBoundsToWindowBounds ( <nl> const gfx : : Rect & bounds ) { <nl> gfx : : Rect window_bounds = <nl> window_ - > non_client_view ( ) - > GetWindowBoundsForClientBounds ( bounds ) ; <nl> - if ( menu_bar_ & & menu_bar_show_ ) <nl> + if ( menu_bar_ & & menu_bar_visible_ ) <nl> window_bounds . set_height ( window_bounds . height ( ) + kMenuBarHeight ) ; <nl> return window_bounds ; <nl> } <nl> void NativeWindowViews : : SetMenuBarVisibility ( bool visible ) { <nl> if ( ! menu_bar_ ) <nl> return ; <nl> <nl> - menu_bar_show_ = visible ; <nl> + menu_bar_visible_ = visible ; <nl> if ( visible ) { <nl> DCHECK_EQ ( child_count ( ) , 1 ) ; <nl> AddChildView ( menu_bar_ . get ( ) ) ; <nl> mmm a / atom / browser / native_window_views . h <nl> ppp b / atom / browser / native_window_views . h <nl> class NativeWindowViews : public NativeWindow , <nl> <nl> scoped_ptr < MenuBar > menu_bar_ ; <nl> bool menu_bar_autohide_ ; <nl> - bool menu_bar_show_ ; <nl> + bool menu_bar_visible_ ; <nl> bool menu_bar_alt_pressed_ ; <nl> <nl> # if defined ( USE_X11 ) <nl>
menu_bar_show_ = > menu_bar_visible_
electron/electron
8cc49ffa80ab909cce3c7c1f8051f71d4e5c64db
2014-08-07T08:48:30Z
mmm a / lib / SimpleHttpClient / SimpleHttpClient . cpp <nl> ppp b / lib / SimpleHttpClient / SimpleHttpClient . cpp <nl> namespace triagens { <nl> <nl> / / we need to read a at least one byte to make progress <nl> bool progress ; <nl> + std : : cout < < " ReadBufV : " < < ( unsigned long ) _readBuffer . c_str ( ) < < " " <nl> + < < _readBuffer . length ( ) < < " " <nl> + < < _readBufferOffset < < std : : endl ; <nl> + <nl> bool res = _connection - > handleRead ( remainingTime , _readBuffer , progress ) ; <nl> <nl> + std : : cout < < " ReadBufN : " < < ( unsigned long ) _readBuffer . c_str ( ) < < " " <nl> + < < _readBuffer . length ( ) < < " " <nl> + < < _readBufferOffset < < std : : endl ; <nl> + <nl> / / If there was an error , then we are doomed : <nl> if ( ! res ) { <nl> std : : cout < < " doomed \ n " ; <nl> namespace triagens { <nl> _readBuffer . clear ( ) ; <nl> _readBufferOffset = 0 ; <nl> <nl> + std : : cout < < " ReadBufC : " < < ( unsigned long ) _readBuffer . c_str ( ) < < " " <nl> + < < _readBuffer . length ( ) < < " " <nl> + < < _readBufferOffset < < std : : endl ; <nl> if ( _result ) { <nl> _result - > clear ( ) ; <nl> } <nl> namespace triagens { <nl> / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> <nl> void SimpleHttpClient : : processHeader ( ) { <nl> + TRI_ASSERT ( _readBufferOffset < = _readBuffer . length ( ) ) ; <nl> size_t remain = _readBuffer . length ( ) - _readBufferOffset ; <nl> char const * ptr = _readBuffer . c_str ( ) + _readBufferOffset ; <nl> char const * pos = ( char * ) memchr ( ptr , ' \ n ' , remain ) ; <nl> <nl> + / / We enforce the following invariants : <nl> + / / ptr = _readBuffer . c_str ( ) + _readBufferOffset <nl> + / / _readBuffer . length ( ) > = _readBufferOffset <nl> + / / remain = _readBuffer . length ( ) - _readBufferOffset <nl> while ( pos ) { <nl> + TRI_ASSERT ( _readBufferOffset < = _readBuffer . length ( ) ) ; <nl> + TRI_ASSERT ( ptr = = _readBuffer . c_str ( ) + _readBufferOffset ) ; <nl> + TRI_ASSERT ( remain = = _readBuffer . length ( ) - _readBufferOffset ) ; <nl> + <nl> if ( pos > ptr & & * ( pos - 1 ) = = ' \ r ' ) { <nl> / / adjust eol position <nl> - - pos ; <nl> namespace triagens { <nl> <nl> / / end of header found <nl> if ( * ptr = = ' \ r ' | | * ptr = = ' \ 0 ' ) { <nl> - size_t len = pos - ( _readBuffer . c_str ( ) + _readBufferOffset ) ; <nl> - _readBufferOffset + = ( len + 1 ) ; <nl> + size_t len = pos - ptr ; <nl> + _readBufferOffset + = len + 1 ; <nl> + ptr + = len + 1 ; <nl> + remain - = len + 1 ; <nl> <nl> if ( * pos = = ' \ r ' ) { <nl> / / adjust offset if line ended with \ r \ n <nl> + + _readBufferOffset ; <nl> + ptr + + ; <nl> + remain - - ; <nl> } <nl> <nl> / / handle chunks <nl> namespace triagens { <nl> if ( ! _keepAlive ) { <nl> _connection - > disconnect ( ) ; <nl> } <nl> + return ; <nl> } <nl> <nl> / / found content - length header in response <nl> namespace triagens { <nl> } <nl> <nl> ptr + = len + 1 ; <nl> - <nl> - TRI_ASSERT ( remain > = ( len + 1 ) ) ; <nl> + _readBufferOffset + = len + 1 ; <nl> remain - = ( len + 1 ) ; <nl> - <nl> + <nl> + TRI_ASSERT ( _readBufferOffset < = _readBuffer . length ( ) ) ; <nl> + TRI_ASSERT ( ptr = = _readBuffer . c_str ( ) + _readBufferOffset ) ; <nl> + TRI_ASSERT ( remain = = _readBuffer . length ( ) - _readBufferOffset ) ; <nl> pos = ( char * ) memchr ( ptr , ' \ n ' , remain ) ; <nl> <nl> if ( pos = = nullptr ) { <nl> - _readBufferOffset = ptr - _readBuffer . 
c_str ( ) + 1 ; <nl> + _readBufferOffset + + ; <nl> + ptr + + ; <nl> + remain - - ; <nl> } <nl> } <nl> } <nl>
Introduce invariants in processHeader .
arangodb/arangodb
8182539b66266534eac21873587079dda84654a8
2014-12-03T15:46:18Z
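The SimpleHttpClient fix works by keeping three views of the read buffer in lockstep and asserting their relationship at the top of each loop pass. Stripped of the HTTP parsing, the discipline looks like this (a sketch under the same invariants, not ArangoDB's code):

    #include <cassert>
    #include <cstring>
    #include <string>

    // Scan buf line by line while maintaining the invariants the commit asserts:
    //   ptr    == buf.c_str() + offset
    //   remain == buf.length() - offset
    void scan_lines(std::string const& buf, size_t& offset) {
      char const* ptr = buf.c_str() + offset;
      size_t remain = buf.length() - offset;
      while (const void* hit = memchr(ptr, '\n', remain)) {
        size_t len = static_cast<const char*>(hit) - ptr;
        // Consume the line plus its '\n', advancing all three cursors together
        // so no later subtraction can underflow or point past the buffer.
        offset += len + 1;
        ptr += len + 1;
        remain -= len + 1;
        assert(ptr == buf.c_str() + offset);
        assert(remain == buf.length() - offset);
      }
    }

Updating offset, ptr, and remain in one place is what lets the new TRI_ASSERTs in processHeader hold across the \r\n adjustments and the chunked-header branch.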
mmm a / docs / source / community / persons_of_interest . rst <nl> ppp b / docs / source / community / persons_of_interest . rst <nl> torch . * <nl> torch . nn <nl> ~ ~ ~ ~ ~ ~ ~ ~ <nl> <nl> - - Thomas Viehmann ( ` t - vi < https : / / github . com / t - vi > ` __ ) <nl> - Adam Paszke ( ` apaszke < https : / / github . com / apaszke > ` __ ) <nl> - Greg Chanan ( ` gchanan < https : / / github . com / gchanan > ` __ ) <nl> - Soumith Chintala ( ` soumith < https : / / github . com / soumith > ` __ ) <nl>
update persons of interest ( )
pytorch/pytorch
7eb0a71484dc5d7e64690f7d8dc0da8d548f9f49
2020-10-06T04:28:00Z
mmm a / utils / gyb_syntax_support / DeclNodes . py <nl> ppp b / utils / gyb_syntax_support / DeclNodes . py <nl> <nl> is_optional = True ) , <nl> ] ) , <nl> <nl> - # infix - operator - group - > ' : ' identifier <nl> + # infix - operator - group - > ' : ' identifier ' , ' ? identifier ? <nl> Node ( ' InfixOperatorGroup ' , kind = ' Syntax ' , <nl> description = ' ' ' <nl> A clause to specify precedence group in infix operator declaration . <nl> <nl> description = ' ' ' <nl> The name of the precedence group for the operator <nl> ' ' ' ) , <nl> + Child ( ' TrailingComma ' , kind = ' CommaToken ' , <nl> + is_optional = True ) , <nl> + Child ( ' ProtocolName ' , kind = ' IdentifierToken ' , <nl> + description = ' ' ' <nl> + The protocol associated with the operator <nl> + ' ' ' , <nl> + is_optional = True ) , <nl> ] ) , <nl> <nl> # precedence - group - decl - > attributes ? modifiers ? ' precedencegroup ' <nl>
Merge remote - tracking branch ' origin / master ' into master - next
apple/swift
d45f9cac9e8667acf6e0090b9502b289204db86f
2018-09-18T15:29:13Z
new file mode 100644 <nl> index 000000000000 . . 68102d613531 <nl> mmm / dev / null <nl> ppp b / jstests / index_check7 . js <nl> <nl> + <nl> + t = db . index_check7 <nl> + t . drop ( ) <nl> + <nl> + for ( var i = 0 ; i < 100 ; i + + ) <nl> + t . save ( { x : i } ) <nl> + <nl> + t . ensureIndex ( { x : 1 } ) <nl> + assert . eq ( 1 , t . find ( { x : 27 } ) . explain ( ) . nscanned , " A " ) <nl> + <nl> + t . ensureIndex ( { x : - 1 } ) <nl> + assert . eq ( 1 , t . find ( { x : 27 } ) . explain ( ) . nscanned , " B " ) <nl> + <nl> + assert . eq ( 41 , t . find ( { x : { $ gt : 59 } } ) . explain ( ) . nscanned , " C " ) ; <nl> + <nl>
test for SERVER - 486
mongodb/mongo
c6420d62e0748d5df85cc0499126c3e797c3f6cc
2009-12-15T02:10:37Z
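The new jstest's expected counts can be verified by hand: the collection holds x = 0..99, so the equality probes touch a single index key, while { x : { $gt : 59 } } matches the 40 values 60..99. The assertion expects nscanned == 41, one more than the match count, presumably because the range scan also visits the boundary key where it starts. A quick check of the arithmetic:

    #include <cassert>

    int main() {
      int matches = 0;
      for (int x = 0; x < 100; ++x)  // the test saves { x : i } for i = 0..99
        if (x > 59) ++matches;
      assert(matches == 40);  // 41 in the test = 40 matches + 1 boundary key
      return 0;
    }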
mmm a / modules / core / test / ocl / test_arithm . cpp <nl> ppp b / modules / core / test / ocl / test_arithm . cpp <nl> OCL_TEST_P ( Flip , X ) <nl> <nl> OCL_OFF ( cv : : flip ( src1_roi , dst1_roi , 0 ) ) ; <nl> OCL_ON ( cv : : flip ( usrc1_roi , udst1_roi , 0 ) ) ; <nl> - Near ( 1e - 5 ) ; <nl> + Near ( 0 ) ; <nl> } <nl> } <nl> <nl> OCL_TEST_P ( Flip , Y ) <nl> <nl> OCL_OFF ( cv : : flip ( src1_roi , dst1_roi , 1 ) ) ; <nl> OCL_ON ( cv : : flip ( usrc1_roi , udst1_roi , 1 ) ) ; <nl> - Near ( 1e - 5 ) ; <nl> + Near ( 0 ) ; <nl> } <nl> } <nl> <nl> OCL_TEST_P ( Flip , BOTH ) <nl> <nl> OCL_OFF ( cv : : flip ( src1_roi , dst1_roi , - 1 ) ) ; <nl> OCL_ON ( cv : : flip ( usrc1_roi , udst1_roi , - 1 ) ) ; <nl> - Near ( 1e - 5 ) ; <nl> + Near ( 0 ) ; <nl> } <nl> } <nl> <nl>
Fixed test for ocl_flip
opencv/opencv
f5a01f15a59c06989801bd2e5a5aeab6f25bbdc2
2013-12-06T11:53:00Z
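Tightening the flip tests from Near(1e-5) to Near(0) is sound because flipping never computes pixel values; it only permutes indices, so a correct OpenCL kernel must agree with the CPU path bit for bit. A toy single-channel version shows why (illustrative only, not OpenCV's kernel):

    #include <vector>

    // Horizontal flip as a pure index permutation over a w x h image: every
    // output byte is a copy of an input byte, never an arithmetic result, so
    // two correct implementations must produce identical buffers.
    void flip_horizontal(const std::vector<unsigned char>& src,
                         std::vector<unsigned char>& dst, int w, int h) {
      dst.resize(src.size());
      for (int y = 0; y < h; ++y)
        for (int x = 0; x < w; ++x)
          dst[y * w + (w - 1 - x)] = src[y * w + x];
    }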
mmm a / atom / browser / api / atom_api_app . cc <nl> ppp b / atom / browser / api / atom_api_app . cc <nl> void App : : BuildPrototype ( <nl> base : : Bind ( & Browser : : SetUserActivity , browser ) ) <nl> . SetMethod ( " getCurrentActivityType " , <nl> base : : Bind ( & Browser : : GetCurrentActivityType , browser ) ) <nl> + . SetMethod ( " setAboutPanelOptions " , <nl> + base : : Bind ( & Browser : : SetAboutPanelOptions , browser ) ) <nl> # endif <nl> # if defined ( OS_WIN ) <nl> . SetMethod ( " setUserTasks " , base : : Bind ( & Browser : : SetUserTasks , browser ) ) <nl> mmm a / atom / browser / browser . h <nl> ppp b / atom / browser / browser . h <nl> <nl> # include " base / macros . h " <nl> # include " base / observer_list . h " <nl> # include " base / strings / string16 . h " <nl> + # include " base / values . h " <nl> # include " native_mate / arguments . h " <nl> <nl> # if defined ( OS_WIN ) <nl> <nl> # endif <nl> <nl> namespace base { <nl> - class DictionaryValue ; <nl> class FilePath ; <nl> } <nl> <nl> class Browser : public WindowListObserver { <nl> <nl> / / Set docks ' icon . <nl> void DockSetIcon ( const gfx : : Image & image ) ; <nl> + <nl> + void ShowAboutPanel ( ) ; <nl> + void SetAboutPanelOptions ( const base : : DictionaryValue & options ) ; <nl> # endif / / defined ( OS_MACOSX ) <nl> <nl> # if defined ( OS_WIN ) <nl> class Browser : public WindowListObserver { <nl> base : : string16 app_user_model_id_ ; <nl> # endif <nl> <nl> + # if defined ( OS_MACOSX ) <nl> + base : : DictionaryValue about_panel_options_ ; <nl> + # endif <nl> + <nl> DISALLOW_COPY_AND_ASSIGN ( Browser ) ; <nl> } ; <nl> <nl> mmm a / atom / browser / browser_mac . mm <nl> ppp b / atom / browser / browser_mac . mm <nl> <nl> setApplicationIconImage : image . AsNSImage ( ) ] ; <nl> } <nl> <nl> + void Browser : : ShowAboutPanel ( ) { <nl> + NSDictionary * options = DictionaryValueToNSDictionary ( about_panel_options_ ) ; <nl> + [ [ AtomApplication sharedApplication ] <nl> + orderFrontStandardAboutPanelWithOptions : options ] ; <nl> + } <nl> + <nl> + void Browser : : SetAboutPanelOptions ( const base : : DictionaryValue & options ) { <nl> + about_panel_options_ . Clear ( ) ; <nl> + about_panel_options_ . MergeDictionary ( & options ) ; <nl> + } <nl> + <nl> } / / namespace atom <nl> mmm a / atom / browser / mac / atom_application . mm <nl> ppp b / atom / browser / mac / atom_application . mm <nl> - ( void ) updateAccessibilityEnabled : ( BOOL ) enabled { <nl> atom : : Browser : : Get ( ) - > OnAccessibilitySupportChanged ( ) ; <nl> } <nl> <nl> + - ( void ) orderFrontStandardAboutPanel : ( id ) sender { <nl> + atom : : Browser : : Get ( ) - > ShowAboutPanel ( ) ; <nl> + } <nl> + <nl> @ end <nl>
Support setting about panel options
electron/electron
7de6a06acffee1568af9aa37d17c57fcc0140d87
2016-10-10T20:30:58Z
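The macOS about-panel support is a store-then-apply pattern: setAboutPanelOptions only clears and merges into a cached DictionaryValue, and ShowAboutPanel converts that cache to an NSDictionary when the panel actually opens. Reduced to standard C++ with a plain map standing in for the Chromium types (a sketch, not Electron's code):

    #include <map>
    #include <string>

    class AboutPanel {
      std::map<std::string, std::string> options_;  // cached until the panel opens
     public:
      // Mirror Browser::SetAboutPanelOptions: wipe the cache, then absorb the
      // caller's values, so repeated calls replace rather than accumulate.
      void set_options(const std::map<std::string, std::string>& options) {
        options_.clear();
        options_.insert(options.begin(), options.end());
      }
      // Mirror ShowAboutPanel: hand the accumulated options to the dialog.
      const std::map<std::string, std::string>& options() const { return options_; }
    };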
mmm a / src / trap - handler / handler - outside . cc <nl> ppp b / src / trap - handler / handler - outside . cc <nl> bool RegisterDefaultTrapHandler ( ) { return false ; } <nl> void RemoveTrapHandler ( ) { } <nl> # endif <nl> <nl> - bool g_is_trap_handler_enabled ; <nl> + bool g_is_trap_handler_enabled = false ; <nl> + bool g_can_enable_trap_handler = true ; <nl> <nl> bool EnableTrapHandler ( bool use_v8_handler ) { <nl> + if ( ! g_can_enable_trap_handler ) { <nl> + / / Enabling the trap handler after IsTrapHandlerEnabled was called can lead <nl> + / / to problems because code or objects might have been generated under the <nl> + / / assumption that trap handlers are disabled . <nl> + FATAL ( " EnableTrapHandler called after IsTrapHandlerEnabled " ) ; <nl> + } <nl> + / / We should only enable the trap handler once . <nl> + g_can_enable_trap_handler = false ; <nl> + CHECK ( ! g_is_trap_handler_enabled ) ; <nl> if ( ! V8_TRAP_HANDLER_SUPPORTED ) { <nl> return false ; <nl> } <nl> mmm a / src / trap - handler / trap - handler . h <nl> ppp b / src / trap - handler / trap - handler . h <nl> void V8_EXPORT_PRIVATE ReleaseHandlerData ( int index ) ; <nl> # endif <nl> <nl> extern bool g_is_trap_handler_enabled ; <nl> + extern bool g_can_enable_trap_handler ; <nl> + <nl> / / Enables trap handling for WebAssembly bounds checks . <nl> / / <nl> / / use_v8_handler indicates that V8 should install its own handler <nl> V8_EXPORT_PRIVATE bool EnableTrapHandler ( bool use_v8_handler ) ; <nl> <nl> inline bool IsTrapHandlerEnabled ( ) { <nl> DCHECK_IMPLIES ( g_is_trap_handler_enabled , V8_TRAP_HANDLER_SUPPORTED ) ; <nl> + / / Disallow enabling the trap handler after retrieving the current value . <nl> + / / Re - enabling them late can produce issues because code or objects might have <nl> + / / been generated under the assumption that trap handlers are disabled . <nl> + g_can_enable_trap_handler = false ; <nl> return g_is_trap_handler_enabled ; <nl> } <nl> <nl> mmm a / test / unittests / wasm / trap - handler - x64 - unittest . cc <nl> ppp b / test / unittests / wasm / trap - handler - x64 - unittest . cc <nl> class TrapHandlerTest : public TestWithIsolate , <nl> public : : testing : : WithParamInterface < TrapHandlerStyle > { <nl> protected : <nl> void SetUp ( ) override { <nl> - CHECK ( trap_handler : : EnableTrapHandler ( false ) ) ; <nl> + InstallFallbackHandler ( ) ; <nl> + SetupTrapHandler ( GetParam ( ) ) ; <nl> backing_store_ = BackingStore : : AllocateWasmMemory ( i_isolate ( ) , 1 , 1 , <nl> SharedFlag : : kNotShared ) ; <nl> CHECK ( backing_store_ ) ; <nl> class TrapHandlerTest : public TestWithIsolate , <nl> GetRandomMmapAddr ( ) ) ; <nl> <nl> InitRecoveryCode ( ) ; <nl> + } <nl> <nl> + void InstallFallbackHandler ( ) { <nl> # if V8_OS_LINUX | | V8_OS_MACOSX | | V8_OS_FREEBSD <nl> / / Set up a signal handler to recover from the expected crash . <nl> struct sigaction action ; <nl> class TrapHandlerTest : public TestWithIsolate , <nl> } <nl> # endif <nl> <nl> - public : <nl> void SetupTrapHandler ( TrapHandlerStyle style ) { <nl> bool use_default_handler = style = = kDefault ; <nl> g_use_as_first_chance_handler = ! 
use_default_handler ; <nl> CHECK ( v8 : : V8 : : EnableWebAssemblyTrapHandler ( use_default_handler ) ) ; <nl> } <nl> <nl> + public : <nl> void GenerateSetThreadInWasmFlagCode ( MacroAssembler * masm ) { <nl> masm - > Move ( scratch , <nl> i_isolate ( ) - > thread_local_top ( ) - > thread_in_wasm_flag_address_ , <nl> TEST_P ( TrapHandlerTest , TestTrapHandlerRecovery ) { <nl> CodeDesc desc ; <nl> masm . GetCode ( nullptr , & desc ) ; <nl> <nl> - SetupTrapHandler ( GetParam ( ) ) ; <nl> trap_handler : : ProtectedInstructionData protected_instruction { crash_offset , <nl> recovery_offset } ; <nl> trap_handler : : RegisterHandlerData ( reinterpret_cast < Address > ( desc . buffer ) , <nl> TEST_P ( TrapHandlerTest , TestReleaseHandlerData ) { <nl> reinterpret_cast < Address > ( desc . buffer ) , desc . instr_size , 1 , <nl> & protected_instruction ) ; <nl> <nl> - SetupTrapHandler ( GetParam ( ) ) ; <nl> - <nl> ExecuteBuffer ( ) ; <nl> <nl> / / Deregister from the trap handler . The trap handler should not do the <nl> TEST_P ( TrapHandlerTest , TestNoThreadInWasmFlag ) { <nl> trap_handler : : RegisterHandlerData ( reinterpret_cast < Address > ( desc . buffer ) , <nl> desc . instr_size , 1 , & protected_instruction ) ; <nl> <nl> - SetupTrapHandler ( GetParam ( ) ) ; <nl> - <nl> ExecuteExpectCrash ( buffer_ . get ( ) ) ; <nl> } <nl> <nl> TEST_P ( TrapHandlerTest , TestCrashInWasmNoProtectedInstruction ) { <nl> trap_handler : : RegisterHandlerData ( reinterpret_cast < Address > ( desc . buffer ) , <nl> desc . instr_size , 1 , & protected_instruction ) ; <nl> <nl> - SetupTrapHandler ( GetParam ( ) ) ; <nl> - <nl> ExecuteExpectCrash ( buffer_ . get ( ) ) ; <nl> } <nl> <nl> TEST_P ( TrapHandlerTest , TestCrashInWasmWrongCrashType ) { <nl> trap_handler : : RegisterHandlerData ( reinterpret_cast < Address > ( desc . buffer ) , <nl> desc . instr_size , 1 , & protected_instruction ) ; <nl> <nl> - SetupTrapHandler ( GetParam ( ) ) ; <nl> - <nl> # if V8_OS_POSIX <nl> / / On Posix , the V8 default trap handler does not register for SIGFPE , <nl> / / therefore the thread - in - wasm flag is never reset in this test . We <nl> TEST_P ( TrapHandlerTest , TestCrashInOtherThread ) { <nl> trap_handler : : RegisterHandlerData ( reinterpret_cast < Address > ( desc . buffer ) , <nl> desc . instr_size , 1 , & protected_instruction ) ; <nl> <nl> - SetupTrapHandler ( GetParam ( ) ) ; <nl> - <nl> CodeRunner runner ( this , buffer_ . get ( ) ) ; <nl> CHECK ( ! GetThreadInWasmFlag ( ) ) ; <nl> / / Set the thread - in - wasm flag manually in this thread . <nl>
[ wasm ] Disallow late enabling of trap handlers
v8/v8
bcb0a7c5c5e13bb895391c63ec30d048f6f58dd1
2020-10-26T09:31:36Z
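The v8 commit installs a freeze-on-first-read latch: the first call to IsTrapHandlerEnabled permanently forbids a later EnableTrapHandler, since callers may already have generated code under the value they observed. The latch in isolation (a sketch of the pattern; the real logic is in the diff above):

    #include <cstdlib>

    bool g_enabled = false;
    bool g_can_enable = true;

    bool is_enabled() {
      g_can_enable = false;  // the first read freezes the setting for good
      return g_enabled;
    }

    bool enable() {
      if (!g_can_enable) std::abort();  // too late: someone already observed it
      g_can_enable = false;             // enabling is a one-shot operation
      g_enabled = true;
      return true;
    }

Note the pair of plain bools is not synchronized; the scheme presumably relies on enabling happening during single-threaded startup, before any reader runs.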
mmm a / docs / proposals / ArrayBridge . rst <nl> ppp b / docs / proposals / ArrayBridge . rst <nl> <nl> : orphan : <nl> <nl> - . . = = = mmm ArrayBridge . rst - Proposal for Bridging Swift Array and NSArray mmm = = = . . <nl> + . . = = = - - ArrayBridge . rst - Proposal for Bridging Swift Array and NSArray - - = = = . . <nl> . . <nl> . . This source file is part of the Swift . org open source project <nl> . . <nl> <nl> . . See http : / / swift . org / LICENSE . txt for license information <nl> . . See http : / / swift . org / CONTRIBUTORS . txt for the list of Swift project authors <nl> . . <nl> - . . = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = . . <nl> + . . = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm = = = . . <nl> <nl> = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> Bridging Swift Arrays to / from Cocoa <nl> <nl> Basic Requirements <nl> = = = = = = = = = = = = = = = = = = <nl> <nl> - A successfully - bridged array type would be both “ great for Cocoa ” and <nl> - “ great for C . ” <nl> + A successfully - bridged array type would be both " great for Cocoa " and <nl> + " great for C . " <nl> <nl> - Being “ great for Cocoa ” means this must work and be efficient : : <nl> + Being " great for Cocoa " means this must work and be efficient : : <nl> <nl> var a = [ cocoaObject1 , cocoaObject2 ] <nl> someCocoaObject . passAnNSArray ( a ) <nl> Being “ great for Cocoa ” means this must work and be efficient : : <nl> <nl> var c : AnyObject [ ] = someNSWindow . views <nl> <nl> - Being “ great For C ” means that an array created in Swift should have <nl> + Being " great For C " means that an array created in Swift should have <nl> C - like performance and must be representable as a base pointer and <nl> length , for interaction with C APIs , at zero cost . <nl> <nl> as failing to satisfy the requirements . <nl> We considered the earlier proposal by Joe that would make ` ` T [ ] ` ` a <nl> ( hand - rolled ) existential wrapper type . Among other things , we felt <nl> this approach would expose multiple array types too prominently and <nl> - would tend to “ bless ” an inappropriately - specific protocol as the <nl> + would tend to " bless " an inappropriately - specific protocol as the <nl> generic collection interface ( for example , a generic collection should <nl> not be indexable with ` ` Int ` ` ) . <nl> <nl>
fit in 80 columns and convert awesome quotes to ascii quotes .
apple/swift
7e16637994185cb243daa5d93cf5615f3e739d69
2014-02-10T04:17:05Z
mmm a / code / mathematical - algorithms / Check_is_square / FindingISquare_binary_search . cpp <nl> ppp b / code / mathematical - algorithms / Check_is_square / FindingISquare_binary_search . cpp <nl> <nl> - # include < bits / stdc + + . h > <nl> + # include < iostream > <nl> using namespace std ; <nl> <nl> typedef long long int lld ; <nl> bool IsSquare ( lld number ) <nl> if ( number = = 0 | | number = = 1 ) <nl> return true ; <nl> while ( min < max ) { <nl> - if ( mid * mid > number ) <nl> - { <nl> - max = mid - 1 ; <nl> - mid = min + ( max - min ) / 2 ; <nl> - } <nl> - else if ( mid * mid < number ) <nl> - { <nl> - min = mid + 1 ; <nl> - mid = min + ( max - min ) / 2 ; <nl> + if ( mid * mid > number ) <nl> + { <nl> + max = mid - 1 ; <nl> + mid = min + ( max - min ) / 2 ; <nl> + } <nl> + else if ( mid * mid < number ) <nl> + { <nl> + min = mid + 1 ; <nl> + mid = min + ( max - min ) / 2 ; <nl> <nl> - } <nl> - if ( mid * mid = = number ) <nl> - return true ; <nl> + } <nl> + if ( mid * mid = = number ) <nl> + return true ; <nl> <nl> } <nl> return false ; <nl> bool IsSquare ( lld number ) <nl> int main ( ) { <nl> <nl> int t ; <nl> - cout < < " Enter the Number of testcases : " < < ' \ n ' ; <nl> + cout < < " Enter the number of testcases : " < < ' \ n ' ; <nl> cin > > t ; <nl> lld number ; <nl> while ( t - - ) <nl> { <nl> cin > > number ; <nl> - bool result = IsSquare ( number ) ; <nl> - if ( result ) <nl> - { <nl> + if ( IsSquare ( number ) ) <nl> cout < < " Natural Square number " < < ' \ n ' ; <nl> - } <nl> else <nl> - { <nl> cout < < " Not a Natural Square " < < ' \ n ' ; <nl> - } <nl> } <nl> return 0 ; <nl> } <nl>
added changes after review
OpenGenus/cosmos
9cceef430ecb903735594343bb55616ea7a47603
2017-10-23T10:47:00Z
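The cosmos file tests for perfect squares by bisecting on the candidate root. A tighter standalone variant of the same idea, which also sidesteps the mid * mid overflow risk by comparing against n / mid first (a sketch, not the repository's code):

    #include <cstdint>

    bool is_perfect_square(std::int64_t n) {
      if (n < 0) return false;
      if (n < 2) return true;            // 0 and 1 are squares
      std::int64_t lo = 1, hi = n / 2;   // sqrt(n) <= n / 2 for n >= 2
      while (lo <= hi) {
        std::int64_t mid = lo + (hi - lo) / 2;
        if (mid > n / mid) {
          hi = mid - 1;                  // mid * mid would exceed n (or overflow)
        } else if (mid * mid == n) {
          return true;                   // here mid * mid <= n, so no overflow
        } else {
          lo = mid + 1;
        }
      }
      return false;
    }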
mmm a / env / io_posix . cc <nl> ppp b / env / io_posix . cc <nl> <nl> # include < algorithm > <nl> # if defined ( OS_LINUX ) <nl> # include < linux / fs . h > <nl> + # ifndef FALLOC_FL_KEEP_SIZE <nl> # include < linux / falloc . h > <nl> # endif <nl> + # endif <nl> # include < stdio . h > <nl> # include < stdlib . h > <nl> # include < string . h > <nl>
Fixed FALLOC_FL_KEEP_SIZE undefined ( )
facebook/rocksdb
cfc20019d198ca0eb8e749aec968d5e870be4c25
2019-09-06T00:37:21Z
mmm a / test / IDE / print_stdlib . swift <nl> ppp b / test / IDE / print_stdlib . swift <nl> <nl> / / Make sure we don ' t crash while printing the standard library . <nl> / / <nl> - / / RUN : % swift - ide - test - print - module - module - to - print = Swift - source - filename % s - skip - private - stdlib - decls - fully - qualified - types - if - ambiguous - synthesize - sugar - on - types > % t . txt <nl> + / / RUN : % swift - ide - test - print - module - module - to - print = Swift - source - filename % s - accessibility - filter - public - skip - private - stdlib - decls - fully - qualified - types - if - ambiguous - synthesize - sugar - on - types > % t . txt <nl> / / RUN : FileCheck - check - prefix = CHECK - ARGC - input - file % t . txt % s <nl> / / RUN : FileCheck - input - file % t . txt % s <nl> / / RUN : FileCheck - check - prefix = CHECK - SUGAR - input - file % t . txt % s <nl>
stdlib printing test : only print public declarations
apple/swift
36d41e04c4328c95f7666fa30f6b94f4e8b3d577
2014-07-23T16:15:11Z
mmm a / docs / en / sql - reference / statements / alter / partition . md <nl> ppp b / docs / en / sql - reference / statements / alter / partition . md <nl> OPTIMIZE TABLE table_not_partitioned PARTITION tuple ( ) FINAL ; <nl> # # UPDATE \ | DELETE IN PARTITION { # update - delete - in - partition } <nl> <nl> ` ` ` sql <nl> - ALTER TABLE table_name ( UPDATE update_expr ) | ( DELETE ) [ IN PARTITION partition_id ] WHERE where_expr <nl> + ALTER TABLE table_name UPDATE update_expr [ IN PARTITION partition_id ] WHERE where_expr <nl> + ` ` ` <nl> + <nl> + ` ` ` sql <nl> + ALTER TABLE table_name DELETE [ IN PARTITION partition_id ] WHERE where_expr <nl> ` ` ` <nl> <nl> ` IN PARTITION ` specifies the partition to which the [ UPDATE ] ( . . / . . / . . / sql - reference / statements / alter / update . md # alter - table - update - statements ) or [ DELETE ] ( . . / . . / . . / sql - reference / statements / alter / delete . md # alter - mutations ) expressions will be applied as a result of the query ` ALTER TABLE ` . New parts will be created only from the specified partition . <nl>
Update partition.md
ClickHouse/ClickHouse
3db2d4d0acb4463f28ae449f0767795d7da0a81d
2020-11-28T10:07:10Z
mmm a / hphp / runtime / vm / jit / code - gen - x64 . cpp <nl> ppp b / hphp / runtime / vm / jit / code - gen - x64 . cpp <nl> PhysReg CodeGenerator : : prepXMMReg ( Asm & as , const SSATmp * src , <nl> <nl> void CodeGenerator : : doubleCmp ( Asm & a , RegXMM xmmReg0 , RegXMM xmmReg1 ) { <nl> a . ucomisd ( xmmReg0 , xmmReg1 ) ; <nl> - Label notPF ; <nl> - a . jnp8 ( notPF ) ; <nl> - / / PF means the doubles were unordered . We treat this as ! equal , so <nl> - / / clear ZF . <nl> - a . orq ( 1 , m_rScratch ) ; <nl> - asm_label ( a , notPF ) ; <nl> + ifThen ( a , CC_P , [ & ] { <nl> + / / PF means the doubles were unordered . We treat this as ! equal , so <nl> + / / clear ZF . <nl> + a . orq ( 1 , m_rScratch ) ; <nl> + } ) ; <nl> } <nl> <nl> void CodeGenerator : : emitCompare ( IRInstruction * inst ) { <nl> void CodeGenerator : : cgAssertNonNull ( IRInstruction * inst ) { <nl> auto srcReg = srcLoc ( 0 ) . reg ( ) ; <nl> auto dstReg = dstLoc ( 0 ) . reg ( ) ; <nl> if ( RuntimeOption : : EvalHHIRGenerateAsserts ) { <nl> - Label nonNull ; <nl> m_as . testq ( srcReg , srcReg ) ; <nl> - m_as . jne8 ( nonNull ) ; <nl> - m_as . ud2 ( ) ; <nl> - asm_label ( m_as , nonNull ) ; <nl> + ifThen ( m_as , CC_Z , [ & ] { <nl> + m_as . ud2 ( ) ; <nl> + } ) ; <nl> } <nl> emitMovRegReg ( m_as , srcReg , dstReg ) ; <nl> } <nl> void CodeGenerator : : cgConvDblToInt ( IRInstruction * inst ) { <nl> <nl> ifThen ( a , CC_B , [ & ] { <nl> / / src0 > 0 ( CF = 1 - > less than 0 or unordered ) <nl> - Label isUnordered ; <nl> - a . jp8 ( isUnordered ) ; <nl> - <nl> - emitLoadImm ( a , maxULongAsDouble , rCgXMM1 ) ; <nl> - <nl> - a . ucomisd ( rCgXMM1 , srcReg ) ; <nl> - <nl> - ifThenElse ( a , CC_B , [ & ] { <nl> - / / src0 > ULONG_MAX <nl> - a . xorq ( dstReg , dstReg ) ; <nl> - <nl> - } , [ & ] { <nl> - / / 0 < src0 < = ULONG_MAX <nl> - emitLoadImm ( a , maxLongAsDouble , rCgXMM1 ) ; <nl> - emitMovRegReg ( a , srcReg , rCgXMM0 ) ; <nl> - <nl> - / / we know that LONG_MAX < src0 < = UINT_MAX , therefore , <nl> - / / 0 < src0 - ULONG_MAX < = LONG_MAX <nl> - a . subsd ( rCgXMM1 , rCgXMM0 ) ; <nl> - a . cvttsd2siq ( rCgXMM0 , dstReg ) ; <nl> - <nl> - / / We want to simulate integer overflow so we take the resulting integer <nl> - / / and flip its sign bit ( NB : we don ' t use orq here because it ' s <nl> - / / possible that src0 = = LONG_MAX in which case cvttsd2siq will yeild <nl> - / / an indefiniteInteger , which we would like to make zero ) <nl> - a . xorq ( rIndef , dstReg ) ; <nl> + ifThen ( a , CC_NP , [ & ] { <nl> + emitLoadImm ( a , maxULongAsDouble , rCgXMM1 ) ; <nl> + a . ucomisd ( rCgXMM1 , srcReg ) ; <nl> + ifThenElse ( a , CC_B , [ & ] { <nl> + / / src0 > ULONG_MAX <nl> + a . xorq ( dstReg , dstReg ) ; <nl> + <nl> + } , [ & ] { <nl> + / / 0 < src0 < = ULONG_MAX <nl> + emitLoadImm ( a , maxLongAsDouble , rCgXMM1 ) ; <nl> + emitMovRegReg ( a , srcReg , rCgXMM0 ) ; <nl> + <nl> + / / we know that LONG_MAX < src0 < = UINT_MAX , therefore , <nl> + / / 0 < src0 - ULONG_MAX < = LONG_MAX <nl> + a . subsd ( rCgXMM1 , rCgXMM0 ) ; <nl> + a . cvttsd2siq ( rCgXMM0 , dstReg ) ; <nl> + <nl> + / / We want to simulate integer overflow so we take the resulting <nl> + / / integer and flip its sign bit ( NB : we don ' t use orq here <nl> + / / because it ' s possible that src0 = = LONG_MAX in which case <nl> + / / cvttsd2siq will yeild an indefiniteInteger , which we would <nl> + / / like to make zero ) <nl> + a . 
xorq ( rIndef , dstReg ) ; <nl> + } ) ; <nl> } ) ; <nl> - <nl> - asm_label ( a , isUnordered ) ; <nl> } ) ; <nl> } ) ; <nl> } <nl>
Use ifThen in a few more places, instead of Label/asm_label
facebook/hhvm
a5bbf3617acc675a04cbcbacc9da0109506f275d
2014-04-16T21:13:19Z
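The commit above replaces hand-rolled `Label`/`jcc8`/`asm_label` sequences with the existing `ifThen` helper. A hedged sketch of how such a helper can be shaped, with stand-in `Asm` and condition-code types (HHVM's real assembler API differs):

```cpp
#include <iostream>
#include <string>

// Illustrative stand-ins; HHVM's real Asm and ConditionCode types differ.
enum class CC { Z, NZ, P, NP };

static CC negate(CC cc)
{
    switch (cc)
    {
        case CC::Z:  return CC::NZ;
        case CC::NZ: return CC::Z;
        case CC::P:  return CC::NP;
        case CC::NP: return CC::P;
    }
    return cc;
}

static const char* name(CC cc)
{
    switch (cc)
    {
        case CC::Z:  return "jz";
        case CC::NZ: return "jnz";
        case CC::P:  return "jp";
        case CC::NP: return "jnp";
    }
    return "?";
}

struct Asm
{
    void jcc(CC cc, const std::string& label) { std::cout << "  " << name(cc) << ' ' << label << '\n'; }
    void bind(const std::string& label)       { std::cout << label << ":\n"; }
};

// Shape of the helper the commit switches to: jump over `body` when the
// condition does not hold, emit the body, then bind the skip label.
template <class Body>
void ifThen(Asm& a, CC cc, Body body)
{
    static int n = 0;
    std::string skip = "L" + std::to_string(n++);
    a.jcc(negate(cc), skip);   // skip the block unless `cc` holds
    body();                    // emit the conditional block
    a.bind(skip);
}

int main()
{
    Asm a;
    // Mirrors doubleCmp's use: if PF is set (unordered compare), run the fixup.
    ifThen(a, CC::P, [&] { std::cout << "  orq $1, %scratch\n"; });
}
```

The helper scopes the label and its bind point inside one call, so the label bookkeeping that the removed code did by hand cannot drift out of sync with the branch.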
new file mode 100644 <nl> index 00000000000 . . 3853698b22c <nl> mmm / dev / null <nl> ppp b / dbms / src / Storages / MergeTree / registerStorageMergeTree . cpp <nl> <nl> + # include < Storages / StorageFactory . h > <nl> + # include < Storages / StorageMergeTree . h > <nl> + # include < Storages / StorageReplicatedMergeTree . h > <nl> + <nl> + <nl> + namespace DB <nl> + { <nl> + <nl> + static void create ( <nl> + ASTs & args , <nl> + const String & data_path , <nl> + const String & table_name , <nl> + const String & database_name , <nl> + Context & local_context , <nl> + Context & context , <nl> + const NamesAndTypesList & columns , <nl> + const NamesAndTypesList & materialized_columns , <nl> + const NamesAndTypesList & alias_columns , <nl> + const ColumnDefaults & column_defaults , <nl> + bool attach , <nl> + bool has_force_restore_data_flag ) <nl> + { <nl> + / * * [ Replicated ] [ | Summing | Collapsing | Aggregating | Replacing | Graphite ] MergeTree ( 2 * 7 combinations ) engines <nl> + * The argument for the engine should be : <nl> + * - ( for Replicated ) The path to the table in ZooKeeper <nl> + * - ( for Replicated ) Replica name in ZooKeeper <nl> + * - the name of the column with the date ; <nl> + * - ( optional ) expression for sampling <nl> + * ( the query with ` SAMPLE x ` will select rows that have a lower value in this column than ` x * UINT32_MAX ` ) ; <nl> + * - an expression for sorting ( either a scalar expression or a tuple of several ) ; <nl> + * - index_granularity ; <nl> + * - ( for Collapsing ) the name of Int8 column that contains ` sign ` type with the change of " visit " ( taking values 1 and - 1 ) . <nl> + * For example : ENGINE = ReplicatedCollapsingMergeTree ( ' / tables / mytable ' , ' rep02 ' , EventDate , ( CounterID , EventDate , intHash32 ( UniqID ) , VisitID ) , 8192 , Sign ) . <nl> + * - ( for Summing , optional ) a tuple of columns to be summed . If not specified , all numeric columns that are not included in the primary key are used . <nl> + * - ( for Replacing , optional ) the column name of one of the UInt types , which stands for " version " <nl> + * For example : ENGINE = ReplicatedCollapsingMergeTree ( ' / tables / mytable ' , ' rep02 ' , EventDate , ( CounterID , EventDate , intHash32 ( UniqID ) , VisitID ) , 8192 , Sign ) . <nl> + * - ( for Graphite ) the parameter name in config file with settings of thinning rules . <nl> + * <nl> + * MergeTree ( date , [ sample_key ] , primary_key , index_granularity ) <nl> + * CollapsingMergeTree ( date , [ sample_key ] , primary_key , index_granularity , sign ) <nl> + * SummingMergeTree ( date , [ sample_key ] , primary_key , index_granularity , [ columns_to_sum ] ) <nl> + * AggregatingMergeTree ( date , [ sample_key ] , primary_key , index_granularity ) <nl> + * ReplacingMergeTree ( date , [ sample_key ] , primary_key , index_granularity , [ version_column ] ) <nl> + * GraphiteMergeTree ( date , [ sample_key ] , primary_key , index_granularity , ' config_element ' ) <nl> + * <nl> + * Alternatively , you can specify : <nl> + * - Partitioning expression in the PARTITION BY clause ; <nl> + * - Primary key in the ORDER BY clause ; <nl> + * - Sampling expression in the SAMPLE BY clause ; <nl> + * - Additional MergeTreeSettings in the SETTINGS clause ; <nl> + * / <nl> + <nl> + bool is_extended_storage_def = <nl> + storage_def . partition_by | | storage_def . order_by | | storage_def . sample_by | | storage_def . settings ; <nl> + <nl> + String name_part = name . substr ( 0 , name . 
size ( ) - strlen ( " MergeTree " ) ) ; <nl> + <nl> + bool replicated = startsWith ( name_part , " Replicated " ) ; <nl> + if ( replicated ) <nl> + name_part = name_part . substr ( strlen ( " Replicated " ) ) ; <nl> + <nl> + MergeTreeData : : MergingParams merging_params ; <nl> + merging_params . mode = MergeTreeData : : MergingParams : : Ordinary ; <nl> + <nl> + if ( name_part = = " Collapsing " ) <nl> + merging_params . mode = MergeTreeData : : MergingParams : : Collapsing ; <nl> + else if ( name_part = = " Summing " ) <nl> + merging_params . mode = MergeTreeData : : MergingParams : : Summing ; <nl> + else if ( name_part = = " Aggregating " ) <nl> + merging_params . mode = MergeTreeData : : MergingParams : : Aggregating ; <nl> + else if ( name_part = = " Replacing " ) <nl> + merging_params . mode = MergeTreeData : : MergingParams : : Replacing ; <nl> + else if ( name_part = = " Graphite " ) <nl> + merging_params . mode = MergeTreeData : : MergingParams : : Graphite ; <nl> + else if ( ! name_part . empty ( ) ) <nl> + throw Exception ( <nl> + " Unknown storage " + name + getMergeTreeVerboseHelp ( is_extended_storage_def ) , <nl> + ErrorCodes : : UNKNOWN_STORAGE ) ; <nl> + <nl> + / / / NOTE Quite complicated . <nl> + <nl> + size_t min_num_params = 0 ; <nl> + size_t max_num_params = 0 ; <nl> + String needed_params ; <nl> + <nl> + auto add_mandatory_param = [ & ] ( const char * desc ) <nl> + { <nl> + + + min_num_params ; <nl> + + + max_num_params ; <nl> + needed_params + = needed_params . empty ( ) ? " \ n " : " , \ n " ; <nl> + needed_params + = desc ; <nl> + } ; <nl> + auto add_optional_param = [ & ] ( const char * desc ) <nl> + { <nl> + + + max_num_params ; <nl> + needed_params + = needed_params . empty ( ) ? " \ n " : " , \ n [ " ; <nl> + needed_params + = desc ; <nl> + needed_params + = " ] " ; <nl> + } ; <nl> + <nl> + if ( replicated ) <nl> + { <nl> + add_mandatory_param ( " path in ZooKeeper " ) ; <nl> + add_mandatory_param ( " replica name " ) ; <nl> + } <nl> + <nl> + if ( ! is_extended_storage_def ) <nl> + { <nl> + add_mandatory_param ( " name of column with date " ) ; <nl> + add_optional_param ( " sampling element of primary key " ) ; <nl> + add_mandatory_param ( " primary key expression " ) ; <nl> + add_mandatory_param ( " index granularity " ) ; <nl> + } <nl> + <nl> + switch ( merging_params . mode ) <nl> + { <nl> + default : <nl> + break ; <nl> + case MergeTreeData : : MergingParams : : Summing : <nl> + add_optional_param ( " list of columns to sum " ) ; <nl> + break ; <nl> + case MergeTreeData : : MergingParams : : Replacing : <nl> + add_optional_param ( " version " ) ; <nl> + break ; <nl> + case MergeTreeData : : MergingParams : : Collapsing : <nl> + add_mandatory_param ( " sign column " ) ; <nl> + break ; <nl> + case MergeTreeData : : MergingParams : : Graphite : <nl> + add_mandatory_param ( " ' config_element_for_graphite_schema ' " ) ; <nl> + break ; <nl> + } <nl> + <nl> + if ( args . size ( ) < min_num_params | | args . 
size ( ) > max_num_params ) <nl> + { <nl> + String msg ; <nl> + if ( is_extended_storage_def ) <nl> + msg + = " With extended storage definition syntax storage " + name + " requires " ; <nl> + else <nl> + msg + = " Storage " + name + " requires " ; <nl> + <nl> + if ( max_num_params ) <nl> + { <nl> + if ( min_num_params = = max_num_params ) <nl> + msg + = toString ( min_num_params ) + " parameters : " ; <nl> + else <nl> + msg + = toString ( min_num_params ) + " to " + toString ( max_num_params ) + " parameters : " ; <nl> + msg + = needed_params ; <nl> + } <nl> + else <nl> + msg + = " no parameters " ; <nl> + <nl> + msg + = getMergeTreeVerboseHelp ( is_extended_storage_def ) ; <nl> + <nl> + throw Exception ( msg , ErrorCodes : : NUMBER_OF_ARGUMENTS_DOESNT_MATCH ) ; <nl> + } <nl> + <nl> + / / / For Replicated . <nl> + String zookeeper_path ; <nl> + String replica_name ; <nl> + <nl> + if ( replicated ) <nl> + { <nl> + auto ast = typeid_cast < ASTLiteral * > ( & * args [ 0 ] ) ; <nl> + if ( ast & & ast - > value . getType ( ) = = Field : : Types : : String ) <nl> + zookeeper_path = safeGet < String > ( ast - > value ) ; <nl> + else <nl> + throw Exception ( <nl> + " Path in ZooKeeper must be a string literal " + getMergeTreeVerboseHelp ( is_extended_storage_def ) , <nl> + ErrorCodes : : BAD_ARGUMENTS ) ; <nl> + <nl> + ast = typeid_cast < ASTLiteral * > ( & * args [ 1 ] ) ; <nl> + if ( ast & & ast - > value . getType ( ) = = Field : : Types : : String ) <nl> + replica_name = safeGet < String > ( ast - > value ) ; <nl> + else <nl> + throw Exception ( <nl> + " Replica name must be a string literal " + getMergeTreeVerboseHelp ( is_extended_storage_def ) , <nl> + ErrorCodes : : BAD_ARGUMENTS ) ; <nl> + <nl> + if ( replica_name . empty ( ) ) <nl> + throw Exception ( <nl> + " No replica name in config " + getMergeTreeVerboseHelp ( is_extended_storage_def ) , <nl> + ErrorCodes : : NO_REPLICA_NAME_GIVEN ) ; <nl> + <nl> + args . erase ( args . begin ( ) , args . begin ( ) + 2 ) ; <nl> + } <nl> + <nl> + if ( merging_params . mode = = MergeTreeData : : MergingParams : : Collapsing ) <nl> + { <nl> + if ( auto ast = typeid_cast < ASTIdentifier * > ( & * args . back ( ) ) ) <nl> + merging_params . sign_column = ast - > name ; <nl> + else <nl> + throw Exception ( <nl> + " Sign column name must be an unquoted string " + getMergeTreeVerboseHelp ( is_extended_storage_def ) , <nl> + ErrorCodes : : BAD_ARGUMENTS ) ; <nl> + <nl> + args . pop_back ( ) ; <nl> + } <nl> + else if ( merging_params . mode = = MergeTreeData : : MergingParams : : Replacing ) <nl> + { <nl> + / / / If the last element is not index_granularity or replica_name ( a literal ) , then this is the name of the version column . <nl> + if ( ! args . empty ( ) & & ! typeid_cast < const ASTLiteral * > ( & * args . back ( ) ) ) <nl> + { <nl> + if ( auto ast = typeid_cast < ASTIdentifier * > ( & * args . back ( ) ) ) <nl> + merging_params . version_column = ast - > name ; <nl> + else <nl> + throw Exception ( <nl> + " Version column name must be an unquoted string " + getMergeTreeVerboseHelp ( is_extended_storage_def ) , <nl> + ErrorCodes : : BAD_ARGUMENTS ) ; <nl> + <nl> + args . pop_back ( ) ; <nl> + } <nl> + } <nl> + else if ( merging_params . mode = = MergeTreeData : : MergingParams : : Summing ) <nl> + { <nl> + / / / If the last element is not index_granularity or replica_name ( a literal ) , then this is a list of summable columns . <nl> + if ( ! args . empty ( ) & & ! typeid_cast < const ASTLiteral * > ( & * args . 
back ( ) ) ) <nl> + { <nl> + merging_params . columns_to_sum = extractColumnNames ( args . back ( ) ) ; <nl> + args . pop_back ( ) ; <nl> + } <nl> + } <nl> + else if ( merging_params . mode = = MergeTreeData : : MergingParams : : Graphite ) <nl> + { <nl> + String graphite_config_name ; <nl> + String error_msg = " Last parameter of GraphiteMergeTree must be name ( in single quotes ) of element in configuration file with Graphite options " ; <nl> + error_msg + = getMergeTreeVerboseHelp ( is_extended_storage_def ) ; <nl> + <nl> + if ( auto ast = typeid_cast < ASTLiteral * > ( & * args . back ( ) ) ) <nl> + { <nl> + if ( ast - > value . getType ( ) ! = Field : : Types : : String ) <nl> + throw Exception ( error_msg , ErrorCodes : : BAD_ARGUMENTS ) ; <nl> + <nl> + graphite_config_name = ast - > value . get < String > ( ) ; <nl> + } <nl> + else <nl> + throw Exception ( error_msg , ErrorCodes : : BAD_ARGUMENTS ) ; <nl> + <nl> + args . pop_back ( ) ; <nl> + setGraphitePatternsFromConfig ( context , graphite_config_name , merging_params . graphite_params ) ; <nl> + } <nl> + <nl> + String date_column_name ; <nl> + ASTPtr partition_expr_list ; <nl> + ASTPtr primary_expr_list ; <nl> + ASTPtr sampling_expression ; <nl> + MergeTreeSettings storage_settings = context . getMergeTreeSettings ( ) ; <nl> + <nl> + if ( is_extended_storage_def ) <nl> + { <nl> + if ( storage_def . partition_by ) <nl> + partition_expr_list = extractKeyExpressionList ( * storage_def . partition_by ) ; <nl> + <nl> + if ( storage_def . order_by ) <nl> + primary_expr_list = extractKeyExpressionList ( * storage_def . order_by ) ; <nl> + <nl> + if ( storage_def . sample_by ) <nl> + sampling_expression = storage_def . sample_by - > ptr ( ) ; <nl> + <nl> + storage_settings . loadFromQuery ( storage_def ) ; <nl> + } <nl> + else <nl> + { <nl> + / / / If there is an expression for sampling . MergeTree ( date , [ sample_key ] , primary_key , index_granularity ) <nl> + if ( args . size ( ) = = 4 ) <nl> + { <nl> + sampling_expression = args [ 1 ] ; <nl> + args . erase ( args . begin ( ) + 1 ) ; <nl> + } <nl> + <nl> + / / / Now only three parameters remain - date ( or partitioning expression ) , primary_key , index_granularity . <nl> + <nl> + if ( auto ast = typeid_cast < ASTIdentifier * > ( args [ 0 ] . get ( ) ) ) <nl> + date_column_name = ast - > name ; <nl> + else <nl> + throw Exception ( <nl> + " Date column name must be an unquoted string " + getMergeTreeVerboseHelp ( is_extended_storage_def ) , <nl> + ErrorCodes : : BAD_ARGUMENTS ) ; <nl> + <nl> + primary_expr_list = extractKeyExpressionList ( * args [ 1 ] ) ; <nl> + <nl> + auto ast = typeid_cast < ASTLiteral * > ( & * args . back ( ) ) ; <nl> + if ( ast & & ast - > value . getType ( ) = = Field : : Types : : UInt64 ) <nl> + storage_settings . 
index_granularity = safeGet < UInt64 > ( ast - > value ) ; <nl> + else <nl> + throw Exception ( <nl> + " Index granularity must be a positive integer " + getMergeTreeVerboseHelp ( is_extended_storage_def ) , <nl> + ErrorCodes : : BAD_ARGUMENTS ) ; <nl> + } <nl> + <nl> + if ( replicated ) <nl> + return StorageReplicatedMergeTree : : create ( <nl> + zookeeper_path , replica_name , attach , data_path , database_name , table_name , <nl> + columns , materialized_columns , alias_columns , column_defaults , <nl> + context , primary_expr_list , date_column_name , partition_expr_list , <nl> + sampling_expression , merging_params , storage_settings , <nl> + has_force_restore_data_flag ) ; <nl> + else <nl> + return StorageMergeTree : : create ( <nl> + data_path , database_name , table_name , <nl> + columns , materialized_columns , alias_columns , column_defaults , attach , <nl> + context , primary_expr_list , date_column_name , partition_expr_list , <nl> + sampling_expression , merging_params , storage_settings , <nl> + has_force_restore_data_flag ) ; <nl> + } <nl> + <nl> + <nl> + void registerStorageMergeTree ( StorageFactory & factory ) <nl> + { <nl> + factory . registerStorage ( " MergeTree " , create ) ; <nl> + factory . registerStorage ( " CollapsingMergeTree " , create ) ; <nl> + factory . registerStorage ( " ReplacingMergeTree " , create ) ; <nl> + factory . registerStorage ( " AggregatingMergeTree " , create ) ; <nl> + factory . registerStorage ( " SummingMergeTree " , create ) ; <nl> + factory . registerStorage ( " GraphiteMergeTree " , create ) ; <nl> + <nl> + factory . registerStorage ( " ReplicatedMergeTree " , create ) ; <nl> + factory . registerStorage ( " ReplicatedCollapsingMergeTree " , create ) ; <nl> + factory . registerStorage ( " ReplicatedReplacingMergeTree " , create ) ; <nl> + factory . registerStorage ( " ReplicatedAggregatingMergeTree " , create ) ; <nl> + factory . registerStorage ( " ReplicatedSummingMergeTree " , create ) ; <nl> + factory . registerStorage ( " ReplicatedGraphiteMergeTree " , create ) ; <nl> + } <nl> + <nl> + } <nl> mmm a / dbms / src / Storages / StorageBuffer . cpp <nl> ppp b / dbms / src / Storages / StorageBuffer . cpp <nl> <nl> # include < DataStreams / IProfilingBlockInputStream . h > <nl> # include < Databases / IDatabase . h > <nl> # include < Storages / StorageBuffer . h > <nl> + # include < Storages / StorageFactory . h > <nl> # include < Parsers / ASTInsertQuery . h > <nl> # include < Parsers / ASTIdentifier . h > <nl> # include < Parsers / ASTExpressionList . h > <nl> void StorageBuffer : : alter ( const AlterCommands & params , const String & database_ <nl> columns , materialized_columns , alias_columns , column_defaults , { } ) ; <nl> } <nl> <nl> + <nl> + void registerStorageBuffer ( StorageFactory & factory ) <nl> + { <nl> + / * * Buffer ( db , table , num_buckets , min_time , max_time , min_rows , max_rows , min_bytes , max_bytes ) <nl> + * <nl> + * db , table - in which table to put data from buffer . <nl> + * num_buckets - level of parallelism . <nl> + * min_time , max_time , min_rows , max_rows , min_bytes , max_bytes - conditions for flushing the buffer . <nl> + * / <nl> + <nl> + factory . 
registerStorage ( " Buffer " , [ ] ( <nl> + ASTs & args , <nl> + const String & , <nl> + const String & table_name , <nl> + const String & , <nl> + Context & local_context , <nl> + Context & context , <nl> + const NamesAndTypesList & columns , <nl> + const NamesAndTypesList & materialized_columns , <nl> + const NamesAndTypesList & alias_columns , <nl> + const ColumnDefaults & column_defaults , <nl> + bool , <nl> + bool ) <nl> + { <nl> + if ( args . size ( ) ! = 9 ) <nl> + throw Exception ( " Storage Buffer requires 9 parameters : " <nl> + " destination_database , destination_table , num_buckets , min_time , max_time , min_rows , max_rows , min_bytes , max_bytes . " , <nl> + ErrorCodes : : NUMBER_OF_ARGUMENTS_DOESNT_MATCH ) ; <nl> + <nl> + args [ 0 ] = evaluateConstantExpressionOrIdentifierAsLiteral ( args [ 0 ] , local_context ) ; <nl> + args [ 1 ] = evaluateConstantExpressionOrIdentifierAsLiteral ( args [ 1 ] , local_context ) ; <nl> + <nl> + String destination_database = static_cast < const ASTLiteral & > ( * args [ 0 ] ) . value . safeGet < String > ( ) ; <nl> + String destination_table = static_cast < const ASTLiteral & > ( * args [ 1 ] ) . value . safeGet < String > ( ) ; <nl> + <nl> + UInt64 num_buckets = applyVisitor ( FieldVisitorConvertToNumber < UInt64 > ( ) , typeid_cast < ASTLiteral & > ( * args [ 2 ] ) . value ) ; <nl> + <nl> + Int64 min_time = applyVisitor ( FieldVisitorConvertToNumber < Int64 > ( ) , typeid_cast < ASTLiteral & > ( * args [ 3 ] ) . value ) ; <nl> + Int64 max_time = applyVisitor ( FieldVisitorConvertToNumber < Int64 > ( ) , typeid_cast < ASTLiteral & > ( * args [ 4 ] ) . value ) ; <nl> + UInt64 min_rows = applyVisitor ( FieldVisitorConvertToNumber < UInt64 > ( ) , typeid_cast < ASTLiteral & > ( * args [ 5 ] ) . value ) ; <nl> + UInt64 max_rows = applyVisitor ( FieldVisitorConvertToNumber < UInt64 > ( ) , typeid_cast < ASTLiteral & > ( * args [ 6 ] ) . value ) ; <nl> + UInt64 min_bytes = applyVisitor ( FieldVisitorConvertToNumber < UInt64 > ( ) , typeid_cast < ASTLiteral & > ( * args [ 7 ] ) . value ) ; <nl> + UInt64 max_bytes = applyVisitor ( FieldVisitorConvertToNumber < UInt64 > ( ) , typeid_cast < ASTLiteral & > ( * args [ 8 ] ) . value ) ; <nl> + <nl> + return StorageBuffer : : create ( <nl> + table_name , columns , <nl> + materialized_columns , alias_columns , column_defaults , <nl> + context , <nl> + num_buckets , <nl> + StorageBuffer : : Thresholds { min_time , min_rows , min_bytes } , <nl> + StorageBuffer : : Thresholds { max_time , max_rows , max_bytes } , <nl> + destination_database , destination_table ) ; <nl> + } ) ; <nl> + } <nl> + <nl> } <nl> mmm a / dbms / src / Storages / StorageDictionary . cpp <nl> ppp b / dbms / src / Storages / StorageDictionary . cpp <nl> <nl> # include < sstream > <nl> - # include < Parsers / ASTCreateQuery . h > <nl> # include < DataTypes / DataTypesNumber . h > <nl> # include < DataTypes / DataTypeDate . h > <nl> # include < Dictionaries / IDictionarySource . h > <nl> # include < Dictionaries / DictionaryStructure . h > <nl> - # include < Dictionaries / CacheDictionary . h > <nl> # include < Storages / StorageDictionary . h > <nl> + # include < Storages / StorageFactory . h > <nl> # include < Interpreters / Context . h > <nl> # include < Interpreters / ExternalDictionaries . h > <nl> + # include < Parsers / ASTLiteral . h > <nl> # include < common / logger_useful . h > <nl> # include < Common / typeid_cast . 
h > <nl> <nl> + <nl> namespace DB <nl> { <nl> <nl> - StoragePtr StorageDictionary : : create ( <nl> - const String & table_name , <nl> - Context & context , <nl> - const ASTCreateQuery & query , <nl> - const NamesAndTypesList & columns , <nl> - const NamesAndTypesList & materialized_columns , <nl> - const NamesAndTypesList & alias_columns , <nl> - const ColumnDefaults & column_defaults ) <nl> + namespace ErrorCodes <nl> { <nl> - const ASTFunction & engine = * query . storage - > engine ; <nl> - String dictionary_name ; <nl> - if ( engine . arguments ) <nl> - { <nl> - std : : stringstream iss ; <nl> - engine . arguments - > format ( IAST : : FormatSettings ( iss , false , false ) ) ; <nl> - dictionary_name = iss . str ( ) ; <nl> - } <nl> - <nl> - const auto & dictionary = context . getExternalDictionaries ( ) . getDictionary ( dictionary_name ) ; <nl> - const DictionaryStructure & dictionary_structure = dictionary - > getStructure ( ) ; <nl> - return ext : : shared_ptr_helper < StorageDictionary > : : create ( <nl> - table_name , columns , materialized_columns , alias_columns , <nl> - column_defaults , dictionary_structure , dictionary_name ) ; <nl> + extern const int NUMBER_OF_ARGUMENTS_DOESNT_MATCH ; <nl> } <nl> <nl> - StoragePtr StorageDictionary : : create ( <nl> - const String & table_name , <nl> - const NamesAndTypesList & columns , <nl> - const NamesAndTypesList & materialized_columns , <nl> - const NamesAndTypesList & alias_columns , <nl> - const ColumnDefaults & column_defaults , <nl> - const DictionaryStructure & dictionary_structure , <nl> - const String & dictionary_name ) <nl> - { <nl> - return ext : : shared_ptr_helper < StorageDictionary > : : create ( <nl> - table_name , columns , materialized_columns , alias_columns , <nl> - column_defaults , dictionary_structure , dictionary_name ) ; <nl> - } <nl> <nl> StorageDictionary : : StorageDictionary ( <nl> const String & table_name_ , <nl> void StorageDictionary : : checkNamesAndTypesCompatibleWithDictionary ( const Diction <nl> } <nl> } <nl> <nl> + <nl> + void registerStorageDictionary ( StorageFactory & factory ) <nl> + { <nl> + factory . registerStorage ( " Dictionary " , [ ] ( <nl> + ASTs & args , <nl> + const String & , <nl> + const String & table_name , <nl> + const String & , <nl> + Context & , <nl> + Context & context , <nl> + const NamesAndTypesList & columns , <nl> + const NamesAndTypesList & materialized_columns , <nl> + const NamesAndTypesList & alias_columns , <nl> + const ColumnDefaults & column_defaults , <nl> + bool , <nl> + bool ) <nl> + { <nl> + if ( args . size ( ) ! = 1 ) <nl> + throw Exception ( " Storage Dictionary requires single parameter : name of dictionary " , <nl> + ErrorCodes : : NUMBER_OF_ARGUMENTS_DOESNT_MATCH ) ; <nl> + <nl> + String dictionary_name = typeid_cast < const ASTLiteral & > ( * args [ 0 ] ) . value . safeGet < String > ( ) ; <nl> + <nl> + const auto & dictionary = context . getExternalDictionaries ( ) . getDictionary ( dictionary_name ) ; <nl> + const DictionaryStructure & dictionary_structure = dictionary - > getStructure ( ) ; <nl> + <nl> + return StorageDictionary : : create ( <nl> + table_name , columns , materialized_columns , alias_columns , <nl> + column_defaults , dictionary_structure , dictionary_name ) ; <nl> + } ) ; <nl> + } <nl> + <nl> } <nl> mmm a / dbms / src / Storages / StorageDictionary . h <nl> ppp b / dbms / src / Storages / StorageDictionary . 
h <nl> class ExternalDictionaries ; <nl> class StorageDictionary : private ext : : shared_ptr_helper < StorageDictionary > , public IStorage <nl> { <nl> public : <nl> - static StoragePtr create ( const String & table_name_ , <nl> - Context & context_ , <nl> - const ASTCreateQuery & query , <nl> - const NamesAndTypesList & columns_ , <nl> - const NamesAndTypesList & materialized_columns_ , <nl> - const NamesAndTypesList & alias_columns_ , <nl> - const ColumnDefaults & column_defaults_ ) ; <nl> - <nl> - static StoragePtr create ( const String & table_name , <nl> - const NamesAndTypesList & columns , <nl> - const NamesAndTypesList & materialized_columns , <nl> - const NamesAndTypesList & alias_columns , <nl> - const ColumnDefaults & column_defaults , <nl> - const DictionaryStructure & dictionary_structure , <nl> - const String & dictionary_name ) ; <nl> - <nl> std : : string getName ( ) const override { return " Dictionary " ; } <nl> std : : string getTableName ( ) const override { return table_name ; } <nl> const NamesAndTypesList & getColumnsListImpl ( ) const override { return columns ; } <nl> mmm a / dbms / src / Storages / StorageFactory . cpp <nl> ppp b / dbms / src / Storages / StorageFactory . cpp <nl> <nl> + # include < Storages / StorageFactory . h > <nl> + # include < Interpreters / Context . h > <nl> + # include < Parsers / ASTFunction . h > <nl> + # include < Parsers / ASTStorage . h > <nl> + # include < Common / Exception . h > <nl> + <nl> + <nl> + namespace DB <nl> + { <nl> + <nl> + namespace ErrorCodes <nl> + { <nl> + extern const int UNKNOWN_STORAGE ; <nl> + extern const int LOGICAL_ERROR ; <nl> + extern const int INCORRECT_QUERY ; <nl> + extern const int ENGINE_REQUIRED ; <nl> + extern const int FUNCTION_CANNOT_HAVE_PARAMETERS ; <nl> + extern const int BAD_ARGUMENTS ; <nl> + } <nl> + <nl> + <nl> + / / / Some types are only for intermediate values of expressions and cannot be used in tables . <nl> + static void checkAllTypesAreAllowedInTable ( const NamesAndTypesList & names_and_types ) <nl> + { <nl> + for ( const auto & elem : names_and_types ) <nl> + if ( elem . type - > cannotBeStoredInTables ( ) ) <nl> + throw Exception ( " Data type " + elem . type - > getName ( ) + " cannot be used in tables " , ErrorCodes : : DATA_TYPE_CANNOT_BE_USED_IN_TABLES ) ; <nl> + } <nl> + <nl> + <nl> + void StorageFactory : : registerStorage ( const std : : string & name , Creator creator ) <nl> + { <nl> + if ( ! storages . emplace ( name , std : : move ( creator ) ) . second ) <nl> + throw Exception ( " TableFunctionFactory : the table function name ' " + name + " ' is not unique " , <nl> + ErrorCodes : : LOGICAL_ERROR ) ; <nl> + } <nl> + <nl> + <nl> + StoragePtr StorageFactory : : get ( <nl> + ASTCreateQuery & query , <nl> + const String & data_path , <nl> + const String & table_name , <nl> + const String & database_name , <nl> + Context & local_context , <nl> + Context & context , <nl> + const NamesAndTypesList & columns , <nl> + const NamesAndTypesList & materialized_columns , <nl> + const NamesAndTypesList & alias_columns , <nl> + const ColumnDefaults & column_defaults , <nl> + bool attach , <nl> + bool has_force_restore_data_flag ) const <nl> + { <nl> + String name ; <nl> + ASTs args ; <nl> + <nl> + if ( query . is_view ) <nl> + { <nl> + if ( query . 
storage ) <nl> + throw Exception ( " Specifying ENGINE is not allowed for a View " , ErrorCodes : : INCORRECT_QUERY ) ; <nl> + <nl> + name = " View " ; <nl> + } <nl> + else <nl> + { <nl> + / / / Check for some special types , that are not allowed to be stored in tables . Example : NULL data type . <nl> + / / / Exception : any type is allowed in View , because plain ( non - materialized ) View does not store anything itself . <nl> + checkAllTypesAreAllowedInTable ( columns ) ; <nl> + checkAllTypesAreAllowedInTable ( materialized_columns ) ; <nl> + checkAllTypesAreAllowedInTable ( alias_columns ) ; <nl> + <nl> + if ( query . is_materialized_view ) <nl> + { <nl> + name = " MaterializedView " ; <nl> + } <nl> + else <nl> + { <nl> + if ( ! query . storage ) <nl> + throw Exception ( " Incorrect CREATE query : ENGINE required " , ErrorCodes : : ENGINE_REQUIRED ) ; <nl> + <nl> + const ASTStorage & storage_def = * query . storage ; <nl> + const ASTFunction & engine_def = * storage_def . engine ; <nl> + <nl> + if ( engine_def . parameters ) <nl> + throw Exception ( <nl> + " Engine definition cannot take the form of a parametric function " , ErrorCodes : : FUNCTION_CANNOT_HAVE_PARAMETERS ) ; <nl> + <nl> + if ( engine_def . arguments ) <nl> + args = engine_def . arguments - > children ; <nl> + <nl> + name = engine_def . name ; <nl> + <nl> + if ( ( storage_def . partition_by | | storage_def . order_by | | storage_def . sample_by | | storage_def . settings ) <nl> + & & ! endsWith ( name , " MergeTree " ) ) <nl> + { <nl> + throw Exception ( <nl> + " Engine " + name + " doesn ' t support PARTITION BY , ORDER BY , SAMPLE BY or SETTINGS clauses . " <nl> + " Currently only the MergeTree family of engines supports them " , ErrorCodes : : BAD_ARGUMENTS ) ; <nl> + } <nl> + <nl> + if ( name = = " View " ) <nl> + { <nl> + throw Exception ( <nl> + " Direct creation of tables with ENGINE View is not supported , use CREATE VIEW statement " , <nl> + ErrorCodes : : INCORRECT_QUERY ) ; <nl> + } <nl> + else if ( name = = " MaterializedView " ) <nl> + { <nl> + throw Exception ( <nl> + " Direct creation of tables with ENGINE MaterializedView is not supported , use CREATE MATERIALIZED VIEW statement " , <nl> + ErrorCodes : : INCORRECT_QUERY ) ; <nl> + } <nl> + } <nl> + } <nl> + <nl> + auto it = storages . find ( name ) ; <nl> + if ( it = = storages . end ( ) ) <nl> + throw Exception ( " Unknown table engine " + name , ErrorCodes : : UNKNOWN_STORAGE ) ; <nl> + <nl> + return it - > second ( args , data_path , <nl> + table_name , database_name , <nl> + local_context , context , <nl> + columns , materialized_columns , alias_columns , column_defaults , <nl> + attach , has_force_restore_data_flag ) ; <nl> + } <nl> + <nl> + } <nl> + <nl> + <nl> # include < sparsehash / dense_hash_map > <nl> # include < unistd . h > <nl> # include < Poco / Util / Application . h > <nl> static void setGraphitePatternsFromConfig ( const Context & context , <nl> } <nl> <nl> <nl> - / / / Some types are only for intermediate values of expressions and cannot be used in tables . <nl> - static void checkAllTypesAreAllowedInTable ( const NamesAndTypesList & names_and_types ) <nl> - { <nl> - for ( const auto & elem : names_and_types ) <nl> - if ( elem . type - > cannotBeStoredInTables ( ) ) <nl> - throw Exception ( " Data type " + elem . 
type - > getName ( ) + " cannot be used in tables " , ErrorCodes : : DATA_TYPE_CANNOT_BE_USED_IN_TABLES ) ; <nl> - } <nl> - <nl> <nl> static String getMergeTreeVerboseHelp ( bool is_extended_syntax ) <nl> { <nl> StoragePtr StorageFactory : : get ( <nl> materialized_columns , alias_columns , column_defaults , <nl> context . getSettings ( ) . max_compress_block_size ) ; <nl> } <nl> - else if ( name = = " Dictionary " ) <nl> - { <nl> - return StorageDictionary : : create ( <nl> - table_name , context , query , columns , <nl> - materialized_columns , alias_columns , column_defaults ) ; <nl> - } <nl> else if ( name = = " TinyLog " ) <nl> { <nl> check_arguments_empty ( ) ; <nl> StoragePtr StorageFactory : : get ( <nl> remote_database , remote_table , cluster_name , <nl> context , sharding_key , data_path ) ; <nl> } <nl> - else if ( name = = " Buffer " ) <nl> - { <nl> - / * * Buffer ( db , table , num_buckets , min_time , max_time , min_rows , max_rows , min_bytes , max_bytes ) <nl> - * <nl> - * db , table - in which table to put data from buffer . <nl> - * num_buckets - level of parallelism . <nl> - * min_time , max_time , min_rows , max_rows , min_bytes , max_bytes - conditions for flushing the buffer . <nl> - * / <nl> - <nl> - if ( ! args_ptr | | args_ptr - > size ( ) ! = 9 ) <nl> - throw Exception ( " Storage Buffer requires 9 parameters : " <nl> - " destination_database , destination_table , num_buckets , min_time , max_time , min_rows , max_rows , min_bytes , max_bytes . " , <nl> - ErrorCodes : : NUMBER_OF_ARGUMENTS_DOESNT_MATCH ) ; <nl> - ASTs & args = * args_ptr ; <nl> - <nl> - args [ 0 ] = evaluateConstantExpressionOrIdentifierAsLiteral ( args [ 0 ] , local_context ) ; <nl> - args [ 1 ] = evaluateConstantExpressionOrIdentifierAsLiteral ( args [ 1 ] , local_context ) ; <nl> <nl> - String destination_database = static_cast < const ASTLiteral & > ( * args [ 0 ] ) . value . safeGet < String > ( ) ; <nl> - String destination_table = static_cast < const ASTLiteral & > ( * args [ 1 ] ) . value . safeGet < String > ( ) ; <nl> - <nl> - UInt64 num_buckets = applyVisitor ( FieldVisitorConvertToNumber < UInt64 > ( ) , typeid_cast < ASTLiteral & > ( * args [ 2 ] ) . value ) ; <nl> - <nl> - Int64 min_time = applyVisitor ( FieldVisitorConvertToNumber < Int64 > ( ) , typeid_cast < ASTLiteral & > ( * args [ 3 ] ) . value ) ; <nl> - Int64 max_time = applyVisitor ( FieldVisitorConvertToNumber < Int64 > ( ) , typeid_cast < ASTLiteral & > ( * args [ 4 ] ) . value ) ; <nl> - UInt64 min_rows = applyVisitor ( FieldVisitorConvertToNumber < UInt64 > ( ) , typeid_cast < ASTLiteral & > ( * args [ 5 ] ) . value ) ; <nl> - UInt64 max_rows = applyVisitor ( FieldVisitorConvertToNumber < UInt64 > ( ) , typeid_cast < ASTLiteral & > ( * args [ 6 ] ) . value ) ; <nl> - UInt64 min_bytes = applyVisitor ( FieldVisitorConvertToNumber < UInt64 > ( ) , typeid_cast < ASTLiteral & > ( * args [ 7 ] ) . value ) ; <nl> - UInt64 max_bytes = applyVisitor ( FieldVisitorConvertToNumber < UInt64 > ( ) , typeid_cast < ASTLiteral & > ( * args [ 8 ] ) . 
value ) ; <nl> - <nl> - return StorageBuffer : : create ( <nl> - table_name , columns , <nl> - materialized_columns , alias_columns , column_defaults , <nl> - context , <nl> - num_buckets , <nl> - StorageBuffer : : Thresholds { min_time , min_rows , min_bytes } , <nl> - StorageBuffer : : Thresholds { max_time , max_rows , max_bytes } , <nl> - destination_database , destination_table ) ; <nl> - } <nl> else if ( name = = " Kafka " ) <nl> { <nl> # if USE_RDKAFKA <nl> StoragePtr StorageFactory : : get ( <nl> } <nl> else if ( endsWith ( name , " MergeTree " ) ) <nl> { <nl> - / * * [ Replicated ] [ | Summing | Collapsing | Aggregating | Unsorted | Replacing | Graphite ] MergeTree ( 2 * 7 combinations ) engines <nl> - * The argument for the engine should be : <nl> - * - ( for Replicated ) The path to the table in ZooKeeper <nl> - * - ( for Replicated ) Replica name in ZooKeeper <nl> - * - the name of the column with the date ; <nl> - * - ( optional ) expression for sampling <nl> - * ( the query with ` SAMPLE x ` will select rows that have a lower value in this column than ` x * UINT32_MAX ` ) ; <nl> - * - an expression for sorting ( either a scalar expression or a tuple of several ) ; <nl> - * - index_granularity ; <nl> - * - ( for Collapsing ) the name of Int8 column that contains ` sign ` type with the change of " visit " ( taking values 1 and - 1 ) . <nl> - * For example : ENGINE = ReplicatedCollapsingMergeTree ( ' / tables / mytable ' , ' rep02 ' , EventDate , ( CounterID , EventDate , intHash32 ( UniqID ) , VisitID ) , 8192 , Sign ) . <nl> - * - ( for Summing , optional ) a tuple of columns to be summed . If not specified , all numeric columns that are not included in the primary key are used . <nl> - * - ( for Replacing , optional ) the column name of one of the UInt types , which stands for " version " <nl> - * For example : ENGINE = ReplicatedCollapsingMergeTree ( ' / tables / mytable ' , ' rep02 ' , EventDate , ( CounterID , EventDate , intHash32 ( UniqID ) , VisitID ) , 8192 , Sign ) . <nl> - * - ( for Graphite ) the parameter name in config file with settings of thinning rules . <nl> - * <nl> - * MergeTree ( date , [ sample_key ] , primary_key , index_granularity ) <nl> - * CollapsingMergeTree ( date , [ sample_key ] , primary_key , index_granularity , sign ) <nl> - * SummingMergeTree ( date , [ sample_key ] , primary_key , index_granularity , [ columns_to_sum ] ) <nl> - * AggregatingMergeTree ( date , [ sample_key ] , primary_key , index_granularity ) <nl> - * ReplacingMergeTree ( date , [ sample_key ] , primary_key , index_granularity , [ version_column ] ) <nl> - * GraphiteMergeTree ( date , [ sample_key ] , primary_key , index_granularity , ' config_element ' ) <nl> - * UnsortedMergeTree ( date , index_granularity ) TODO Add description below . <nl> - * <nl> - * Alternatively , you can specify : <nl> - * - Partitioning expression in the PARTITION BY clause ; <nl> - * - Primary key in the ORDER BY clause ; <nl> - * - Sampling expression in the SAMPLE BY clause ; <nl> - * - Additional MergeTreeSettings in the SETTINGS clause ; <nl> - * / <nl> - <nl> - bool is_extended_storage_def = <nl> - storage_def . partition_by | | storage_def . order_by | | storage_def . sample_by | | storage_def . settings ; <nl> - <nl> - String name_part = name . substr ( 0 , name . size ( ) - strlen ( " MergeTree " ) ) ; <nl> - <nl> - bool replicated = startsWith ( name_part , " Replicated " ) ; <nl> - if ( replicated ) <nl> - name_part = name_part . 
substr ( strlen ( " Replicated " ) ) ; <nl> - <nl> - MergeTreeData : : MergingParams merging_params ; <nl> - merging_params . mode = MergeTreeData : : MergingParams : : Ordinary ; <nl> - <nl> - if ( name_part = = " Collapsing " ) <nl> - merging_params . mode = MergeTreeData : : MergingParams : : Collapsing ; <nl> - else if ( name_part = = " Summing " ) <nl> - merging_params . mode = MergeTreeData : : MergingParams : : Summing ; <nl> - else if ( name_part = = " Aggregating " ) <nl> - merging_params . mode = MergeTreeData : : MergingParams : : Aggregating ; <nl> - else if ( name_part = = " Unsorted " ) <nl> - merging_params . mode = MergeTreeData : : MergingParams : : Unsorted ; <nl> - else if ( name_part = = " Replacing " ) <nl> - merging_params . mode = MergeTreeData : : MergingParams : : Replacing ; <nl> - else if ( name_part = = " Graphite " ) <nl> - merging_params . mode = MergeTreeData : : MergingParams : : Graphite ; <nl> - else if ( ! name_part . empty ( ) ) <nl> - throw Exception ( <nl> - " Unknown storage " + name + getMergeTreeVerboseHelp ( is_extended_storage_def ) , <nl> - ErrorCodes : : UNKNOWN_STORAGE ) ; <nl> - <nl> - ASTs args ; <nl> - if ( args_ptr ) <nl> - args = * args_ptr ; <nl> - <nl> - / / / NOTE Quite complicated . <nl> - <nl> - size_t min_num_params = 0 ; <nl> - size_t max_num_params = 0 ; <nl> - String needed_params ; <nl> - <nl> - auto add_mandatory_param = [ & ] ( const char * desc ) <nl> - { <nl> - + + min_num_params ; <nl> - + + max_num_params ; <nl> - needed_params + = needed_params . empty ( ) ? " \ n " : " , \ n " ; <nl> - needed_params + = desc ; <nl> - } ; <nl> - auto add_optional_param = [ & ] ( const char * desc ) <nl> - { <nl> - + + max_num_params ; <nl> - needed_params + = needed_params . empty ( ) ? " \ n " : " , \ n [ " ; <nl> - needed_params + = desc ; <nl> - needed_params + = " ] " ; <nl> - } ; <nl> - <nl> - if ( replicated ) <nl> - { <nl> - add_mandatory_param ( " path in ZooKeeper " ) ; <nl> - add_mandatory_param ( " replica name " ) ; <nl> - } <nl> - <nl> - if ( ! is_extended_storage_def ) <nl> - { <nl> - if ( merging_params . mode = = MergeTreeData : : MergingParams : : Unsorted ) <nl> - { <nl> - if ( args . size ( ) = = min_num_params ) <nl> - is_extended_storage_def = true ; <nl> - else <nl> - { <nl> - add_mandatory_param ( " name of column with date " ) ; <nl> - add_mandatory_param ( " index granularity " ) ; <nl> - } <nl> - } <nl> - else <nl> - { <nl> - add_mandatory_param ( " name of column with date " ) ; <nl> - add_optional_param ( " sampling element of primary key " ) ; <nl> - add_mandatory_param ( " primary key expression " ) ; <nl> - add_mandatory_param ( " index granularity " ) ; <nl> - } <nl> - } <nl> - <nl> - switch ( merging_params . mode ) <nl> - { <nl> - default : <nl> - break ; <nl> - case MergeTreeData : : MergingParams : : Summing : <nl> - add_optional_param ( " list of columns to sum " ) ; <nl> - break ; <nl> - case MergeTreeData : : MergingParams : : Replacing : <nl> - add_optional_param ( " version " ) ; <nl> - break ; <nl> - case MergeTreeData : : MergingParams : : Collapsing : <nl> - add_mandatory_param ( " sign column " ) ; <nl> - break ; <nl> - case MergeTreeData : : MergingParams : : Graphite : <nl> - add_mandatory_param ( " ' config_element_for_graphite_schema ' " ) ; <nl> - break ; <nl> - } <nl> - <nl> - if ( args . size ( ) < min_num_params | | args . 
size ( ) > max_num_params ) <nl> - { <nl> - String msg ; <nl> - if ( is_extended_storage_def ) <nl> - msg + = " With extended storage definition syntax storage " + name + " requires " ; <nl> - else <nl> - msg + = " Storage " + name + " requires " ; <nl> - <nl> - if ( max_num_params ) <nl> - { <nl> - if ( min_num_params = = max_num_params ) <nl> - msg + = toString ( min_num_params ) + " parameters : " ; <nl> - else <nl> - msg + = toString ( min_num_params ) + " to " + toString ( max_num_params ) + " parameters : " ; <nl> - msg + = needed_params ; <nl> - } <nl> - else <nl> - msg + = " no parameters " ; <nl> - <nl> - msg + = getMergeTreeVerboseHelp ( is_extended_storage_def ) ; <nl> - <nl> - throw Exception ( msg , ErrorCodes : : NUMBER_OF_ARGUMENTS_DOESNT_MATCH ) ; <nl> - } <nl> <nl> - / / / For Replicated . <nl> - String zookeeper_path ; <nl> - String replica_name ; <nl> - <nl> - if ( replicated ) <nl> - { <nl> - auto ast = typeid_cast < ASTLiteral * > ( & * args [ 0 ] ) ; <nl> - if ( ast & & ast - > value . getType ( ) = = Field : : Types : : String ) <nl> - zookeeper_path = safeGet < String > ( ast - > value ) ; <nl> - else <nl> - throw Exception ( <nl> - " Path in ZooKeeper must be a string literal " + getMergeTreeVerboseHelp ( is_extended_storage_def ) , <nl> - ErrorCodes : : BAD_ARGUMENTS ) ; <nl> - <nl> - ast = typeid_cast < ASTLiteral * > ( & * args [ 1 ] ) ; <nl> - if ( ast & & ast - > value . getType ( ) = = Field : : Types : : String ) <nl> - replica_name = safeGet < String > ( ast - > value ) ; <nl> - else <nl> - throw Exception ( <nl> - " Replica name must be a string literal " + getMergeTreeVerboseHelp ( is_extended_storage_def ) , <nl> - ErrorCodes : : BAD_ARGUMENTS ) ; <nl> - <nl> - if ( replica_name . empty ( ) ) <nl> - throw Exception ( <nl> - " No replica name in config " + getMergeTreeVerboseHelp ( is_extended_storage_def ) , <nl> - ErrorCodes : : NO_REPLICA_NAME_GIVEN ) ; <nl> - <nl> - args . erase ( args . begin ( ) , args . begin ( ) + 2 ) ; <nl> - } <nl> - <nl> - if ( merging_params . mode = = MergeTreeData : : MergingParams : : Collapsing ) <nl> - { <nl> - if ( auto ast = typeid_cast < ASTIdentifier * > ( & * args . back ( ) ) ) <nl> - merging_params . sign_column = ast - > name ; <nl> - else <nl> - throw Exception ( <nl> - " Sign column name must be an unquoted string " + getMergeTreeVerboseHelp ( is_extended_storage_def ) , <nl> - ErrorCodes : : BAD_ARGUMENTS ) ; <nl> - <nl> - args . pop_back ( ) ; <nl> - } <nl> - else if ( merging_params . mode = = MergeTreeData : : MergingParams : : Replacing ) <nl> - { <nl> - / / / If the last element is not index_granularity or replica_name ( a literal ) , then this is the name of the version column . <nl> - if ( ! args . empty ( ) & & ! typeid_cast < const ASTLiteral * > ( & * args . back ( ) ) ) <nl> - { <nl> - if ( auto ast = typeid_cast < ASTIdentifier * > ( & * args . back ( ) ) ) <nl> - merging_params . version_column = ast - > name ; <nl> - else <nl> - throw Exception ( <nl> - " Version column name must be an unquoted string " + getMergeTreeVerboseHelp ( is_extended_storage_def ) , <nl> - ErrorCodes : : BAD_ARGUMENTS ) ; <nl> - <nl> - args . pop_back ( ) ; <nl> - } <nl> - } <nl> - else if ( merging_params . mode = = MergeTreeData : : MergingParams : : Summing ) <nl> - { <nl> - / / / If the last element is not index_granularity or replica_name ( a literal ) , then this is a list of summable columns . <nl> - if ( ! args . empty ( ) & & ! typeid_cast < const ASTLiteral * > ( & * args . 
back ( ) ) ) <nl> - { <nl> - merging_params . columns_to_sum = extractColumnNames ( args . back ( ) ) ; <nl> - args . pop_back ( ) ; <nl> - } <nl> - } <nl> - else if ( merging_params . mode = = MergeTreeData : : MergingParams : : Graphite ) <nl> - { <nl> - String graphite_config_name ; <nl> - String error_msg = " Last parameter of GraphiteMergeTree must be name ( in single quotes ) of element in configuration file with Graphite options " ; <nl> - error_msg + = getMergeTreeVerboseHelp ( is_extended_storage_def ) ; <nl> - <nl> - if ( auto ast = typeid_cast < ASTLiteral * > ( & * args . back ( ) ) ) <nl> - { <nl> - if ( ast - > value . getType ( ) ! = Field : : Types : : String ) <nl> - throw Exception ( error_msg , ErrorCodes : : BAD_ARGUMENTS ) ; <nl> - <nl> - graphite_config_name = ast - > value . get < String > ( ) ; <nl> - } <nl> - else <nl> - throw Exception ( error_msg , ErrorCodes : : BAD_ARGUMENTS ) ; <nl> - <nl> - args . pop_back ( ) ; <nl> - setGraphitePatternsFromConfig ( context , graphite_config_name , merging_params . graphite_params ) ; <nl> - } <nl> - <nl> - String date_column_name ; <nl> - ASTPtr partition_expr_list ; <nl> - ASTPtr primary_expr_list ; <nl> - ASTPtr sampling_expression ; <nl> - MergeTreeSettings storage_settings = context . getMergeTreeSettings ( ) ; <nl> - <nl> - if ( is_extended_storage_def ) <nl> - { <nl> - if ( storage_def . partition_by ) <nl> - partition_expr_list = extractKeyExpressionList ( * storage_def . partition_by ) ; <nl> - <nl> - if ( storage_def . order_by ) <nl> - primary_expr_list = extractKeyExpressionList ( * storage_def . order_by ) ; <nl> - <nl> - if ( storage_def . sample_by ) <nl> - sampling_expression = storage_def . sample_by - > ptr ( ) ; <nl> - <nl> - storage_settings . loadFromQuery ( storage_def ) ; <nl> - } <nl> - else <nl> - { <nl> - / / / If there is an expression for sampling . MergeTree ( date , [ sample_key ] , primary_key , index_granularity ) <nl> - if ( args . size ( ) = = 4 ) <nl> - { <nl> - sampling_expression = args [ 1 ] ; <nl> - args . erase ( args . begin ( ) + 1 ) ; <nl> - } <nl> - <nl> - / / / Now only three parameters remain - date ( or partitioning expression ) , primary_key , index_granularity . <nl> - <nl> - if ( auto ast = typeid_cast < ASTIdentifier * > ( args [ 0 ] . get ( ) ) ) <nl> - date_column_name = ast - > name ; <nl> - else <nl> - throw Exception ( <nl> - " Date column name must be an unquoted string " + getMergeTreeVerboseHelp ( is_extended_storage_def ) , <nl> - ErrorCodes : : BAD_ARGUMENTS ) ; <nl> - <nl> - if ( merging_params . mode ! = MergeTreeData : : MergingParams : : Unsorted ) <nl> - primary_expr_list = extractKeyExpressionList ( * args [ 1 ] ) ; <nl> - <nl> - auto ast = typeid_cast < ASTLiteral * > ( & * args . back ( ) ) ; <nl> - if ( ast & & ast - > value . getType ( ) = = Field : : Types : : UInt64 ) <nl> - storage_settings . 
index_granularity = safeGet < UInt64 > ( ast - > value ) ; <nl> - else <nl> - throw Exception ( <nl> - " Index granularity must be a positive integer " + getMergeTreeVerboseHelp ( is_extended_storage_def ) , <nl> - ErrorCodes : : BAD_ARGUMENTS ) ; <nl> - } <nl> - <nl> - if ( replicated ) <nl> - return StorageReplicatedMergeTree : : create ( <nl> - zookeeper_path , replica_name , attach , data_path , database_name , table_name , <nl> - columns , materialized_columns , alias_columns , column_defaults , <nl> - context , primary_expr_list , date_column_name , partition_expr_list , <nl> - sampling_expression , merging_params , storage_settings , <nl> - has_force_restore_data_flag ) ; <nl> - else <nl> - return StorageMergeTree : : create ( <nl> - data_path , database_name , table_name , <nl> - columns , materialized_columns , alias_columns , column_defaults , attach , <nl> - context , primary_expr_list , date_column_name , partition_expr_list , <nl> - sampling_expression , merging_params , storage_settings , <nl> - has_force_restore_data_flag ) ; <nl> } <nl> else <nl> throw Exception ( " Unknown storage " + name , ErrorCodes : : UNKNOWN_STORAGE ) ; <nl> mmm a / dbms / src / Storages / StorageFactory . h <nl> ppp b / dbms / src / Storages / StorageFactory . h <nl> <nl> <nl> # include < Storages / IStorage . h > <nl> # include < ext / singleton . h > <nl> + # include < unordered_map > <nl> <nl> <nl> namespace DB <nl> class Context ; <nl> class StorageFactory : public ext : : singleton < StorageFactory > <nl> { <nl> public : <nl> + struct Arguments <nl> + { <nl> + ASTs & args ; <nl> + const String & data_path ; <nl> + const String & table_name ; <nl> + const String & database_name ; <nl> + Context & local_context ; <nl> + Context & context ; <nl> + const NamesAndTypesList & columns ; <nl> + const NamesAndTypesList & materialized_columns ; <nl> + const NamesAndTypesList & alias_columns ; <nl> + const ColumnDefaults & column_defaults ; <nl> + bool attach ; <nl> + bool has_force_restore_data_flag ; <nl> + } ; <nl> + <nl> + using Creator = std : : function < StoragePtr ( const Arguments & arguments ) > ; <nl> + <nl> StoragePtr get ( <nl> ASTCreateQuery & query , <nl> const String & data_path , <nl> class StorageFactory : public ext : : singleton < StorageFactory > <nl> const ColumnDefaults & column_defaults , <nl> bool attach , <nl> bool has_force_restore_data_flag ) const ; <nl> + <nl> + / / / Register a function by its name . <nl> + / / / No locking , you must register all functions before usage of get . <nl> + void registerStorage ( const std : : string & name , Creator creator ) ; <nl> + <nl> + private : <nl> + using Storages = std : : unordered_map < std : : string , Creator > ; <nl> + Storages storages ; <nl> } ; <nl> <nl> } <nl>
Fewer dependencies in StorageFactory: development [#CLICKHOUSE-2].
ClickHouse/ClickHouse
72445148594c4cecc823e98c995aa1705c05bf54
2017-12-30T04:00:39Z
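The commit above turns `StorageFactory::get` from one monolithic if/else chain into a lookup over creators installed via `registerStorage`. A minimal sketch of that registry pattern, with illustrative `Storage` and `Args` stand-ins in place of ClickHouse's real types:

```cpp
#include <functional>
#include <iostream>
#include <memory>
#include <stdexcept>
#include <string>
#include <unordered_map>
#include <vector>

// Illustrative stand-ins, not ClickHouse's real classes.
struct Storage
{
    explicit Storage(std::string n) : name(std::move(n)) {}
    std::string name;
};
using StoragePtr = std::shared_ptr<Storage>;
using Args = std::vector<std::string>;

class StorageFactory
{
public:
    using Creator = std::function<StoragePtr(const Args&)>;

    // No locking: register everything before the first get(), as the patch notes.
    void registerStorage(const std::string& name, Creator creator)
    {
        if (!storages.emplace(name, std::move(creator)).second)
            throw std::logic_error("storage name '" + name + "' is not unique");
    }

    StoragePtr get(const std::string& name, const Args& args) const
    {
        auto it = storages.find(name);
        if (it == storages.end())
            throw std::runtime_error("unknown table engine " + name);
        return it->second(args);   // dispatch to the registered creator
    }

private:
    std::unordered_map<std::string, Creator> storages;
};

int main()
{
    StorageFactory factory;
    factory.registerStorage("Memory", [](const Args&) { return std::make_shared<Storage>("Memory"); });
    factory.registerStorage("Log",    [](const Args&) { return std::make_shared<Storage>("Log"); });

    std::cout << factory.get("Memory", {})->name << "\n";   // prints "Memory"
}
```

Each engine's translation unit can then register itself (as `registerStorageMergeTree`, `registerStorageBuffer`, and `registerStorageDictionary` do in the diff), so the factory no longer has to include every storage header.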
mmm a / examples / digits . py <nl> ppp b / examples / digits . py <nl> <nl> import tensorflow as tf <nl> <nl> import skflow <nl> + from skflow import monitors <nl> <nl> - # Load dataset and split it into train / test subsets . <nl> + # Load dataset <nl> <nl> digits = datasets . load_digits ( ) <nl> X = digits . images <nl> y = digits . target <nl> <nl> + # Split it into train / test subsets <nl> + <nl> X_train , X_test , y_train , y_test = cross_validation . train_test_split ( X , y , <nl> - test_size = 0 . 2 , random_state = 42 ) <nl> + test_size = 0 . 2 , <nl> + random_state = 42 ) <nl> + <nl> + # Split X_train again to create validation data <nl> + <nl> + X_train , X_val , y_train , y_val = cross_validation . train_test_split ( X_train , <nl> + y_train , <nl> + test_size = 0 . 2 , <nl> + random_state = 42 ) <nl> <nl> # TensorFlow model using Scikit Flow ops <nl> <nl> + <nl> def conv_model ( X , y ) : <nl> X = tf . expand_dims ( X , 3 ) <nl> features = tf . reduce_max ( skflow . ops . conv2d ( X , 12 , [ 3 , 3 ] ) , [ 1 , 2 ] ) <nl> features = tf . reshape ( features , [ - 1 , 12 ] ) <nl> return skflow . models . logistic_regression ( features , y ) <nl> <nl> + val_monitor = monitors . ValidationMonitor ( X_val , y_val , n_classes = 10 , print_steps = 50 ) <nl> # Create a classifier , train and predict . <nl> classifier = skflow . TensorFlowEstimator ( model_fn = conv_model , n_classes = 10 , <nl> - steps = 500 , learning_rate = 0 . 05 , <nl> + steps = 1000 , learning_rate = 0 . 05 , <nl> batch_size = 128 ) <nl> - classifier . fit ( X_train , y_train ) <nl> + classifier . fit ( X_train , y_train , val_monitor ) <nl> score = metrics . accuracy_score ( y_test , classifier . predict ( X_test ) ) <nl> - print ( ' Accuracy : { 0 : f } ' . format ( score ) ) <nl> + print ( ' Test Accuracy : { 0 : f } ' . format ( score ) ) <nl> similarity index 66 % <nl> rename from examples / iris_early_stopping . py <nl> rename to examples / iris_val_based_early_stopping . py <nl> mmm a / examples / iris_early_stopping . py <nl> ppp b / examples / iris_val_based_early_stopping . py <nl> <nl> # See the License for the specific language governing permissions and <nl> # limitations under the License . <nl> <nl> - import tensorflow as tf <nl> from tensorflow . python . platform import googletest <nl> <nl> from sklearn import datasets , metrics <nl> <nl> test_size = 0 . 2 , <nl> random_state = 42 ) <nl> <nl> - # classifier without early stopping - overfitting <nl> + X_train , X_val , y_train , y_val = train_test_split ( X_train , y_train , <nl> + test_size = 0 . 2 , random_state = 42 ) <nl> + val_monitor = skflow . monitors . ValidationMonitor ( X_val , y_val , <nl> + early_stopping_rounds = 200 , <nl> + n_classes = 3 ) <nl> + <nl> + # classifier with early stopping on training data <nl> classifier1 = skflow . TensorFlowDNNClassifier ( hidden_units = [ 10 , 20 , 10 ] , <nl> - n_classes = 3 , steps = 800 ) <nl> + n_classes = 3 , steps = 2000 ) <nl> classifier1 . fit ( X_train , y_train ) <nl> score1 = metrics . accuracy_score ( y_test , classifier1 . predict ( X_test ) ) <nl> <nl> - # classifier with early stopping - improved accuracy on testing set <nl> + # classifier with early stopping on validation data <nl> classifier2 = skflow . TensorFlowDNNClassifier ( hidden_units = [ 10 , 20 , 10 ] , <nl> - n_classes = 3 , steps = 1000 , <nl> - early_stopping_rounds = 200 ) <nl> - classifier2 . fit ( X_train , y_train ) <nl> + n_classes = 3 , steps = 2000 ) <nl> + classifier2 . 
fit ( X_train , y_train , val_monitor ) <nl> score2 = metrics . accuracy_score ( y_test , classifier2 . predict ( X_test ) ) <nl> <nl> - # you can expect the score is improved by using early stopping <nl> + # in many applications , the score is improved by using early stopping on val data <nl> print ( score2 > score1 ) <nl> mmm a / skflow / estimators / base . py <nl> ppp b / skflow / estimators / base . py <nl> <nl> from skflow . io . data_feeder import setup_train_data_feeder <nl> from skflow . io . data_feeder import setup_predict_data_feeder <nl> from skflow . ops . dropout_ops import DROPOUTS <nl> + from skflow import monitors <nl> <nl> from skflow . addons . config_addon import ConfigAddon <nl> <nl> def exp_decay ( global_step ) : <nl> 0 : the algorithm and debug information is muted . <nl> 1 : trainer prints the progress . <nl> 2 : log device placement is printed . <nl> - early_stopping_rounds : Activates early stopping if this is not None . <nl> - Loss needs to decrease at least every every < early_stopping_rounds > <nl> - round ( s ) to continue training . ( default : None ) <nl> max_to_keep : The maximum number of recent checkpoint files to keep . <nl> As new files are created , older files are deleted . <nl> If None or 0 , all checkpoint files are kept . <nl> def __init__ ( self , model_fn , n_classes , tf_master = " " , batch_size = 32 , <nl> learning_rate = 0 . 1 , class_weight = None , <nl> tf_random_seed = 42 , continue_training = False , <nl> config_addon = None , verbose = 1 , <nl> - early_stopping_rounds = None , <nl> max_to_keep = 5 , keep_checkpoint_every_n_hours = 10000 ) : <nl> + <nl> self . n_classes = n_classes <nl> self . tf_master = tf_master <nl> self . batch_size = batch_size <nl> def __init__ ( self , model_fn , n_classes , tf_master = " " , batch_size = 32 , <nl> self . model_fn = model_fn <nl> self . continue_training = continue_training <nl> self . _initialized = False <nl> - self . _early_stopping_rounds = early_stopping_rounds <nl> self . max_to_keep = max_to_keep <nl> self . keep_checkpoint_every_n_hours = keep_checkpoint_every_n_hours <nl> self . class_weight = class_weight <nl> def _setup_training ( self ) : <nl> max_to_keep = self . max_to_keep , <nl> keep_checkpoint_every_n_hours = self . keep_checkpoint_every_n_hours ) <nl> <nl> + # Enable monitor to create validation data dict with appropriate tf placeholders <nl> + self . _monitor . create_val_feed_dict ( self . _inp , self . _out ) <nl> + <nl> # Create session to run model with . <nl> if self . config_addon is None : <nl> self . config_addon = ConfigAddon ( verbose = self . verbose ) <nl> def _setup_summary_writer ( self , logdir ) : <nl> os . path . join ( logdir , datetime . datetime . now ( ) . strftime ( ' % Y - % m - % d_ % H - % M - % S ' ) ) , <nl> graph_def = self . _session . graph_def ) <nl> <nl> - def fit ( self , X , y , logdir = None ) : <nl> + def fit ( self , X , y , monitor = None , logdir = None ) : <nl> " " " Builds a neural network model given provided ` model_fn ` and training <nl> data X and y . <nl> <nl> def fit ( self , X , y , logdir = None ) : <nl> y : vector or matrix [ n_samples ] or [ n_samples , n_outputs ] . Can be <nl> iterator that returns array of targets . The training target values <nl> ( class labels in classification , real numbers in regression ) . <nl> + monitor : Monitor object to print training progress and invoke early stopping <nl> logdir : the directory to save the log file that can be used for <nl> optional visualization . 
<nl> <nl> def fit ( self , X , y , logdir = None ) : <nl> self . _data_feeder = setup_train_data_feeder ( X , y , <nl> self . n_classes , <nl> self . batch_size ) <nl> + <nl> + if monitor is None : <nl> + self . _monitor = monitors . default_monitor ( ) <nl> + else : <nl> + self . _monitor = monitor <nl> + <nl> if not self . continue_training or not self . _initialized : <nl> # Sets up model and trainer . <nl> self . _setup_training ( ) <nl> def fit ( self , X , y , logdir = None ) : <nl> self . _data_feeder . get_feed_dict_fn ( <nl> self . _inp , self . _out ) , <nl> self . steps , <nl> + self . _monitor , <nl> self . _summary_writer , <nl> self . _summaries , <nl> - verbose = self . verbose , <nl> - early_stopping_rounds = self . _early_stopping_rounds , <nl> feed_params_fn = self . _data_feeder . get_feed_params ) <nl> return self <nl> <nl> def restore ( cls , path , config_addon = None ) : <nl> estimator = getattr ( estimators , class_name ) ( * * model_def ) <nl> estimator . _restore ( path ) <nl> return estimator <nl> - <nl> mmm a / skflow / estimators / dnn . py <nl> ppp b / skflow / estimators / dnn . py <nl> def exp_decay ( global_step ) : <nl> model will be continuely trained on every call of fit . <nl> config_addon : ConfigAddon object that controls the configurations of the session , <nl> e . g . num_cores , gpu_memory_fraction , etc . <nl> - early_stopping_rounds : Activates early stopping if this is not None . <nl> - Loss needs to decrease at least every every < early_stopping_rounds > <nl> - round ( s ) to continue training . ( default : None ) <nl> max_to_keep : The maximum number of recent checkpoint files to keep . <nl> As new files are created , older files are deleted . <nl> If None or 0 , all checkpoint files are kept . <nl> def __init__ ( self , hidden_units , n_classes , tf_master = " " , batch_size = 32 , <nl> steps = 200 , optimizer = " SGD " , learning_rate = 0 . 1 , <nl> class_weight = None , <nl> tf_random_seed = 42 , continue_training = False , config_addon = None , <nl> - verbose = 1 , early_stopping_rounds = None , <nl> - max_to_keep = 5 , keep_checkpoint_every_n_hours = 10000 ) : <nl> + verbose = 1 , max_to_keep = 5 , keep_checkpoint_every_n_hours = 10000 ) : <nl> + <nl> self . hidden_units = hidden_units <nl> super ( TensorFlowDNNClassifier , self ) . __init__ ( <nl> model_fn = self . _model_fn , <nl> def __init__ ( self , hidden_units , n_classes , tf_master = " " , batch_size = 32 , <nl> tf_random_seed = tf_random_seed , <nl> continue_training = continue_training , <nl> config_addon = config_addon , verbose = verbose , <nl> - early_stopping_rounds = early_stopping_rounds , <nl> max_to_keep = max_to_keep , <nl> keep_checkpoint_every_n_hours = keep_checkpoint_every_n_hours ) <nl> <nl> def exp_decay ( global_step ) : <nl> model will be continuely trained on every call of fit . <nl> config_addon : ConfigAddon object that controls the configurations of the session , <nl> e . g . num_cores , gpu_memory_fraction , etc . <nl> - early_stopping_rounds : Activates early stopping if this is not None . <nl> - Loss needs to decrease at least every every < early_stopping_rounds > <nl> - round ( s ) to continue training . ( default : None ) <nl> verbose : Controls the verbosity , possible values : <nl> 0 : the algorithm and debug information is muted . <nl> 1 : trainer prints the progress . <nl> 2 : log device placement is printed . <nl> - early_stopping_rounds : Activates early stopping if this is not None . 
<nl> - Loss needs to decrease at least every every < early_stopping_rounds > <nl> - round ( s ) to continue training . ( default : None ) <nl> max_to_keep : The maximum number of recent checkpoint files to keep . <nl> As new files are created , older files are deleted . <nl> If None or 0 , all checkpoint files are kept . <nl> def exp_decay ( global_step ) : <nl> def __init__ ( self , hidden_units , n_classes = 0 , tf_master = " " , batch_size = 32 , <nl> steps = 200 , optimizer = " SGD " , learning_rate = 0 . 1 , <nl> tf_random_seed = 42 , continue_training = False , config_addon = None , <nl> - verbose = 1 , early_stopping_rounds = None , <nl> - max_to_keep = 5 , keep_checkpoint_every_n_hours = 10000 ) : <nl> + verbose = 1 , max_to_keep = 5 , keep_checkpoint_every_n_hours = 10000 ) : <nl> + <nl> self . hidden_units = hidden_units <nl> super ( TensorFlowDNNRegressor , self ) . __init__ ( <nl> model_fn = self . _model_fn , <nl> def __init__ ( self , hidden_units , n_classes = 0 , tf_master = " " , batch_size = 32 , <nl> learning_rate = learning_rate , tf_random_seed = tf_random_seed , <nl> continue_training = continue_training , <nl> config_addon = config_addon , verbose = verbose , <nl> - early_stopping_rounds = early_stopping_rounds , <nl> max_to_keep = max_to_keep , <nl> keep_checkpoint_every_n_hours = keep_checkpoint_every_n_hours ) <nl> <nl> def bias_ ( self ) : <nl> biases . append ( self . get_tensor_value ( ' dnn / layer % d / Linear / Bias : 0 ' % layer ) ) <nl> biases . append ( self . get_tensor_value ( ' linear_regression / bias : 0 ' ) ) <nl> return biases <nl> - <nl> mmm a / skflow / estimators / linear . py <nl> ppp b / skflow / estimators / linear . py <nl> class TensorFlowLinearRegressor ( TensorFlowEstimator , RegressorMixin ) : <nl> <nl> def __init__ ( self , n_classes = 0 , tf_master = " " , batch_size = 32 , steps = 200 , optimizer = " SGD " , <nl> learning_rate = 0 . 1 , tf_random_seed = 42 , continue_training = False , <nl> - config_addon = None , verbose = 1 , early_stopping_rounds = None , <nl> + config_addon = None , verbose = 1 , <nl> max_to_keep = 5 , keep_checkpoint_every_n_hours = 10000 ) : <nl> + <nl> super ( TensorFlowLinearRegressor , self ) . __init__ ( <nl> model_fn = models . linear_regression , n_classes = n_classes , <nl> tf_master = tf_master , <nl> batch_size = batch_size , steps = steps , optimizer = optimizer , <nl> learning_rate = learning_rate , tf_random_seed = tf_random_seed , <nl> continue_training = continue_training , config_addon = config_addon , <nl> - verbose = verbose , early_stopping_rounds = early_stopping_rounds , <nl> - max_to_keep = max_to_keep , <nl> + verbose = verbose , max_to_keep = max_to_keep , <nl> keep_checkpoint_every_n_hours = keep_checkpoint_every_n_hours ) <nl> <nl> @ property <nl> class TensorFlowLinearClassifier ( TensorFlowEstimator , ClassifierMixin ) : <nl> def __init__ ( self , n_classes , tf_master = " " , batch_size = 32 , steps = 200 , optimizer = " SGD " , <nl> learning_rate = 0 . 1 , class_weight = None , <nl> tf_random_seed = 42 , continue_training = False , config_addon = None , <nl> - verbose = 1 , early_stopping_rounds = None , <nl> - max_to_keep = 5 , keep_checkpoint_every_n_hours = 10000 ) : <nl> + verbose = 1 , max_to_keep = 5 , keep_checkpoint_every_n_hours = 10000 ) : <nl> + <nl> super ( TensorFlowLinearClassifier , self ) . __init__ ( <nl> model_fn = models . 
logistic_regression , n_classes = n_classes , <nl> tf_master = tf_master , <nl> def __init__ ( self , n_classes , tf_master = " " , batch_size = 32 , steps = 200 , optimizer = <nl> learning_rate = learning_rate , class_weight = class_weight , <nl> tf_random_seed = tf_random_seed , <nl> continue_training = continue_training , config_addon = config_addon , <nl> - verbose = verbose , early_stopping_rounds = early_stopping_rounds , <nl> - max_to_keep = max_to_keep , <nl> + verbose = verbose , max_to_keep = max_to_keep , <nl> keep_checkpoint_every_n_hours = keep_checkpoint_every_n_hours ) <nl> <nl> @ property <nl> mmm a / skflow / estimators / rnn . py <nl> ppp b / skflow / estimators / rnn . py <nl> def null_input_op_fn ( X ) : <nl> " " " This function does no transformation on the inputs , used as default " " " <nl> return X <nl> <nl> + <nl> class TensorFlowRNNClassifier ( TensorFlowEstimator , ClassifierMixin ) : <nl> " " " TensorFlow RNN Classifier model . <nl> <nl> def exp_decay ( global_step ) : <nl> continue_training : when continue_training is True , once initialized <nl> model will be continuely trained on every call of fit . <nl> num_cores : Number of cores to be used . ( default : 4 ) <nl> - early_stopping_rounds : Activates early stopping if this is not None . <nl> - Loss needs to decrease at least every every < early_stopping_rounds > <nl> - round ( s ) to continue training . ( default : None ) <nl> max_to_keep : The maximum number of recent checkpoint files to keep . <nl> As new files are created , older files are deleted . <nl> If None or 0 , all checkpoint files are kept . <nl> def __init__ ( self , rnn_size , n_classes , cell_type = ' gru ' , num_layers = 1 , <nl> steps = 50 , optimizer = " SGD " , learning_rate = 0 . 1 , <nl> class_weight = None , <nl> tf_random_seed = 42 , continue_training = False , <nl> - config_addon = None , verbose = 1 , early_stopping_rounds = None , <nl> + config_addon = None , verbose = 1 , <nl> max_to_keep = 5 , keep_checkpoint_every_n_hours = 10000 ) : <nl> + <nl> self . rnn_size = rnn_size <nl> self . cell_type = cell_type <nl> self . input_op_fn = input_op_fn <nl> def __init__ ( self , rnn_size , n_classes , cell_type = ' gru ' , num_layers = 1 , <nl> tf_random_seed = tf_random_seed , <nl> continue_training = continue_training , config_addon = config_addon , <nl> verbose = verbose , <nl> - early_stopping_rounds = early_stopping_rounds , <nl> max_to_keep = max_to_keep , <nl> keep_checkpoint_every_n_hours = keep_checkpoint_every_n_hours ) <nl> <nl> def exp_decay ( global_step ) : <nl> continue_training : when continue_training is True , once initialized <nl> model will be continuely trained on every call of fit . <nl> num_cores : Number of cores to be used . ( default : 4 ) <nl> - early_stopping_rounds : Activates early stopping if this is not None . <nl> - Loss needs to decrease at least every every < early_stopping_rounds > <nl> - round ( s ) to continue training . ( default : None ) <nl> verbose : Controls the verbosity , possible values : <nl> 0 : the algorithm and debug information is muted . <nl> 1 : trainer prints the progress . <nl> 2 : log device placement is printed . <nl> - early_stopping_rounds : Activates early stopping if this is not None . <nl> - Loss needs to decrease at least every every < early_stopping_rounds > <nl> - round ( s ) to continue training . ( default : None ) <nl> max_to_keep : The maximum number of recent checkpoint files to keep . <nl> As new files are created , older files are deleted . 
<nl> If None or 0 , all checkpoint files are kept . <nl> def __init__ ( self , rnn_size , cell_type = ' gru ' , num_layers = 1 , <nl> n_classes = 0 , tf_master = " " , batch_size = 32 , <nl> steps = 50 , optimizer = " SGD " , learning_rate = 0 . 1 , <nl> tf_random_seed = 42 , continue_training = False , <nl> - config_addon = None , verbose = 1 , early_stopping_rounds = None , <nl> + config_addon = None , verbose = 1 , <nl> max_to_keep = 5 , keep_checkpoint_every_n_hours = 10000 ) : <nl> + <nl> self . rnn_size = rnn_size <nl> self . cell_type = cell_type <nl> self . input_op_fn = input_op_fn <nl> def __init__ ( self , rnn_size , cell_type = ' gru ' , num_layers = 1 , <nl> batch_size = batch_size , steps = steps , optimizer = optimizer , <nl> learning_rate = learning_rate , tf_random_seed = tf_random_seed , <nl> continue_training = continue_training , config_addon = config_addon , <nl> - verbose = verbose , <nl> - early_stopping_rounds = early_stopping_rounds , <nl> - max_to_keep = max_to_keep , <nl> + verbose = verbose , max_to_keep = max_to_keep , <nl> keep_checkpoint_every_n_hours = keep_checkpoint_every_n_hours ) <nl> <nl> def _model_fn ( self , X , y ) : <nl> new file mode 100644 <nl> index 0000000000000 . . a339be6c97daf <nl> mmm / dev / null <nl> ppp b / skflow / monitors . py <nl> <nl> + " " " Monitors to track model training , report on progress and request early stopping " " " <nl> + # Copyright 2015 - present Scikit Flow Authors . All Rights Reserved . <nl> + # <nl> + # Licensed under the Apache License , Version 2 . 0 ( the " License " ) ; <nl> + # you may not use this file except in compliance with the License . <nl> + # You may obtain a copy of the License at <nl> + # <nl> + # http : / / www . apache . org / licenses / LICENSE - 2 . 0 <nl> + # <nl> + # Unless required by applicable law or agreed to in writing , software <nl> + # distributed under the License is distributed on an " AS IS " BASIS , <nl> + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . <nl> + # See the License for the specific language governing permissions and <nl> + # limitations under the License . <nl> + <nl> + from __future__ import print_function <nl> + import sys <nl> + import numpy as np <nl> + <nl> + from skflow . io . data_feeder import setup_train_data_feeder <nl> + <nl> + <nl> + # pylint : disable = too - many - instance - attributes <nl> + # pylint : disable = too - few - public - methods <nl> + # pylint : disable = too - many - arguments <nl> + # pylint : disable = attribute - defined - outside - init <nl> + <nl> + def default_monitor ( ) : <nl> + " " " returns very simple monitor object to summarize training progress " " " <nl> + return BaseMonitor ( ) <nl> + <nl> + <nl> + class BaseMonitor ( object ) : <nl> + " " " Base class for all learning monitors . Stores and reports training loss throughout learning <nl> + <nl> + Parameters : <nl> + print_steps : Number of steps in between printing cost . <nl> + early_stopping_rounds : Activates early stopping if this is not None . <nl> + Loss needs to decrease at least every every < early_stopping_rounds > <nl> + round ( s ) to continue training . ( default : None ) <nl> + <nl> + " " " <nl> + def __init__ ( self , print_steps = 100 , early_stopping_rounds = 250 , verbose = 1 ) : <nl> + self . print_steps = print_steps <nl> + self . early_stopping_rounds = early_stopping_rounds <nl> + <nl> + self . converged = False <nl> + self . min_loss = np . inf <nl> + self . min_loss_i = 0 <nl> + self . last_loss_seen = np . 
inf <nl> + self . steps = 0 <nl> + self . print_train_loss_buffer = [ ] <nl> + self . all_train_loss_buffer = [ ] <nl> + self . verbose = verbose <nl> + self . epoch = None <nl> + <nl> + def update ( self , global_step , step_number , training_loss , <nl> + sess , feed_params_fn , loss_expression_tensor ) : <nl> + " " " Adds training_loss to monitor . Triggers printed output if appropriate <nl> + <nl> + global_step : <nl> + step_number : current step in training <nl> + training_loss : float value of training loss <nl> + sess : session for computation ( used to calculate validation loss ) <nl> + feed_params_fn : function generating dict with information like epoch . Sometimes None . <nl> + loss_expression_tensor : Tensor applied to validation data to calculate val loss <nl> + <nl> + " " " <nl> + self . steps = step_number <nl> + self . global_step = global_step <nl> + self . print_train_loss_buffer . append ( training_loss ) <nl> + self . all_train_loss_buffer . append ( training_loss ) <nl> + self . sess = sess <nl> + self . loss_expression_tensor = loss_expression_tensor <nl> + self . _set_last_loss_seen ( ) <nl> + if self . last_loss_seen < self . min_loss : <nl> + self . min_loss = training_loss <nl> + self . min_loss_i = self . steps <nl> + self . _set_epoch ( feed_params_fn ) <nl> + self . report ( ) <nl> + <nl> + def _set_last_loss_seen ( self ) : <nl> + " " " Sets last_loss_seen attribute to most recent training error " " " <nl> + self . last_loss_seen = self . all_train_loss_buffer [ - 1 ] <nl> + <nl> + def report ( self ) : <nl> + " " " Checks whether to report , and prints loss information if appropriate " " " <nl> + if self . verbose and ( self . steps % self . print_steps = = 0 ) : <nl> + self . _set_training_summary ( ) <nl> + print ( self . _summary_str ) <nl> + <nl> + def monitor_inducing_stop ( self ) : <nl> + " " " Returns True if the monitor requests the model stop ( e . g . for early stopping ) " " " <nl> + stop_now = ( self . steps - self . min_loss_i > = self . early_stopping_rounds ) <nl> + if stop_now : <nl> + sys . stderr . write ( " Stopping . Best step : \ n step { } with loss { } \ n " <nl> + . format ( self . min_loss_i , self . min_loss ) ) <nl> + return stop_now <nl> + <nl> + def create_val_feed_dict ( self , inp , out ) : <nl> + " " " Validation requires access to TensorFlow placeholders . Not used in this Monitor " " " <nl> + pass <nl> + <nl> + def _set_epoch ( self , feed_params_fn ) : <nl> + " " " Sets self . epoch from a function that generates a dictionary including this info " " " <nl> + if feed_params_fn : <nl> + feed_params = feed_params_fn ( ) <nl> + self . epoch = feed_params [ ' epoch ' ] if ' epoch ' in feed_params else None <nl> + <nl> + def _set_training_summary ( self ) : <nl> + " " " Returns the string to be written describing training progress " " " <nl> + avg_train_loss = np . mean ( self . print_train_loss_buffer ) <nl> + self . print_train_loss_buffer = [ ] <nl> + if self . epoch : <nl> + self . _summary_str = ( " Step # { step } , epoch # { epoch } , avg . train loss : { loss : . 5f } " <nl> + . format ( step = self . steps , loss = avg_train_loss , <nl> + epoch = self . epoch ) ) <nl> + else : <nl> + self . _summary_str = ( " Step # { step } , avg . train loss : { loss : . 5f } " <nl> + . format ( step = self . global_step , <nl> + loss = avg_train_loss ) ) <nl> + self . _modify_summary_string ( ) <nl> + <nl> + def _modify_summary_string ( self ) : <nl> + " " " Makes monitor specific changes to printed summary . 
Nothing interesting in BaseMonitor " " " <nl> + pass <nl> + <nl> + <nl> + class ValidationMonitor ( BaseMonitor ) : <nl> + " " " Monitor that reports score for validation data and uses validation data for early stopping <nl> + <nl> + val_X : Validation features <nl> + val_y : Validation labels <nl> + n_classes : Number of labels in output . 0 for regression <nl> + print_steps : Number of steps in between printing cost . <nl> + early_stopping_rounds : Activates early stopping if this is not None . <nl> + Loss needs to decrease at least every every < early_stopping_rounds > <nl> + round ( s ) to continue training . ( default : None ) <nl> + <nl> + " " " <nl> + def __init__ ( self , val_X , val_y , n_classes = 0 , print_steps = 100 , early_stopping_rounds = 250 ) : <nl> + super ( ValidationMonitor , self ) . __init__ ( print_steps = print_steps , <nl> + early_stopping_rounds = early_stopping_rounds ) <nl> + self . val_feeder = setup_train_data_feeder ( val_X , val_y , n_classes , - 1 ) <nl> + self . print_val_loss_buffer = [ ] <nl> + self . all_val_loss_buffer = [ ] <nl> + <nl> + def create_val_feed_dict ( self , inp , out ) : <nl> + " " " Set tensorflow placeholders and create validation data feed " " " <nl> + self . val_dict = self . val_feeder . get_feed_dict_fn ( inp , out ) ( ) <nl> + <nl> + def _set_last_loss_seen ( self ) : <nl> + " " " Sets self . last_loss_seen to most recent validation loss <nl> + <nl> + Also stores this value to appropriate buffers <nl> + " " " <nl> + [ val_loss ] = self . sess . run ( [ self . loss_expression_tensor ] , feed_dict = self . val_dict ) <nl> + self . last_loss_seen = val_loss <nl> + self . all_val_loss_buffer . append ( val_loss ) <nl> + self . print_val_loss_buffer . append ( val_loss ) <nl> + <nl> + def _modify_summary_string ( self ) : <nl> + " " " Adds validation data to string to print and resets validation printing buffer " " " <nl> + avg_val_loss = np . mean ( self . print_val_loss_buffer ) <nl> + self . print_val_loss_buffer = [ ] <nl> + val_loss_string = " avg . val loss : { val_loss : . 5f } " . format ( val_loss = avg_val_loss ) <nl> + self . _summary_str = ( " , " . join ( [ self . _summary_str , val_loss_string ] ) ) <nl> mmm a / skflow / tests / test_early_stopping . py <nl> ppp b / skflow / tests / test_early_stopping . py <nl> <nl> <nl> import skflow <nl> <nl> + <nl> class EarlyStoppingTest ( tf . test . TestCase ) : <nl> <nl> def testIrisES ( self ) : <nl> def testIrisES ( self ) : <nl> test_size = 0 . 2 , <nl> random_state = 42 ) <nl> <nl> + X_train , X_val , y_train , y_val = train_test_split ( X_train , y_train , test_size = 0 . 2 ) <nl> + val_monitor = skflow . monitors . ValidationMonitor ( X_val , y_val , n_classes = 3 ) <nl> + <nl> # classifier without early stopping - overfitting <nl> classifier1 = skflow . TensorFlowDNNClassifier ( hidden_units = [ 10 , 20 , 10 ] , <nl> - n_classes = 3 , steps = 1000 ) <nl> + n_classes = 3 , steps = 1000 ) <nl> classifier1 . fit ( X_train , y_train ) <nl> score1 = metrics . accuracy_score ( y_test , classifier1 . predict ( X_test ) ) <nl> <nl> # classifier with early stopping - improved accuracy on testing set <nl> classifier2 = skflow . TensorFlowDNNClassifier ( hidden_units = [ 10 , 20 , 10 ] , <nl> - n_classes = 3 , steps = 1000 , <nl> - early_stopping_rounds = 300 ) <nl> - classifier2 . fit ( X_train , y_train ) <nl> + n_classes = 3 , steps = 1000 ) <nl> + <nl> + classifier2 . fit ( X_train , y_train , val_monitor ) <nl> score2 = metrics . accuracy_score ( y_test , classifier2 . 
predict ( X_test ) ) <nl> <nl> # self . assertGreater ( score2 , score1 , " No improvement using early stopping . " ) <nl> mmm a / skflow / trainer . py <nl> ppp b / skflow / trainer . py <nl> <nl> <nl> from __future__ import division , print_function , absolute_import <nl> <nl> - import sys <nl> - import math <nl> from six . moves import xrange # pylint : disable = redefined - builtin <nl> - <nl> - import numpy as np <nl> import tensorflow as tf <nl> <nl> - <nl> OPTIMIZER_CLS_NAMES = { <nl> " SGD " : tf . train . GradientDescentOptimizer , <nl> " Adagrad " : tf . train . AdagradOptimizer , <nl> <nl> } <nl> <nl> <nl> - def _print_report ( print_loss_buffer , global_step , epoch ) : <nl> - " " " Prints report for given losses and global step . " " " <nl> - avg_loss = np . mean ( print_loss_buffer ) <nl> - if epoch : <nl> - print ( " Step # { step } , epoch # { epoch } , avg . loss : { loss : . 5f } " <nl> - . format ( step = global_step , loss = avg_loss , epoch = epoch ) ) <nl> - else : <nl> - print ( " Step # { step } , avg . loss : { loss : . 5f } " <nl> - . format ( step = global_step , loss = avg_loss ) ) <nl> - <nl> - <nl> class TensorFlowTrainer ( object ) : <nl> " " " General trainer class . <nl> <nl> def initialize ( self , sess ) : <nl> " " " <nl> return sess . run ( self . _initializers ) <nl> <nl> - def train ( self , sess , feed_dict_fn , steps , <nl> + def train ( self , sess , feed_dict_fn , steps , monitor , <nl> summary_writer = None , summaries = None , <nl> - print_steps = 0 , verbose = 1 , early_stopping_rounds = None , <nl> feed_params_fn = None ) : <nl> " " " Trains a model for given number of steps , given feed_dict function . <nl> <nl> Args : <nl> sess : Session object . <nl> feed_dict_fn : Function that will return a feed dictionary . <nl> - steps : Number of steps to run . <nl> summary_writer : SummaryWriter object to use for writing summaries . <nl> + steps : Number of steps to run . <nl> + monitor : Monitor object to track training progress and induce early stopping <nl> summaries : Joined object of all summaries that should be ran . <nl> - print_steps : Number of steps in between printing cost . <nl> - verbose : Controls the verbosity . If set to 0 , the algorithm is muted . <nl> - early_stopping_rounds : Activates early stopping if this is not None . <nl> - Loss needs to decrease at least every every < early_stopping_rounds > <nl> - round ( s ) to continue training . ( default : None ) <nl> - feed_params_fn : params about data feeder state ( epoch , offset ) <nl> <nl> Returns : <nl> List of losses for each step . <nl> " " " <nl> - losses , print_loss_buffer = [ ] , [ ] <nl> - print_steps = ( print_steps if print_steps else <nl> - math . ceil ( float ( steps ) / 10 ) ) <nl> - <nl> - min_loss = float ( ' inf ' ) <nl> - min_loss_i = 0 <nl> - if early_stopping_rounds is not None : <nl> - sys . stderr . write ( " Performing early stopping . \ n " ) <nl> - <nl> for step in xrange ( steps ) : <nl> feed_dict = feed_dict_fn ( ) <nl> if summaries : <nl> def train ( self , sess , feed_dict_fn , steps , <nl> global_step , loss , _ = sess . run ( <nl> [ self . global_step , self . loss , self . trainer ] , <nl> feed_dict = feed_dict ) <nl> - <nl> - if early_stopping_rounds is not None : <nl> - if loss < min_loss : <nl> - min_loss = loss <nl> - min_loss_i = step <nl> - elif step - min_loss_i > = early_stopping_rounds : <nl> - sys . stderr . write ( " Stopping . Best step : \ n \ <nl> - step { } with loss { } \ n " . 
format ( min_loss_i , <nl> - min_loss ) ) <nl> - break <nl> - <nl> - losses . append ( loss ) <nl> - print_loss_buffer . append ( loss ) <nl> + monitor . update ( step , global_step , loss , sess , <nl> + feed_params_fn , loss_expression_tensor = self . loss ) <nl> if summaries and summary_writer and summ is not None : <nl> summary_writer . add_summary ( summ , global_step ) <nl> - if verbose > 0 : <nl> - if step % print_steps = = 0 : <nl> - if feed_params_fn : <nl> - feed_params = feed_params_fn ( ) <nl> - epoch = feed_params [ ' epoch ' ] if ' epoch ' in feed_params else None <nl> - _print_report ( print_loss_buffer , global_step , epoch ) <nl> - print_loss_buffer = [ ] <nl> - <nl> - return losses <nl> + if monitor . monitor_inducing_stop ( ) : <nl> + break <nl> + return <nl> <nl> <nl> class RestoredTrainer ( TensorFlowTrainer ) : <nl> def __init__ ( self , loss , global_step , trainer ) : <nl> self . global_step = global_step <nl> self . loss = loss <nl> self . trainer = trainer <nl> - <nl>
Merge pull request from dansbecker / validation - early - stopping
tensorflow/tensorflow
a28882f7fc50a1d59f0bd227ff5d2d7ddc42b451
2016-02-24T18:26:10Z
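For reference, the monitor-based API this commit introduces boils down to the following minimal sketch, condensed from examples/iris_val_based_early_stopping.py in the diff above. It assumes the skflow package at this revision, where early_stopping_rounds moved out of the estimator constructors and into ValidationMonitor:

```python
from sklearn import cross_validation, datasets, metrics

import skflow

iris = datasets.load_iris()
X_train, X_test, y_train, y_test = cross_validation.train_test_split(
    iris.data, iris.target, test_size=0.2, random_state=42)

# Hold out part of the training data as a validation set for the monitor.
X_train, X_val, y_train, y_val = cross_validation.train_test_split(
    X_train, y_train, test_size=0.2, random_state=42)

# Early stopping now lives in the monitor rather than the estimator:
# training halts once validation loss fails to improve for
# early_stopping_rounds consecutive steps.
val_monitor = skflow.monitors.ValidationMonitor(
    X_val, y_val, n_classes=3, early_stopping_rounds=200)

classifier = skflow.TensorFlowDNNClassifier(
    hidden_units=[10, 20, 10], n_classes=3, steps=2000)
classifier.fit(X_train, y_train, val_monitor)
print(metrics.accuracy_score(y_test, classifier.predict(X_test)))
```

Keeping the stopping policy in one monitor object, rather than duplicating an early_stopping_rounds argument across every estimator constructor, also lets stopping be driven by validation loss instead of training loss.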
mmm a / test / functional / test_framework / test_framework . py <nl> ppp b / test / functional / test_framework / test_framework . py <nl> <nl> disconnect_nodes , <nl> get_datadir_path , <nl> initialize_datadir , <nl> - p2p_port , <nl> sync_blocks , <nl> sync_mempools , <nl> ) <nl> def _start_logging ( self ) : <nl> def _initialize_chain ( self ) : <nl> " " " Initialize a pre - mined blockchain for use by the test . <nl> <nl> - Create a cache of a 199 - block - long chain ( with wallet ) for MAX_NODES <nl> + Create a cache of a 199 - block - long chain <nl> Afterward , create num_nodes copies from the cache . " " " <nl> <nl> + CACHE_NODE_ID = 0 # Use node 0 to create the cache for all other nodes <nl> + cache_node_dir = get_datadir_path ( self . options . cachedir , CACHE_NODE_ID ) <nl> assert self . num_nodes < = MAX_NODES <nl> - create_cache = False <nl> - for i in range ( MAX_NODES ) : <nl> - if not os . path . isdir ( get_datadir_path ( self . options . cachedir , i ) ) : <nl> - create_cache = True <nl> - break <nl> - <nl> - if create_cache : <nl> - self . log . debug ( " Creating data directories from cached datadir " ) <nl> - <nl> - # find and delete old cache directories if any exist <nl> - for i in range ( MAX_NODES ) : <nl> - if os . path . isdir ( get_datadir_path ( self . options . cachedir , i ) ) : <nl> - shutil . rmtree ( get_datadir_path ( self . options . cachedir , i ) ) <nl> - <nl> - # Create cache directories , run bitcoinds : <nl> - for i in range ( MAX_NODES ) : <nl> - datadir = initialize_datadir ( self . options . cachedir , i ) <nl> - args = [ self . options . bitcoind , " - datadir = " + datadir , ' - disablewallet ' ] <nl> - if i > 0 : <nl> - args . append ( " - connect = 127 . 0 . 0 . 1 : " + str ( p2p_port ( 0 ) ) ) <nl> - self . nodes . append ( TestNode ( <nl> - i , <nl> - get_datadir_path ( self . options . cachedir , i ) , <nl> + <nl> + if not os . path . isdir ( cache_node_dir ) : <nl> + self . log . debug ( " Creating cache directory { } " . format ( cache_node_dir ) ) <nl> + <nl> + initialize_datadir ( self . options . cachedir , CACHE_NODE_ID ) <nl> + self . nodes . append ( <nl> + TestNode ( <nl> + CACHE_NODE_ID , <nl> + cache_node_dir , <nl> extra_conf = [ " bind = 127 . 0 . 0 . 1 " ] , <nl> - extra_args = [ ] , <nl> + extra_args = [ ' - disablewallet ' ] , <nl> rpchost = None , <nl> timewait = self . rpc_timeout , <nl> bitcoind = self . options . bitcoind , <nl> def _initialize_chain ( self ) : <nl> coverage_dir = None , <nl> cwd = self . options . tmpdir , <nl> ) ) <nl> - self . nodes [ i ] . args = args <nl> - self . start_node ( i ) <nl> + self . start_node ( CACHE_NODE_ID ) <nl> <nl> # Wait for RPC connections to be ready <nl> - for node in self . nodes : <nl> - node . wait_for_rpc_connection ( ) <nl> + self . nodes [ CACHE_NODE_ID ] . wait_for_rpc_connection ( ) <nl> <nl> # Create a 199 - block - long chain ; each of the 4 first nodes <nl> # gets 25 mature blocks and 25 immature . <nl> def _initialize_chain ( self ) : <nl> # This is needed so that we are out of IBD when the test starts , <nl> # see the tip age check in IsInitialBlockDownload ( ) . <nl> for i in range ( 8 ) : <nl> - self . nodes [ 0 ] . generatetoaddress ( 25 if i ! = 7 else 24 , self . nodes [ i % 4 ] . get_deterministic_priv_key ( ) . address ) <nl> - self . sync_blocks ( ) <nl> + self . nodes [ CACHE_NODE_ID ] . generatetoaddress ( <nl> + nblocks = 25 if i ! = 7 else 24 , <nl> + address = TestNode . PRIV_KEYS [ i % 4 ] . address , <nl> + ) <nl> <nl> - for n in self . 
nodes : <nl> - assert_equal ( n . getblockchaininfo ( ) [ " blocks " ] , 199 ) <nl> + assert_equal ( self . nodes [ CACHE_NODE_ID ] . getblockchaininfo ( ) [ " blocks " ] , 199 ) <nl> <nl> - # Shut them down , and clean up cache directories : <nl> + # Shut it down , and clean up cache directories : <nl> self . stop_nodes ( ) <nl> self . nodes = [ ] <nl> <nl> - def cache_path ( n , * paths ) : <nl> - return os . path . join ( get_datadir_path ( self . options . cachedir , n ) , " regtest " , * paths ) <nl> + def cache_path ( * paths ) : <nl> + return os . path . join ( cache_node_dir , " regtest " , * paths ) <nl> <nl> - for i in range ( MAX_NODES ) : <nl> - os . rmdir ( cache_path ( i , ' wallets ' ) ) # Remove empty wallets dir <nl> - for entry in os . listdir ( cache_path ( i ) ) : <nl> - if entry not in [ ' chainstate ' , ' blocks ' ] : <nl> - os . remove ( cache_path ( i , entry ) ) <nl> + os . rmdir ( cache_path ( ' wallets ' ) ) # Remove empty wallets dir <nl> + for entry in os . listdir ( cache_path ( ) ) : <nl> + if entry not in [ ' chainstate ' , ' blocks ' ] : # Only keep chainstate and blocks folder <nl> + os . remove ( cache_path ( entry ) ) <nl> <nl> for i in range ( self . num_nodes ) : <nl> - from_dir = get_datadir_path ( self . options . cachedir , i ) <nl> + self . log . debug ( " Copy cache directory { } to node { } " . format ( cache_node_dir , i ) ) <nl> to_dir = get_datadir_path ( self . options . tmpdir , i ) <nl> - shutil . copytree ( from_dir , to_dir ) <nl> + shutil . copytree ( cache_node_dir , to_dir ) <nl> initialize_datadir ( self . options . tmpdir , i ) # Overwrite port / rpcport in bitcoin . conf <nl> <nl> def _initialize_chain_clean ( self ) : <nl> mmm a / test / functional / test_framework / test_node . py <nl> ppp b / test / functional / test_framework / test_node . py <nl> def __init__ ( self , i , datadir , * , rpchost , timewait , bitcoind , bitcoin_cli , cove <nl> <nl> self . p2ps = [ ] <nl> <nl> - def get_deterministic_priv_key ( self ) : <nl> - " " " Return a deterministic priv key in base58 , that only depends on the node ' s index " " " <nl> - AddressKeyPair = collections . namedtuple ( ' AddressKeyPair ' , [ ' address ' , ' key ' ] ) <nl> - PRIV_KEYS = [ <nl> + AddressKeyPair = collections . namedtuple ( ' AddressKeyPair ' , [ ' address ' , ' key ' ] ) <nl> + PRIV_KEYS = [ <nl> # address , privkey <nl> AddressKeyPair ( ' mjTkW3DjgyZck4KbiRusZsqTgaYTxdSz6z ' , ' cVpF924EspNh8KjYsfhgY96mmxvT6DgdWiTYMtMjuM74hJaU5psW ' ) , <nl> AddressKeyPair ( ' msX6jQXvxiNhx3Q62PKeLPrhrqZQdSimTg ' , ' cUxsWyKyZ9MAQTaAhUQWJmBbSvHMwSmuv59KgxQV7oZQU3PXN3KE ' ) , <nl> def get_deterministic_priv_key ( self ) : <nl> AddressKeyPair ( ' mq4fBNdckGtvY2mijd9am7DRsbRB4KjUkf ' , ' cN55daf1HotwBAgAKWVgDcoppmUNDtQSfb7XLutTLeAgVc3u8hik ' ) , <nl> AddressKeyPair ( ' mpFAHDjX7KregM3rVotdXzQmkbwtbQEnZ6 ' , ' cT7qK7g1wkYEMvKowd2ZrX1E5f6JQ7TM246UfqbCiyF7kZhorpX3 ' ) , <nl> AddressKeyPair ( ' mzRe8QZMfGi58KyWCse2exxEFry2sfF2Y7 ' , ' cPiRWE8KMjTRxH1MWkPerhfoHFn5iHPWVK5aPqjW8NxmdwenFinJ ' ) , <nl> - ] <nl> - assert len ( PRIV_KEYS ) = = MAX_NODES <nl> - return PRIV_KEYS [ self . index ] <nl> + ] <nl> + <nl> + def get_deterministic_priv_key ( self ) : <nl> + " " " Return a deterministic priv key in base58 , that only depends on the node ' s index " " " <nl> + assert len ( self . PRIV_KEYS ) = = MAX_NODES <nl> + return self . PRIV_KEYS [ self . index ] <nl> <nl> def get_mem_rss_kilobytes ( self ) : <nl> " " " Get the memory usage ( RSS ) per ` ps ` . <nl>
test : Speed up cache creation
bitcoin/bitcoin
fa473303972b7dad600d949dc9b303d8136cb7e7
2019-05-17T17:21:48Z
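For reference, the block schedule the single cache node mines in this commit can be checked with a few lines of plain Python. The counts and the i % 4 key rotation are taken from the diff; the snippet is only an illustration of the arithmetic, not test-framework code:

```python
# Illustration of the cache's block schedule (counts taken from the diff):
# eight generatetoaddress rounds of 25 blocks each, except 24 in the last,
# with the coinbase address rotating over the first four deterministic keys
# so each of those nodes ends up holding both mature and immature rewards.
NUM_KEYS = 4  # TestNode.PRIV_KEYS entries used for coinbase rewards

blocks_per_key = [0] * NUM_KEYS
for i in range(8):
    nblocks = 25 if i != 7 else 24
    blocks_per_key[i % NUM_KEYS] += nblocks

print(blocks_per_key)       # [50, 50, 50, 49]
print(sum(blocks_per_key))  # 199 -- the height asserted via getblockchaininfo
```

Mining the 199-block chain once in a single datadir and then shutil.copytree-ing it per node is what makes the cache creation faster: the expensive generate step runs once instead of once per MAX_NODES directory.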
new file mode 100644 <nl> index 00000000000 . . 46be8bfaa3c <nl> mmm / dev / null <nl> ppp b / examples / cpp / metadata / Makefile <nl> <nl> + # <nl> + # Copyright 2018 gRPC authors . <nl> + # <nl> + # Licensed under the Apache License , Version 2 . 0 ( the " License " ) ; <nl> + # you may not use this file except in compliance with the License . <nl> + # You may obtain a copy of the License at <nl> + # <nl> + # http : / / www . apache . org / licenses / LICENSE - 2 . 0 <nl> + # <nl> + # Unless required by applicable law or agreed to in writing , software <nl> + # distributed under the License is distributed on an " AS IS " BASIS , <nl> + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . <nl> + # See the License for the specific language governing permissions and <nl> + # limitations under the License . <nl> + # <nl> + HOST_SYSTEM = $ ( shell uname | cut - f 1 - d_ ) <nl> + SYSTEM ? = $ ( HOST_SYSTEM ) <nl> + CXX = g + + <nl> + CPPFLAGS + = ` pkg - config - - cflags protobuf grpc ` <nl> + CXXFLAGS + = - std = c + + 11 <nl> + ifeq ( $ ( SYSTEM ) , Darwin ) <nl> + LDFLAGS + = - L / usr / local / lib ` pkg - config - - libs protobuf grpc + + grpc ` \ <nl> + - lgrpc + + _reflection \ <nl> + - ldl <nl> + else <nl> + LDFLAGS + = - L / usr / local / lib ` pkg - config - - libs protobuf grpc + + grpc ` \ <nl> + - Wl , - - no - as - needed - lgrpc + + _reflection - Wl , - - as - needed \ <nl> + - ldl <nl> + endif <nl> + PROTOC = protoc <nl> + GRPC_CPP_PLUGIN = grpc_cpp_plugin <nl> + GRPC_CPP_PLUGIN_PATH ? = ` which $ ( GRPC_CPP_PLUGIN ) ` <nl> + PROTOS_PATH = . . / . . / protos <nl> + vpath % . proto $ ( PROTOS_PATH ) <nl> + all : system - check greeter_client greeter_server <nl> + greeter_client : helloworld . pb . o helloworld . grpc . pb . o greeter_client . o <nl> + $ ( CXX ) $ ^ $ ( LDFLAGS ) - o $ @ <nl> + greeter_server : helloworld . pb . o helloworld . grpc . pb . o greeter_server . o <nl> + $ ( CXX ) $ ^ $ ( LDFLAGS ) - o $ @ <nl> + . PRECIOUS : % . grpc . pb . cc <nl> + % . grpc . pb . cc : % . proto <nl> + $ ( PROTOC ) - I $ ( PROTOS_PATH ) - - grpc_out = . - - plugin = protoc - gen - grpc = $ ( GRPC_CPP_PLUGIN_PATH ) $ < <nl> + . PRECIOUS : % . pb . cc <nl> + % . pb . cc : % . proto <nl> + $ ( PROTOC ) - I $ ( PROTOS_PATH ) - - cpp_out = . $ < <nl> + clean : <nl> + rm - f * . o * . pb . cc * . pb . h greeter_client greeter_server <nl> + # The following is to test your system and ensure a smoother experience . <nl> + # They are by no means necessary to actually compile a grpc - enabled software . <nl> + PROTOC_CMD = which $ ( PROTOC ) <nl> + PROTOC_CHECK_CMD = $ ( PROTOC ) - - version | grep - q libprotoc . 3 <nl> + PLUGIN_CHECK_CMD = which $ ( GRPC_CPP_PLUGIN ) <nl> + HAS_PROTOC = $ ( shell $ ( PROTOC_CMD ) > / dev / null & & echo true | | echo false ) <nl> + ifeq ( $ ( HAS_PROTOC ) , true ) <nl> + HAS_VALID_PROTOC = $ ( shell $ ( PROTOC_CHECK_CMD ) 2 > / dev / null & & echo true | | echo false ) <nl> + endif <nl> + HAS_PLUGIN = $ ( shell $ ( PLUGIN_CHECK_CMD ) > / dev / null & & echo true | | echo false ) <nl> + SYSTEM_OK = false <nl> + ifeq ( $ ( HAS_VALID_PROTOC ) , true ) <nl> + ifeq ( $ ( HAS_PLUGIN ) , true ) <nl> + SYSTEM_OK = true <nl> + endif <nl> + endif <nl> + system - check : <nl> + ifneq ( $ ( HAS_VALID_PROTOC ) , true ) <nl> + @ echo " DEPENDENCY ERROR " <nl> + @ echo <nl> + @ echo " You don ' t have protoc 3 . 0 . 0 installed in your path . " <nl> + @ echo " Please install Google protocol buffers 3 . 0 . 0 and its compiler . 
" <nl> + @ echo " You can find it here : " <nl> + @ echo <nl> + @ echo " https : / / github . com / google / protobuf / releases / tag / v3 . 0 . 0 " <nl> + @ echo <nl> + @ echo " Here is what I get when trying to evaluate your version of protoc : " <nl> + @ echo <nl> + - $ ( PROTOC ) - - version <nl> + @ echo <nl> + @ echo <nl> + endif <nl> + ifneq ( $ ( HAS_PLUGIN ) , true ) <nl> + @ echo " DEPENDENCY ERROR " <nl> + @ echo <nl> + @ echo " You don ' t have the grpc c + + protobuf plugin installed in your path . " <nl> + @ echo " Please install grpc . You can find it here : " <nl> + @ echo <nl> + @ echo " https : / / github . com / grpc / grpc " <nl> + @ echo <nl> + @ echo " Here is what I get when trying to detect if you have the plugin : " <nl> + @ echo <nl> + - which $ ( GRPC_CPP_PLUGIN ) <nl> + @ echo <nl> + @ echo <nl> + endif <nl> + ifneq ( $ ( SYSTEM_OK ) , true ) <nl> + @ false <nl> + endif <nl> new file mode 100644 <nl> index 00000000000 . . 7b33074ba1e <nl> mmm / dev / null <nl> ppp b / examples / cpp / metadata / README . md <nl> <nl> + # Metadata Example <nl> + <nl> + # # Overview <nl> + <nl> + This example shows you how to add custom headers on the client and server and <nl> + how to access them . <nl> + <nl> + Custom metadata must follow the " Custom - Metadata " format listed in <nl> + https : / / github . com / grpc / grpc / blob / master / doc / PROTOCOL - HTTP2 . md , with the <nl> + exception of binary headers , which don ' t have to be base64 encoded . <nl> + <nl> + # # # Get the tutorial source code <nl> + The example code for this and our other examples lives in the ` examples ` directory . Clone this repository to your local machine by running the following command : <nl> + ` ` ` sh <nl> + $ git clone - b $ ( curl - L https : / / grpc . io / release ) https : / / github . com / grpc / grpc <nl> + ` ` ` <nl> + Change your current directory to examples / cpp / metadata <nl> + ` ` ` sh <nl> + $ cd examples / cpp / metadata <nl> + ` ` ` <nl> + <nl> + # # # Generating gRPC code <nl> + To generate the client and server side interfaces : <nl> + ` ` ` sh <nl> + $ make helloworld . grpc . pb . cc helloworld . pb . cc <nl> + ` ` ` <nl> + Which internally invokes the proto - compiler as : <nl> + ` ` ` sh <nl> + $ protoc - I . . / . . / protos / - - grpc_out = . - - plugin = protoc - gen - grpc = grpc_cpp_plugin . . / . . / protos / helloworld . proto <nl> + $ protoc - I . . / . . / protos / - - cpp_out = . . . / . . / protos / helloworld . proto <nl> + ` ` ` <nl> + # # # Try it ! <nl> + Build client and server : <nl> + <nl> + ` ` ` sh <nl> + $ make <nl> + ` ` ` <nl> + <nl> + Run the server , which will listen on port 50051 : <nl> + <nl> + ` ` ` sh <nl> + $ . / greeter_server <nl> + ` ` ` <nl> + <nl> + Run the client ( in a different terminal ) : <nl> + <nl> + ` ` ` sh <nl> + $ . / greeter_client <nl> + ` ` ` <nl> + <nl> + If things go smoothly , you will see in the client terminal : <nl> + <nl> + " Client received initial metadata from server : initial metadata value " <nl> + " Client received trailing metadata from server : trailing metadata value " <nl> + " Client received message : Hello World " <nl> + <nl> + <nl> + And in the server terminal : <nl> + <nl> + " Header key : custom - bin , value :      " <nl> + " Header key : custom - header , value : Custom Value " <nl> + " Header key : user - agent , value : grpc - c + + / 1 . 16 . 0 - dev grpc - c / 6 . 0 . 
0 - dev ( linux ; chttp2 ; gao ) " <nl> + <nl> + Note that the value for custom - bin doesn ' t print nicely because it ' s a binary <nl> + value . You can indicate a binary value through appending " - bin " to the header key . <nl> + <nl> + We did not add the user - agent metadata as a custom header . This shows how <nl> + the gRPC framework adds some headers under the hood that may show up in the <nl> + metadata map . <nl> new file mode 100755 <nl> index 00000000000 . . 929a51c3a5b <nl> Binary files / dev / null and b / examples / cpp / metadata / greeter_client differ <nl> new file mode 100644 <nl> index 00000000000 . . 80494389937 <nl> mmm / dev / null <nl> ppp b / examples / cpp / metadata / greeter_client . cc <nl> <nl> + / * <nl> + * <nl> + * Copyright 2015 gRPC authors . <nl> + * <nl> + * Licensed under the Apache License , Version 2 . 0 ( the " License " ) ; <nl> + * you may not use this file except in compliance with the License . <nl> + * You may obtain a copy of the License at <nl> + * <nl> + * http : / / www . apache . org / licenses / LICENSE - 2 . 0 <nl> + * <nl> + * Unless required by applicable law or agreed to in writing , software <nl> + * distributed under the License is distributed on an " AS IS " BASIS , <nl> + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . <nl> + * See the License for the specific language governing permissions and <nl> + * limitations under the License . <nl> + * <nl> + * / <nl> + <nl> + # include < iostream > <nl> + # include < memory > <nl> + # include < string > <nl> + <nl> + # include < grpcpp / grpcpp . h > <nl> + <nl> + # ifdef BAZEL_BUILD <nl> + # include " examples / protos / helloworld . grpc . pb . h " <nl> + # else <nl> + # include " helloworld . grpc . pb . h " <nl> + # endif <nl> + <nl> + using grpc : : Channel ; <nl> + using grpc : : ClientContext ; <nl> + using grpc : : Status ; <nl> + using helloworld : : HelloRequest ; <nl> + using helloworld : : HelloReply ; <nl> + using helloworld : : Greeter ; <nl> + <nl> + class CustomHeaderClient { <nl> + public : <nl> + CustomHeaderClient ( std : : shared_ptr < Channel > channel ) <nl> + : stub_ ( Greeter : : NewStub ( channel ) ) { } <nl> + <nl> + / / Assembles the client ' s payload , sends it and presents the response back <nl> + / / from the server . <nl> + std : : string SayHello ( const std : : string & user ) { <nl> + / / Data we are sending to the server . <nl> + HelloRequest request ; <nl> + request . set_name ( user ) ; <nl> + <nl> + / / Container for the data we expect from the server . <nl> + HelloReply reply ; <nl> + <nl> + / / Context for the client . It could be used to convey extra information to <nl> + / / the server and / or tweak certain RPC behaviors . <nl> + ClientContext context ; <nl> + <nl> + / / Setting custom metadata to be sent to the server <nl> + context . AddMetadata ( " custom - header " , " Custom Value " ) ; <nl> + <nl> + / / Setting custom binary metadata <nl> + char bytes [ 8 ] = { ' \ 0 ' , ' \ 1 ' , ' \ 2 ' , ' \ 3 ' , <nl> + ' \ 4 ' , ' \ 5 ' , ' \ 6 ' , ' \ 7 ' } ; <nl> + context . AddMetadata ( " custom - bin " , grpc : : string ( bytes , 8 ) ) ; <nl> + <nl> + / / The actual RPC . <nl> + Status status = stub_ - > SayHello ( & context , request , & reply ) ; <nl> + <nl> + / / Act upon its status . <nl> + if ( status . ok ( ) ) { <nl> + std : : cout < < " Client received initial metadata from server : " < < context . GetServerInitialMetadata ( ) . 
find ( " custom - server - metadata " ) - > second < < std : : endl ; <nl> + std : : cout < < " Client received trailing metadata from server : " < < context . GetServerTrailingMetadata ( ) . find ( " custom - trailing - metadata " ) - > second < < std : : endl ; <nl> + return reply . message ( ) ; <nl> + } else { <nl> + std : : cout < < status . error_code ( ) < < " : " < < status . error_message ( ) <nl> + < < std : : endl ; <nl> + return " RPC failed " ; <nl> + } <nl> + } <nl> + <nl> + private : <nl> + std : : unique_ptr < Greeter : : Stub > stub_ ; <nl> + } ; <nl> + <nl> + int main ( int argc , char * * argv ) { <nl> + / / Instantiate the client . It requires a channel , out of which the actual RPCs <nl> + / / are created . This channel models a connection to an endpoint ( in this case , <nl> + / / localhost at port 50051 ) . We indicate that the channel isn ' t authenticated <nl> + / / ( use of InsecureChannelCredentials ( ) ) . <nl> + CustomHeaderClient greeter ( grpc : : CreateChannel ( <nl> + " localhost : 50051 " , grpc : : InsecureChannelCredentials ( ) ) ) ; <nl> + std : : string user ( " world " ) ; <nl> + std : : string reply = greeter . SayHello ( user ) ; <nl> + std : : cout < < " Client received message : " < < reply < < std : : endl ; <nl> + return 0 ; <nl> + } <nl> new file mode 100644 <nl> index 00000000000 . . 483cb0741cf <nl> Binary files / dev / null and b / examples / cpp / metadata / greeter_client . o differ <nl> new file mode 100755 <nl> index 00000000000 . . 6b03f514753 <nl> Binary files / dev / null and b / examples / cpp / metadata / greeter_server differ <nl> new file mode 100644 <nl> index 00000000000 . . db2fecbcaf5 <nl> mmm / dev / null <nl> ppp b / examples / cpp / metadata / greeter_server . cc <nl> <nl> + / * <nl> + * <nl> + * Copyright 2015 gRPC authors . <nl> + * <nl> + * Licensed under the Apache License , Version 2 . 0 ( the " License " ) ; <nl> + * you may not use this file except in compliance with the License . <nl> + * You may obtain a copy of the License at <nl> + * <nl> + * http : / / www . apache . org / licenses / LICENSE - 2 . 0 <nl> + * <nl> + * Unless required by applicable law or agreed to in writing , software <nl> + * distributed under the License is distributed on an " AS IS " BASIS , <nl> + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . <nl> + * See the License for the specific language governing permissions and <nl> + * limitations under the License . <nl> + * <nl> + * / <nl> + <nl> + # include < iostream > <nl> + # include < memory > <nl> + # include < string > <nl> + <nl> + # include < grpcpp / grpcpp . h > <nl> + <nl> + # ifdef BAZEL_BUILD <nl> + # include " examples / protos / helloworld . grpc . pb . h " <nl> + # else <nl> + # include " helloworld . grpc . pb . h " <nl> + # endif <nl> + <nl> + using grpc : : Server ; <nl> + using grpc : : ServerBuilder ; <nl> + using grpc : : ServerContext ; <nl> + using grpc : : Status ; <nl> + using helloworld : : HelloRequest ; <nl> + using helloworld : : HelloReply ; <nl> + using helloworld : : Greeter ; <nl> + <nl> + / / Logic and data behind the server ' s behavior . 
<nl> + class GreeterServiceImpl final : public Greeter : : Service { <nl> + Status SayHello ( ServerContext * context , const HelloRequest * request , <nl> + HelloReply * reply ) override { <nl> + std : : string prefix ( " Hello " ) ; <nl> + <nl> + / / Get the client ' s initial metadata <nl> + std : : cout < < " Client metadata : " < < std : : endl ; <nl> + const std : : multimap < grpc : : string_ref , grpc : : string_ref > metadata = context - > client_metadata ( ) ; <nl> + for ( auto iter = metadata . begin ( ) ; iter ! = metadata . end ( ) ; + + iter ) { <nl> + std : : cout < < " Header key : " < < iter - > first < < " , value : " < < iter - > second < < std : : endl ; <nl> + } <nl> + <nl> + context - > AddInitialMetadata ( " custom - server - metadata " , " initial metadata value " ) ; <nl> + context - > AddTrailingMetadata ( " custom - trailing - metadata " , " trailing metadata value " ) ; <nl> + reply - > set_message ( prefix + request - > name ( ) ) ; <nl> + return Status : : OK ; <nl> + } <nl> + } ; <nl> + <nl> + void RunServer ( ) { <nl> + std : : string server_address ( " 0 . 0 . 0 . 0 : 50051 " ) ; <nl> + GreeterServiceImpl service ; <nl> + <nl> + ServerBuilder builder ; <nl> + / / Listen on the given address without any authentication mechanism . <nl> + builder . AddListeningPort ( server_address , grpc : : InsecureServerCredentials ( ) ) ; <nl> + / / Register " service " as the instance through which we ' ll communicate with <nl> + / / clients . In this case it corresponds to an * synchronous * service . <nl> + builder . RegisterService ( & service ) ; <nl> + / / Finally assemble the server . <nl> + std : : unique_ptr < Server > server ( builder . BuildAndStart ( ) ) ; <nl> + std : : cout < < " Server listening on " < < server_address < < std : : endl ; <nl> + <nl> + / / Wait for the server to shutdown . Note that some other thread must be <nl> + / / responsible for shutting down the server for this call to ever return . <nl> + server - > Wait ( ) ; <nl> + } <nl> + <nl> + int main ( int argc , char * * argv ) { <nl> + RunServer ( ) ; <nl> + <nl> + return 0 ; <nl> + } <nl> new file mode 100644 <nl> index 00000000000 . . dc197b2d14c <nl> Binary files / dev / null and b / examples / cpp / metadata / greeter_server . o differ <nl> new file mode 100644 <nl> index 00000000000 . . 4255687148b <nl> mmm / dev / null <nl> ppp b / examples / cpp / metadata / helloworld . grpc . pb . cc <nl> <nl> + / / Generated by the gRPC C + + plugin . <nl> + / / If you make any local change , they will be lost . <nl> + / / source : helloworld . proto <nl> + <nl> + # include " helloworld . pb . h " <nl> + # include " helloworld . grpc . pb . h " <nl> + <nl> + # include < functional > <nl> + # include < grpcpp / impl / codegen / async_stream . h > <nl> + # include < grpcpp / impl / codegen / async_unary_call . h > <nl> + # include < grpcpp / impl / codegen / channel_interface . h > <nl> + # include < grpcpp / impl / codegen / client_unary_call . h > <nl> + # include < grpcpp / impl / codegen / client_callback . h > <nl> + # include < grpcpp / impl / codegen / method_handler_impl . h > <nl> + # include < grpcpp / impl / codegen / rpc_service_method . h > <nl> + # include < grpcpp / impl / codegen / service_type . h > <nl> + # include < grpcpp / impl / codegen / sync_stream . h > <nl> + namespace helloworld { <nl> + <nl> + static const char * Greeter_method_names [ ] = { <nl> + " / helloworld . 
Greeter / SayHello " , <nl> + } ; <nl> + <nl> + std : : unique_ptr < Greeter : : Stub > Greeter : : NewStub ( const std : : shared_ptr < : : grpc : : ChannelInterface > & channel , const : : grpc : : StubOptions & options ) { <nl> + ( void ) options ; <nl> + std : : unique_ptr < Greeter : : Stub > stub ( new Greeter : : Stub ( channel ) ) ; <nl> + return stub ; <nl> + } <nl> + <nl> + Greeter : : Stub : : Stub ( const std : : shared_ptr < : : grpc : : ChannelInterface > & channel ) <nl> + : channel_ ( channel ) , rpcmethod_SayHello_ ( Greeter_method_names [ 0 ] , : : grpc : : internal : : RpcMethod : : NORMAL_RPC , channel ) <nl> + { } <nl> + <nl> + : : grpc : : Status Greeter : : Stub : : SayHello ( : : grpc : : ClientContext * context , const : : helloworld : : HelloRequest & request , : : helloworld : : HelloReply * response ) { <nl> + return : : grpc : : internal : : BlockingUnaryCall ( channel_ . get ( ) , rpcmethod_SayHello_ , context , request , response ) ; <nl> + } <nl> + <nl> + void Greeter : : Stub : : experimental_async : : SayHello ( : : grpc : : ClientContext * context , const : : helloworld : : HelloRequest * request , : : helloworld : : HelloReply * response , std : : function < void ( : : grpc : : Status ) > f ) { <nl> + return : : grpc : : internal : : CallbackUnaryCall ( stub_ - > channel_ . get ( ) , stub_ - > rpcmethod_SayHello_ , context , request , response , std : : move ( f ) ) ; <nl> + } <nl> + <nl> + : : grpc : : ClientAsyncResponseReader < : : helloworld : : HelloReply > * Greeter : : Stub : : AsyncSayHelloRaw ( : : grpc : : ClientContext * context , const : : helloworld : : HelloRequest & request , : : grpc : : CompletionQueue * cq ) { <nl> + return : : grpc : : internal : : ClientAsyncResponseReaderFactory < : : helloworld : : HelloReply > : : Create ( channel_ . get ( ) , cq , rpcmethod_SayHello_ , context , request , true ) ; <nl> + } <nl> + <nl> + : : grpc : : ClientAsyncResponseReader < : : helloworld : : HelloReply > * Greeter : : Stub : : PrepareAsyncSayHelloRaw ( : : grpc : : ClientContext * context , const : : helloworld : : HelloRequest & request , : : grpc : : CompletionQueue * cq ) { <nl> + return : : grpc : : internal : : ClientAsyncResponseReaderFactory < : : helloworld : : HelloReply > : : Create ( channel_ . get ( ) , cq , rpcmethod_SayHello_ , context , request , false ) ; <nl> + } <nl> + <nl> + Greeter : : Service : : Service ( ) { <nl> + AddMethod ( new : : grpc : : internal : : RpcServiceMethod ( <nl> + Greeter_method_names [ 0 ] , <nl> + : : grpc : : internal : : RpcMethod : : NORMAL_RPC , <nl> + new : : grpc : : internal : : RpcMethodHandler < Greeter : : Service , : : helloworld : : HelloRequest , : : helloworld : : HelloReply > ( <nl> + std : : mem_fn ( & Greeter : : Service : : SayHello ) , this ) ) ) ; <nl> + } <nl> + <nl> + Greeter : : Service : : ~ Service ( ) { <nl> + } <nl> + <nl> + : : grpc : : Status Greeter : : Service : : SayHello ( : : grpc : : ServerContext * context , const : : helloworld : : HelloRequest * request , : : helloworld : : HelloReply * response ) { <nl> + ( void ) context ; <nl> + ( void ) request ; <nl> + ( void ) response ; <nl> + return : : grpc : : Status ( : : grpc : : StatusCode : : UNIMPLEMENTED , " " ) ; <nl> + } <nl> + <nl> + <nl> + } / / namespace helloworld <nl> + <nl> new file mode 100644 <nl> index 00000000000 . . 73cc75e0a62 <nl> mmm / dev / null <nl> ppp b / examples / cpp / metadata / helloworld . grpc . pb . h <nl> <nl> + / / Generated by the gRPC C + + plugin . 
<nl> + / / If you make any local change , they will be lost . <nl> + / / source : helloworld . proto <nl> + / / Original file comments : <nl> + / / Copyright 2015 gRPC authors . <nl> + / / <nl> + / / Licensed under the Apache License , Version 2 . 0 ( the " License " ) ; <nl> + / / you may not use this file except in compliance with the License . <nl> + / / You may obtain a copy of the License at <nl> + / / <nl> + / / http : / / www . apache . org / licenses / LICENSE - 2 . 0 <nl> + / / <nl> + / / Unless required by applicable law or agreed to in writing , software <nl> + / / distributed under the License is distributed on an " AS IS " BASIS , <nl> + / / WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . <nl> + / / See the License for the specific language governing permissions and <nl> + / / limitations under the License . <nl> + / / <nl> + # ifndef GRPC_helloworld_2eproto__INCLUDED <nl> + # define GRPC_helloworld_2eproto__INCLUDED <nl> + <nl> + # include " helloworld . pb . h " <nl> + <nl> + # include < functional > <nl> + # include < grpcpp / impl / codegen / async_generic_service . h > <nl> + # include < grpcpp / impl / codegen / async_stream . h > <nl> + # include < grpcpp / impl / codegen / async_unary_call . h > <nl> + # include < grpcpp / impl / codegen / method_handler_impl . h > <nl> + # include < grpcpp / impl / codegen / proto_utils . h > <nl> + # include < grpcpp / impl / codegen / rpc_method . h > <nl> + # include < grpcpp / impl / codegen / service_type . h > <nl> + # include < grpcpp / impl / codegen / status . h > <nl> + # include < grpcpp / impl / codegen / stub_options . h > <nl> + # include < grpcpp / impl / codegen / sync_stream . h > <nl> + <nl> + namespace grpc { <nl> + class CompletionQueue ; <nl> + class Channel ; <nl> + class ServerCompletionQueue ; <nl> + class ServerContext ; <nl> + } / / namespace grpc <nl> + <nl> + namespace helloworld { <nl> + <nl> + / / The greeting service definition . <nl> + class Greeter final { <nl> + public : <nl> + static constexpr char const * service_full_name ( ) { <nl> + return " helloworld . 
Greeter " ; <nl> + } <nl> + class StubInterface { <nl> + public : <nl> + virtual ~ StubInterface ( ) { } <nl> + / / Sends a greeting <nl> + virtual : : grpc : : Status SayHello ( : : grpc : : ClientContext * context , const : : helloworld : : HelloRequest & request , : : helloworld : : HelloReply * response ) = 0 ; <nl> + std : : unique_ptr < : : grpc : : ClientAsyncResponseReaderInterface < : : helloworld : : HelloReply > > AsyncSayHello ( : : grpc : : ClientContext * context , const : : helloworld : : HelloRequest & request , : : grpc : : CompletionQueue * cq ) { <nl> + return std : : unique_ptr < : : grpc : : ClientAsyncResponseReaderInterface < : : helloworld : : HelloReply > > ( AsyncSayHelloRaw ( context , request , cq ) ) ; <nl> + } <nl> + std : : unique_ptr < : : grpc : : ClientAsyncResponseReaderInterface < : : helloworld : : HelloReply > > PrepareAsyncSayHello ( : : grpc : : ClientContext * context , const : : helloworld : : HelloRequest & request , : : grpc : : CompletionQueue * cq ) { <nl> + return std : : unique_ptr < : : grpc : : ClientAsyncResponseReaderInterface < : : helloworld : : HelloReply > > ( PrepareAsyncSayHelloRaw ( context , request , cq ) ) ; <nl> + } <nl> + class experimental_async_interface { <nl> + public : <nl> + virtual ~ experimental_async_interface ( ) { } <nl> + / / Sends a greeting <nl> + virtual void SayHello ( : : grpc : : ClientContext * context , const : : helloworld : : HelloRequest * request , : : helloworld : : HelloReply * response , std : : function < void ( : : grpc : : Status ) > ) = 0 ; <nl> + } ; <nl> + virtual class experimental_async_interface * experimental_async ( ) { return nullptr ; } <nl> + private : <nl> + virtual : : grpc : : ClientAsyncResponseReaderInterface < : : helloworld : : HelloReply > * AsyncSayHelloRaw ( : : grpc : : ClientContext * context , const : : helloworld : : HelloRequest & request , : : grpc : : CompletionQueue * cq ) = 0 ; <nl> + virtual : : grpc : : ClientAsyncResponseReaderInterface < : : helloworld : : HelloReply > * PrepareAsyncSayHelloRaw ( : : grpc : : ClientContext * context , const : : helloworld : : HelloRequest & request , : : grpc : : CompletionQueue * cq ) = 0 ; <nl> + } ; <nl> + class Stub final : public StubInterface { <nl> + public : <nl> + Stub ( const std : : shared_ptr < : : grpc : : ChannelInterface > & channel ) ; <nl> + : : grpc : : Status SayHello ( : : grpc : : ClientContext * context , const : : helloworld : : HelloRequest & request , : : helloworld : : HelloReply * response ) override ; <nl> + std : : unique_ptr < : : grpc : : ClientAsyncResponseReader < : : helloworld : : HelloReply > > AsyncSayHello ( : : grpc : : ClientContext * context , const : : helloworld : : HelloRequest & request , : : grpc : : CompletionQueue * cq ) { <nl> + return std : : unique_ptr < : : grpc : : ClientAsyncResponseReader < : : helloworld : : HelloReply > > ( AsyncSayHelloRaw ( context , request , cq ) ) ; <nl> + } <nl> + std : : unique_ptr < : : grpc : : ClientAsyncResponseReader < : : helloworld : : HelloReply > > PrepareAsyncSayHello ( : : grpc : : ClientContext * context , const : : helloworld : : HelloRequest & request , : : grpc : : CompletionQueue * cq ) { <nl> + return std : : unique_ptr < : : grpc : : ClientAsyncResponseReader < : : helloworld : : HelloReply > > ( PrepareAsyncSayHelloRaw ( context , request , cq ) ) ; <nl> + } <nl> + class experimental_async final : <nl> + public StubInterface : : experimental_async_interface { <nl> + public : <nl> + void SayHello ( : : grpc : : ClientContext * 
context , const : : helloworld : : HelloRequest * request , : : helloworld : : HelloReply * response , std : : function < void ( : : grpc : : Status ) > ) override ; <nl> + private : <nl> + friend class Stub ; <nl> + explicit experimental_async ( Stub * stub ) : stub_ ( stub ) { } <nl> + Stub * stub ( ) { return stub_ ; } <nl> + Stub * stub_ ; <nl> + } ; <nl> + class experimental_async_interface * experimental_async ( ) override { return & async_stub_ ; } <nl> + <nl> + private : <nl> + std : : shared_ptr < : : grpc : : ChannelInterface > channel_ ; <nl> + class experimental_async async_stub_ { this } ; <nl> + : : grpc : : ClientAsyncResponseReader < : : helloworld : : HelloReply > * AsyncSayHelloRaw ( : : grpc : : ClientContext * context , const : : helloworld : : HelloRequest & request , : : grpc : : CompletionQueue * cq ) override ; <nl> + : : grpc : : ClientAsyncResponseReader < : : helloworld : : HelloReply > * PrepareAsyncSayHelloRaw ( : : grpc : : ClientContext * context , const : : helloworld : : HelloRequest & request , : : grpc : : CompletionQueue * cq ) override ; <nl> + const : : grpc : : internal : : RpcMethod rpcmethod_SayHello_ ; <nl> + } ; <nl> + static std : : unique_ptr < Stub > NewStub ( const std : : shared_ptr < : : grpc : : ChannelInterface > & channel , const : : grpc : : StubOptions & options = : : grpc : : StubOptions ( ) ) ; <nl> + <nl> + class Service : public : : grpc : : Service { <nl> + public : <nl> + Service ( ) ; <nl> + virtual ~ Service ( ) ; <nl> + / / Sends a greeting <nl> + virtual : : grpc : : Status SayHello ( : : grpc : : ServerContext * context , const : : helloworld : : HelloRequest * request , : : helloworld : : HelloReply * response ) ; <nl> + } ; <nl> + template < class BaseClass > <nl> + class WithAsyncMethod_SayHello : public BaseClass { <nl> + private : <nl> + void BaseClassMustBeDerivedFromService ( const Service * service ) { } <nl> + public : <nl> + WithAsyncMethod_SayHello ( ) { <nl> + : : grpc : : Service : : MarkMethodAsync ( 0 ) ; <nl> + } <nl> + ~ WithAsyncMethod_SayHello ( ) override { <nl> + BaseClassMustBeDerivedFromService ( this ) ; <nl> + } <nl> + / / disable synchronous version of this method <nl> + : : grpc : : Status SayHello ( : : grpc : : ServerContext * context , const : : helloworld : : HelloRequest * request , : : helloworld : : HelloReply * response ) override { <nl> + abort ( ) ; <nl> + return : : grpc : : Status ( : : grpc : : StatusCode : : UNIMPLEMENTED , " " ) ; <nl> + } <nl> + void RequestSayHello ( : : grpc : : ServerContext * context , : : helloworld : : HelloRequest * request , : : grpc : : ServerAsyncResponseWriter < : : helloworld : : HelloReply > * response , : : grpc : : CompletionQueue * new_call_cq , : : grpc : : ServerCompletionQueue * notification_cq , void * tag ) { <nl> + : : grpc : : Service : : RequestAsyncUnary ( 0 , context , request , response , new_call_cq , notification_cq , tag ) ; <nl> + } <nl> + } ; <nl> + typedef WithAsyncMethod_SayHello < Service > AsyncService ; <nl> + template < class BaseClass > <nl> + class WithGenericMethod_SayHello : public BaseClass { <nl> + private : <nl> + void BaseClassMustBeDerivedFromService ( const Service * service ) { } <nl> + public : <nl> + WithGenericMethod_SayHello ( ) { <nl> + : : grpc : : Service : : MarkMethodGeneric ( 0 ) ; <nl> + } <nl> + ~ WithGenericMethod_SayHello ( ) override { <nl> + BaseClassMustBeDerivedFromService ( this ) ; <nl> + } <nl> + / / disable synchronous version of this method <nl> + : : grpc : : Status SayHello ( : : grpc : : 
ServerContext * context , const : : helloworld : : HelloRequest * request , : : helloworld : : HelloReply * response ) override { <nl> + abort ( ) ; <nl> + return : : grpc : : Status ( : : grpc : : StatusCode : : UNIMPLEMENTED , " " ) ; <nl> + } <nl> + } ; <nl> + template < class BaseClass > <nl> + class WithRawMethod_SayHello : public BaseClass { <nl> + private : <nl> + void BaseClassMustBeDerivedFromService ( const Service * service ) { } <nl> + public : <nl> + WithRawMethod_SayHello ( ) { <nl> + : : grpc : : Service : : MarkMethodRaw ( 0 ) ; <nl> + } <nl> + ~ WithRawMethod_SayHello ( ) override { <nl> + BaseClassMustBeDerivedFromService ( this ) ; <nl> + } <nl> + / / disable synchronous version of this method <nl> + : : grpc : : Status SayHello ( : : grpc : : ServerContext * context , const : : helloworld : : HelloRequest * request , : : helloworld : : HelloReply * response ) override { <nl> + abort ( ) ; <nl> + return : : grpc : : Status ( : : grpc : : StatusCode : : UNIMPLEMENTED , " " ) ; <nl> + } <nl> + void RequestSayHello ( : : grpc : : ServerContext * context , : : grpc : : ByteBuffer * request , : : grpc : : ServerAsyncResponseWriter < : : grpc : : ByteBuffer > * response , : : grpc : : CompletionQueue * new_call_cq , : : grpc : : ServerCompletionQueue * notification_cq , void * tag ) { <nl> + : : grpc : : Service : : RequestAsyncUnary ( 0 , context , request , response , new_call_cq , notification_cq , tag ) ; <nl> + } <nl> + } ; <nl> + template < class BaseClass > <nl> + class WithStreamedUnaryMethod_SayHello : public BaseClass { <nl> + private : <nl> + void BaseClassMustBeDerivedFromService ( const Service * service ) { } <nl> + public : <nl> + WithStreamedUnaryMethod_SayHello ( ) { <nl> + : : grpc : : Service : : MarkMethodStreamed ( 0 , <nl> + new : : grpc : : internal : : StreamedUnaryHandler < : : helloworld : : HelloRequest , : : helloworld : : HelloReply > ( std : : bind ( & WithStreamedUnaryMethod_SayHello < BaseClass > : : StreamedSayHello , this , std : : placeholders : : _1 , std : : placeholders : : _2 ) ) ) ; <nl> + } <nl> + ~ WithStreamedUnaryMethod_SayHello ( ) override { <nl> + BaseClassMustBeDerivedFromService ( this ) ; <nl> + } <nl> + / / disable regular version of this method <nl> + : : grpc : : Status SayHello ( : : grpc : : ServerContext * context , const : : helloworld : : HelloRequest * request , : : helloworld : : HelloReply * response ) override { <nl> + abort ( ) ; <nl> + return : : grpc : : Status ( : : grpc : : StatusCode : : UNIMPLEMENTED , " " ) ; <nl> + } <nl> + / / replace default version of method with streamed unary <nl> + virtual : : grpc : : Status StreamedSayHello ( : : grpc : : ServerContext * context , : : grpc : : ServerUnaryStreamer < : : helloworld : : HelloRequest , : : helloworld : : HelloReply > * server_unary_streamer ) = 0 ; <nl> + } ; <nl> + typedef WithStreamedUnaryMethod_SayHello < Service > StreamedUnaryService ; <nl> + typedef Service SplitStreamedService ; <nl> + typedef WithStreamedUnaryMethod_SayHello < Service > StreamedService ; <nl> + } ; <nl> + <nl> + } / / namespace helloworld <nl> + <nl> + <nl> + # endif / / GRPC_helloworld_2eproto__INCLUDED <nl> new file mode 100644 <nl> index 00000000000 . . 234283660c2 <nl> Binary files / dev / null and b / examples / cpp / metadata / helloworld . grpc . pb . o differ <nl> new file mode 100644 <nl> index 00000000000 . . cfbc8d3194f <nl> mmm / dev / null <nl> ppp b / examples / cpp / metadata / helloworld . pb . cc <nl> <nl> + / / Generated by the protocol buffer compiler . 
DO NOT EDIT ! <nl> + / / source : helloworld . proto <nl> + <nl> + # include " helloworld . pb . h " <nl> + <nl> + # include < algorithm > <nl> + <nl> + # include < google / protobuf / stubs / common . h > <nl> + # include < google / protobuf / stubs / port . h > <nl> + # include < google / protobuf / io / coded_stream . h > <nl> + # include < google / protobuf / wire_format_lite_inl . h > <nl> + # include < google / protobuf / descriptor . h > <nl> + # include < google / protobuf / generated_message_reflection . h > <nl> + # include < google / protobuf / reflection_ops . h > <nl> + # include < google / protobuf / wire_format . h > <nl> + / / This is a temporary google only hack <nl> + # ifdef GOOGLE_PROTOBUF_ENFORCE_UNIQUENESS <nl> + # include " third_party / protobuf / version . h " <nl> + # endif <nl> + / / @ @ protoc_insertion_point ( includes ) <nl> + <nl> + namespace helloworld { <nl> + class HelloRequestDefaultTypeInternal { <nl> + public : <nl> + : : google : : protobuf : : internal : : ExplicitlyConstructed < HelloRequest > <nl> + _instance ; <nl> + } _HelloRequest_default_instance_ ; <nl> + class HelloReplyDefaultTypeInternal { <nl> + public : <nl> + : : google : : protobuf : : internal : : ExplicitlyConstructed < HelloReply > <nl> + _instance ; <nl> + } _HelloReply_default_instance_ ; <nl> + } / / namespace helloworld <nl> + namespace protobuf_helloworld_2eproto { <nl> + static void InitDefaultsHelloRequest ( ) { <nl> + GOOGLE_PROTOBUF_VERIFY_VERSION ; <nl> + <nl> + { <nl> + void * ptr = & : : helloworld : : _HelloRequest_default_instance_ ; <nl> + new ( ptr ) : : helloworld : : HelloRequest ( ) ; <nl> + : : google : : protobuf : : internal : : OnShutdownDestroyMessage ( ptr ) ; <nl> + } <nl> + : : helloworld : : HelloRequest : : InitAsDefaultInstance ( ) ; <nl> + } <nl> + <nl> + : : google : : protobuf : : internal : : SCCInfo < 0 > scc_info_HelloRequest = <nl> + { { ATOMIC_VAR_INIT ( : : google : : protobuf : : internal : : SCCInfoBase : : kUninitialized ) , 0 , InitDefaultsHelloRequest } , { } } ; <nl> + <nl> + static void InitDefaultsHelloReply ( ) { <nl> + GOOGLE_PROTOBUF_VERIFY_VERSION ; <nl> + <nl> + { <nl> + void * ptr = & : : helloworld : : _HelloReply_default_instance_ ; <nl> + new ( ptr ) : : helloworld : : HelloReply ( ) ; <nl> + : : google : : protobuf : : internal : : OnShutdownDestroyMessage ( ptr ) ; <nl> + } <nl> + : : helloworld : : HelloReply : : InitAsDefaultInstance ( ) ; <nl> + } <nl> + <nl> + : : google : : protobuf : : internal : : SCCInfo < 0 > scc_info_HelloReply = <nl> + { { ATOMIC_VAR_INIT ( : : google : : protobuf : : internal : : SCCInfoBase : : kUninitialized ) , 0 , InitDefaultsHelloReply } , { } } ; <nl> + <nl> + void InitDefaults ( ) { <nl> + : : google : : protobuf : : internal : : InitSCC ( & scc_info_HelloRequest . base ) ; <nl> + : : google : : protobuf : : internal : : InitSCC ( & scc_info_HelloReply . 
base ) ; <nl> + } <nl> + <nl> + : : google : : protobuf : : Metadata file_level_metadata [ 2 ] ; <nl> + <nl> + const : : google : : protobuf : : uint32 TableStruct : : offsets [ ] GOOGLE_PROTOBUF_ATTRIBUTE_SECTION_VARIABLE ( protodesc_cold ) = { <nl> + ~ 0u , / / no _has_bits_ <nl> + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET ( : : helloworld : : HelloRequest , _internal_metadata_ ) , <nl> + ~ 0u , / / no _extensions_ <nl> + ~ 0u , / / no _oneof_case_ <nl> + ~ 0u , / / no _weak_field_map_ <nl> + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET ( : : helloworld : : HelloRequest , name_ ) , <nl> + ~ 0u , / / no _has_bits_ <nl> + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET ( : : helloworld : : HelloReply , _internal_metadata_ ) , <nl> + ~ 0u , / / no _extensions_ <nl> + ~ 0u , / / no _oneof_case_ <nl> + ~ 0u , / / no _weak_field_map_ <nl> + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET ( : : helloworld : : HelloReply , message_ ) , <nl> + } ; <nl> + static const : : google : : protobuf : : internal : : MigrationSchema schemas [ ] GOOGLE_PROTOBUF_ATTRIBUTE_SECTION_VARIABLE ( protodesc_cold ) = { <nl> + { 0 , - 1 , sizeof ( : : helloworld : : HelloRequest ) } , <nl> + { 6 , - 1 , sizeof ( : : helloworld : : HelloReply ) } , <nl> + } ; <nl> + <nl> + static : : google : : protobuf : : Message const * const file_default_instances [ ] = { <nl> + reinterpret_cast < const : : google : : protobuf : : Message * > ( & : : helloworld : : _HelloRequest_default_instance_ ) , <nl> + reinterpret_cast < const : : google : : protobuf : : Message * > ( & : : helloworld : : _HelloReply_default_instance_ ) , <nl> + } ; <nl> + <nl> + void protobuf_AssignDescriptors ( ) { <nl> + AddDescriptors ( ) ; <nl> + AssignDescriptors ( <nl> + " helloworld . proto " , schemas , file_default_instances , TableStruct : : offsets , <nl> + file_level_metadata , NULL , NULL ) ; <nl> + } <nl> + <nl> + void protobuf_AssignDescriptorsOnce ( ) { <nl> + static : : google : : protobuf : : internal : : once_flag once ; <nl> + : : google : : protobuf : : internal : : call_once ( once , protobuf_AssignDescriptors ) ; <nl> + } <nl> + <nl> + void protobuf_RegisterTypes ( const : : std : : string & ) GOOGLE_PROTOBUF_ATTRIBUTE_COLD ; <nl> + void protobuf_RegisterTypes ( const : : std : : string & ) { <nl> + protobuf_AssignDescriptorsOnce ( ) ; <nl> + : : google : : protobuf : : internal : : RegisterAllTypes ( file_level_metadata , 2 ) ; <nl> + } <nl> + <nl> + void AddDescriptorsImpl ( ) { <nl> + InitDefaults ( ) ; <nl> + static const char descriptor [ ] GOOGLE_PROTOBUF_ATTRIBUTE_SECTION_VARIABLE ( protodesc_cold ) = { <nl> + " \ n \ 020helloworld . proto \ 022 \ nhelloworld \ " \ 034 \ n \ 014HelloR " <nl> + " equest \ 022 \ 014 \ n \ 004name \ 030 \ 001 \ 001 ( \ t \ " \ 035 \ n \ nHelloReply \ 022 \ 017 \ n \ 007me " <nl> + " ssage \ 030 \ 001 \ 001 ( \ t2I \ n \ 007Greeter \ 022 > \ n \ 010SayHello \ 022 \ 030 . hel " <nl> + " loworld . HelloRequest \ 032 \ 026 . helloworld . HelloR " <nl> + " eply \ " \ 000B6 \ n \ 033io . grpc . examples . helloworldB \ 017H " <nl> + " elloWorldProtoP \ 001 \ 242 \ 002 \ 003HLWb \ 006proto3 " <nl> + } ; <nl> + : : google : : protobuf : : DescriptorPool : : InternalAddGeneratedFile ( <nl> + descriptor , 230 ) ; <nl> + : : google : : protobuf : : MessageFactory : : InternalRegisterGeneratedFile ( <nl> + " helloworld . 
proto " , & protobuf_RegisterTypes ) ; <nl> + } <nl> + <nl> + void AddDescriptors ( ) { <nl> + static : : google : : protobuf : : internal : : once_flag once ; <nl> + : : google : : protobuf : : internal : : call_once ( once , AddDescriptorsImpl ) ; <nl> + } <nl> + / / Force AddDescriptors ( ) to be called at dynamic initialization time . <nl> + struct StaticDescriptorInitializer { <nl> + StaticDescriptorInitializer ( ) { <nl> + AddDescriptors ( ) ; <nl> + } <nl> + } static_descriptor_initializer ; <nl> + } / / namespace protobuf_helloworld_2eproto <nl> + namespace helloworld { <nl> + <nl> + / / = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> + <nl> + void HelloRequest : : InitAsDefaultInstance ( ) { <nl> + } <nl> + # if ! defined ( _MSC_VER ) | | _MSC_VER > = 1900 <nl> + const int HelloRequest : : kNameFieldNumber ; <nl> + # endif / / ! defined ( _MSC_VER ) | | _MSC_VER > = 1900 <nl> + <nl> + HelloRequest : : HelloRequest ( ) <nl> + : : : google : : protobuf : : Message ( ) , _internal_metadata_ ( NULL ) { <nl> + : : google : : protobuf : : internal : : InitSCC ( <nl> + & protobuf_helloworld_2eproto : : scc_info_HelloRequest . base ) ; <nl> + SharedCtor ( ) ; <nl> + / / @ @ protoc_insertion_point ( constructor : helloworld . HelloRequest ) <nl> + } <nl> + HelloRequest : : HelloRequest ( const HelloRequest & from ) <nl> + : : : google : : protobuf : : Message ( ) , <nl> + _internal_metadata_ ( NULL ) { <nl> + _internal_metadata_ . MergeFrom ( from . _internal_metadata_ ) ; <nl> + name_ . UnsafeSetDefault ( & : : google : : protobuf : : internal : : GetEmptyStringAlreadyInited ( ) ) ; <nl> + if ( from . name ( ) . size ( ) > 0 ) { <nl> + name_ . AssignWithDefault ( & : : google : : protobuf : : internal : : GetEmptyStringAlreadyInited ( ) , from . name_ ) ; <nl> + } <nl> + / / @ @ protoc_insertion_point ( copy_constructor : helloworld . HelloRequest ) <nl> + } <nl> + <nl> + void HelloRequest : : SharedCtor ( ) { <nl> + name_ . UnsafeSetDefault ( & : : google : : protobuf : : internal : : GetEmptyStringAlreadyInited ( ) ) ; <nl> + } <nl> + <nl> + HelloRequest : : ~ HelloRequest ( ) { <nl> + / / @ @ protoc_insertion_point ( destructor : helloworld . HelloRequest ) <nl> + SharedDtor ( ) ; <nl> + } <nl> + <nl> + void HelloRequest : : SharedDtor ( ) { <nl> + name_ . DestroyNoArena ( & : : google : : protobuf : : internal : : GetEmptyStringAlreadyInited ( ) ) ; <nl> + } <nl> + <nl> + void HelloRequest : : SetCachedSize ( int size ) const { <nl> + _cached_size_ . Set ( size ) ; <nl> + } <nl> + const : : google : : protobuf : : Descriptor * HelloRequest : : descriptor ( ) { <nl> + : : protobuf_helloworld_2eproto : : protobuf_AssignDescriptorsOnce ( ) ; <nl> + return : : protobuf_helloworld_2eproto : : file_level_metadata [ kIndexInFileMessages ] . descriptor ; <nl> + } <nl> + <nl> + const HelloRequest & HelloRequest : : default_instance ( ) { <nl> + : : google : : protobuf : : internal : : InitSCC ( & protobuf_helloworld_2eproto : : scc_info_HelloRequest . base ) ; <nl> + return * internal_default_instance ( ) ; <nl> + } <nl> + <nl> + <nl> + void HelloRequest : : Clear ( ) { <nl> + / / @ @ protoc_insertion_point ( message_clear_start : helloworld . HelloRequest ) <nl> + : : google : : protobuf : : uint32 cached_has_bits = 0 ; <nl> + / / Prevent compiler warnings about cached_has_bits being unused <nl> + ( void ) cached_has_bits ; <nl> + <nl> + name_ . 
ClearToEmptyNoArena ( & : : google : : protobuf : : internal : : GetEmptyStringAlreadyInited ( ) ) ; <nl> + _internal_metadata_ . Clear ( ) ; <nl> + } <nl> + <nl> + bool HelloRequest : : MergePartialFromCodedStream ( <nl> + : : google : : protobuf : : io : : CodedInputStream * input ) { <nl> + # define DO_ ( EXPRESSION ) if ( ! GOOGLE_PREDICT_TRUE ( EXPRESSION ) ) goto failure <nl> + : : google : : protobuf : : uint32 tag ; <nl> + / / @ @ protoc_insertion_point ( parse_start : helloworld . HelloRequest ) <nl> + for ( ; ; ) { <nl> + : : std : : pair < : : google : : protobuf : : uint32 , bool > p = input - > ReadTagWithCutoffNoLastTag ( 127u ) ; <nl> + tag = p . first ; <nl> + if ( ! p . second ) goto handle_unusual ; <nl> + switch ( : : google : : protobuf : : internal : : WireFormatLite : : GetTagFieldNumber ( tag ) ) { <nl> + / / string name = 1 ; <nl> + case 1 : { <nl> + if ( static_cast < : : google : : protobuf : : uint8 > ( tag ) = = <nl> + static_cast < : : google : : protobuf : : uint8 > ( 10u / * 10 & 0xFF * / ) ) { <nl> + DO_ ( : : google : : protobuf : : internal : : WireFormatLite : : ReadString ( <nl> + input , this - > mutable_name ( ) ) ) ; <nl> + DO_ ( : : google : : protobuf : : internal : : WireFormatLite : : VerifyUtf8String ( <nl> + this - > name ( ) . data ( ) , static_cast < int > ( this - > name ( ) . length ( ) ) , <nl> + : : google : : protobuf : : internal : : WireFormatLite : : PARSE , <nl> + " helloworld . HelloRequest . name " ) ) ; <nl> + } else { <nl> + goto handle_unusual ; <nl> + } <nl> + break ; <nl> + } <nl> + <nl> + default : { <nl> + handle_unusual : <nl> + if ( tag = = 0 ) { <nl> + goto success ; <nl> + } <nl> + DO_ ( : : google : : protobuf : : internal : : WireFormat : : SkipField ( <nl> + input , tag , _internal_metadata_ . mutable_unknown_fields ( ) ) ) ; <nl> + break ; <nl> + } <nl> + } <nl> + } <nl> + success : <nl> + / / @ @ protoc_insertion_point ( parse_success : helloworld . HelloRequest ) <nl> + return true ; <nl> + failure : <nl> + / / @ @ protoc_insertion_point ( parse_failure : helloworld . HelloRequest ) <nl> + return false ; <nl> + # undef DO_ <nl> + } <nl> + <nl> + void HelloRequest : : SerializeWithCachedSizes ( <nl> + : : google : : protobuf : : io : : CodedOutputStream * output ) const { <nl> + / / @ @ protoc_insertion_point ( serialize_start : helloworld . HelloRequest ) <nl> + : : google : : protobuf : : uint32 cached_has_bits = 0 ; <nl> + ( void ) cached_has_bits ; <nl> + <nl> + / / string name = 1 ; <nl> + if ( this - > name ( ) . size ( ) > 0 ) { <nl> + : : google : : protobuf : : internal : : WireFormatLite : : VerifyUtf8String ( <nl> + this - > name ( ) . data ( ) , static_cast < int > ( this - > name ( ) . length ( ) ) , <nl> + : : google : : protobuf : : internal : : WireFormatLite : : SERIALIZE , <nl> + " helloworld . HelloRequest . name " ) ; <nl> + : : google : : protobuf : : internal : : WireFormatLite : : WriteStringMaybeAliased ( <nl> + 1 , this - > name ( ) , output ) ; <nl> + } <nl> + <nl> + if ( ( _internal_metadata_ . have_unknown_fields ( ) & & : : google : : protobuf : : internal : : GetProto3PreserveUnknownsDefault ( ) ) ) { <nl> + : : google : : protobuf : : internal : : WireFormat : : SerializeUnknownFields ( <nl> + ( : : google : : protobuf : : internal : : GetProto3PreserveUnknownsDefault ( ) ? _internal_metadata_ . unknown_fields ( ) : _internal_metadata_ . default_instance ( ) ) , output ) ; <nl> + } <nl> + / / @ @ protoc_insertion_point ( serialize_end : helloworld . 
HelloRequest ) <nl> + } <nl> + <nl> + : : google : : protobuf : : uint8 * HelloRequest : : InternalSerializeWithCachedSizesToArray ( <nl> + bool deterministic , : : google : : protobuf : : uint8 * target ) const { <nl> + ( void ) deterministic ; / / Unused <nl> + / / @ @ protoc_insertion_point ( serialize_to_array_start : helloworld . HelloRequest ) <nl> + : : google : : protobuf : : uint32 cached_has_bits = 0 ; <nl> + ( void ) cached_has_bits ; <nl> + <nl> + / / string name = 1 ; <nl> + if ( this - > name ( ) . size ( ) > 0 ) { <nl> + : : google : : protobuf : : internal : : WireFormatLite : : VerifyUtf8String ( <nl> + this - > name ( ) . data ( ) , static_cast < int > ( this - > name ( ) . length ( ) ) , <nl> + : : google : : protobuf : : internal : : WireFormatLite : : SERIALIZE , <nl> + " helloworld . HelloRequest . name " ) ; <nl> + target = <nl> + : : google : : protobuf : : internal : : WireFormatLite : : WriteStringToArray ( <nl> + 1 , this - > name ( ) , target ) ; <nl> + } <nl> + <nl> + if ( ( _internal_metadata_ . have_unknown_fields ( ) & & : : google : : protobuf : : internal : : GetProto3PreserveUnknownsDefault ( ) ) ) { <nl> + target = : : google : : protobuf : : internal : : WireFormat : : SerializeUnknownFieldsToArray ( <nl> + ( : : google : : protobuf : : internal : : GetProto3PreserveUnknownsDefault ( ) ? _internal_metadata_ . unknown_fields ( ) : _internal_metadata_ . default_instance ( ) ) , target ) ; <nl> + } <nl> + / / @ @ protoc_insertion_point ( serialize_to_array_end : helloworld . HelloRequest ) <nl> + return target ; <nl> + } <nl> + <nl> + size_t HelloRequest : : ByteSizeLong ( ) const { <nl> + / / @ @ protoc_insertion_point ( message_byte_size_start : helloworld . HelloRequest ) <nl> + size_t total_size = 0 ; <nl> + <nl> + if ( ( _internal_metadata_ . have_unknown_fields ( ) & & : : google : : protobuf : : internal : : GetProto3PreserveUnknownsDefault ( ) ) ) { <nl> + total_size + = <nl> + : : google : : protobuf : : internal : : WireFormat : : ComputeUnknownFieldsSize ( <nl> + ( : : google : : protobuf : : internal : : GetProto3PreserveUnknownsDefault ( ) ? _internal_metadata_ . unknown_fields ( ) : _internal_metadata_ . default_instance ( ) ) ) ; <nl> + } <nl> + / / string name = 1 ; <nl> + if ( this - > name ( ) . size ( ) > 0 ) { <nl> + total_size + = 1 + <nl> + : : google : : protobuf : : internal : : WireFormatLite : : StringSize ( <nl> + this - > name ( ) ) ; <nl> + } <nl> + <nl> + int cached_size = : : google : : protobuf : : internal : : ToCachedSize ( total_size ) ; <nl> + SetCachedSize ( cached_size ) ; <nl> + return total_size ; <nl> + } <nl> + <nl> + void HelloRequest : : MergeFrom ( const : : google : : protobuf : : Message & from ) { <nl> + / / @ @ protoc_insertion_point ( generalized_merge_from_start : helloworld . HelloRequest ) <nl> + GOOGLE_DCHECK_NE ( & from , this ) ; <nl> + const HelloRequest * source = <nl> + : : google : : protobuf : : internal : : DynamicCastToGenerated < const HelloRequest > ( <nl> + & from ) ; <nl> + if ( source = = NULL ) { <nl> + / / @ @ protoc_insertion_point ( generalized_merge_from_cast_fail : helloworld . HelloRequest ) <nl> + : : google : : protobuf : : internal : : ReflectionOps : : Merge ( from , this ) ; <nl> + } else { <nl> + / / @ @ protoc_insertion_point ( generalized_merge_from_cast_success : helloworld . 
HelloRequest ) <nl> + MergeFrom ( * source ) ; <nl> + } <nl> + } <nl> + <nl> + void HelloRequest : : MergeFrom ( const HelloRequest & from ) { <nl> + / / @ @ protoc_insertion_point ( class_specific_merge_from_start : helloworld . HelloRequest ) <nl> + GOOGLE_DCHECK_NE ( & from , this ) ; <nl> + _internal_metadata_ . MergeFrom ( from . _internal_metadata_ ) ; <nl> + : : google : : protobuf : : uint32 cached_has_bits = 0 ; <nl> + ( void ) cached_has_bits ; <nl> + <nl> + if ( from . name ( ) . size ( ) > 0 ) { <nl> + <nl> + name_ . AssignWithDefault ( & : : google : : protobuf : : internal : : GetEmptyStringAlreadyInited ( ) , from . name_ ) ; <nl> + } <nl> + } <nl> + <nl> + void HelloRequest : : CopyFrom ( const : : google : : protobuf : : Message & from ) { <nl> + / / @ @ protoc_insertion_point ( generalized_copy_from_start : helloworld . HelloRequest ) <nl> + if ( & from = = this ) return ; <nl> + Clear ( ) ; <nl> + MergeFrom ( from ) ; <nl> + } <nl> + <nl> + void HelloRequest : : CopyFrom ( const HelloRequest & from ) { <nl> + / / @ @ protoc_insertion_point ( class_specific_copy_from_start : helloworld . HelloRequest ) <nl> + if ( & from = = this ) return ; <nl> + Clear ( ) ; <nl> + MergeFrom ( from ) ; <nl> + } <nl> + <nl> + bool HelloRequest : : IsInitialized ( ) const { <nl> + return true ; <nl> + } <nl> + <nl> + void HelloRequest : : Swap ( HelloRequest * other ) { <nl> + if ( other = = this ) return ; <nl> + InternalSwap ( other ) ; <nl> + } <nl> + void HelloRequest : : InternalSwap ( HelloRequest * other ) { <nl> + using std : : swap ; <nl> + name_ . Swap ( & other - > name_ , & : : google : : protobuf : : internal : : GetEmptyStringAlreadyInited ( ) , <nl> + GetArenaNoVirtual ( ) ) ; <nl> + _internal_metadata_ . Swap ( & other - > _internal_metadata_ ) ; <nl> + } <nl> + <nl> + : : google : : protobuf : : Metadata HelloRequest : : GetMetadata ( ) const { <nl> + protobuf_helloworld_2eproto : : protobuf_AssignDescriptorsOnce ( ) ; <nl> + return : : protobuf_helloworld_2eproto : : file_level_metadata [ kIndexInFileMessages ] ; <nl> + } <nl> + <nl> + <nl> + / / = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> + <nl> + void HelloReply : : InitAsDefaultInstance ( ) { <nl> + } <nl> + # if ! defined ( _MSC_VER ) | | _MSC_VER > = 1900 <nl> + const int HelloReply : : kMessageFieldNumber ; <nl> + # endif / / ! defined ( _MSC_VER ) | | _MSC_VER > = 1900 <nl> + <nl> + HelloReply : : HelloReply ( ) <nl> + : : : google : : protobuf : : Message ( ) , _internal_metadata_ ( NULL ) { <nl> + : : google : : protobuf : : internal : : InitSCC ( <nl> + & protobuf_helloworld_2eproto : : scc_info_HelloReply . base ) ; <nl> + SharedCtor ( ) ; <nl> + / / @ @ protoc_insertion_point ( constructor : helloworld . HelloReply ) <nl> + } <nl> + HelloReply : : HelloReply ( const HelloReply & from ) <nl> + : : : google : : protobuf : : Message ( ) , <nl> + _internal_metadata_ ( NULL ) { <nl> + _internal_metadata_ . MergeFrom ( from . _internal_metadata_ ) ; <nl> + message_ . UnsafeSetDefault ( & : : google : : protobuf : : internal : : GetEmptyStringAlreadyInited ( ) ) ; <nl> + if ( from . message ( ) . size ( ) > 0 ) { <nl> + message_ . AssignWithDefault ( & : : google : : protobuf : : internal : : GetEmptyStringAlreadyInited ( ) , from . message_ ) ; <nl> + } <nl> + / / @ @ protoc_insertion_point ( copy_constructor : helloworld . HelloReply ) <nl> + } <nl> + <nl> + void HelloReply : : SharedCtor ( ) { <nl> + message_ . 
UnsafeSetDefault ( & : : google : : protobuf : : internal : : GetEmptyStringAlreadyInited ( ) ) ; <nl> + } <nl> + <nl> + HelloReply : : ~ HelloReply ( ) { <nl> + / / @ @ protoc_insertion_point ( destructor : helloworld . HelloReply ) <nl> + SharedDtor ( ) ; <nl> + } <nl> + <nl> + void HelloReply : : SharedDtor ( ) { <nl> + message_ . DestroyNoArena ( & : : google : : protobuf : : internal : : GetEmptyStringAlreadyInited ( ) ) ; <nl> + } <nl> + <nl> + void HelloReply : : SetCachedSize ( int size ) const { <nl> + _cached_size_ . Set ( size ) ; <nl> + } <nl> + const : : google : : protobuf : : Descriptor * HelloReply : : descriptor ( ) { <nl> + : : protobuf_helloworld_2eproto : : protobuf_AssignDescriptorsOnce ( ) ; <nl> + return : : protobuf_helloworld_2eproto : : file_level_metadata [ kIndexInFileMessages ] . descriptor ; <nl> + } <nl> + <nl> + const HelloReply & HelloReply : : default_instance ( ) { <nl> + : : google : : protobuf : : internal : : InitSCC ( & protobuf_helloworld_2eproto : : scc_info_HelloReply . base ) ; <nl> + return * internal_default_instance ( ) ; <nl> + } <nl> + <nl> + <nl> + void HelloReply : : Clear ( ) { <nl> + / / @ @ protoc_insertion_point ( message_clear_start : helloworld . HelloReply ) <nl> + : : google : : protobuf : : uint32 cached_has_bits = 0 ; <nl> + / / Prevent compiler warnings about cached_has_bits being unused <nl> + ( void ) cached_has_bits ; <nl> + <nl> + message_ . ClearToEmptyNoArena ( & : : google : : protobuf : : internal : : GetEmptyStringAlreadyInited ( ) ) ; <nl> + _internal_metadata_ . Clear ( ) ; <nl> + } <nl> + <nl> + bool HelloReply : : MergePartialFromCodedStream ( <nl> + : : google : : protobuf : : io : : CodedInputStream * input ) { <nl> + # define DO_ ( EXPRESSION ) if ( ! GOOGLE_PREDICT_TRUE ( EXPRESSION ) ) goto failure <nl> + : : google : : protobuf : : uint32 tag ; <nl> + / / @ @ protoc_insertion_point ( parse_start : helloworld . HelloReply ) <nl> + for ( ; ; ) { <nl> + : : std : : pair < : : google : : protobuf : : uint32 , bool > p = input - > ReadTagWithCutoffNoLastTag ( 127u ) ; <nl> + tag = p . first ; <nl> + if ( ! p . second ) goto handle_unusual ; <nl> + switch ( : : google : : protobuf : : internal : : WireFormatLite : : GetTagFieldNumber ( tag ) ) { <nl> + / / string message = 1 ; <nl> + case 1 : { <nl> + if ( static_cast < : : google : : protobuf : : uint8 > ( tag ) = = <nl> + static_cast < : : google : : protobuf : : uint8 > ( 10u / * 10 & 0xFF * / ) ) { <nl> + DO_ ( : : google : : protobuf : : internal : : WireFormatLite : : ReadString ( <nl> + input , this - > mutable_message ( ) ) ) ; <nl> + DO_ ( : : google : : protobuf : : internal : : WireFormatLite : : VerifyUtf8String ( <nl> + this - > message ( ) . data ( ) , static_cast < int > ( this - > message ( ) . length ( ) ) , <nl> + : : google : : protobuf : : internal : : WireFormatLite : : PARSE , <nl> + " helloworld . HelloReply . message " ) ) ; <nl> + } else { <nl> + goto handle_unusual ; <nl> + } <nl> + break ; <nl> + } <nl> + <nl> + default : { <nl> + handle_unusual : <nl> + if ( tag = = 0 ) { <nl> + goto success ; <nl> + } <nl> + DO_ ( : : google : : protobuf : : internal : : WireFormat : : SkipField ( <nl> + input , tag , _internal_metadata_ . mutable_unknown_fields ( ) ) ) ; <nl> + break ; <nl> + } <nl> + } <nl> + } <nl> + success : <nl> + / / @ @ protoc_insertion_point ( parse_success : helloworld . HelloReply ) <nl> + return true ; <nl> + failure : <nl> + / / @ @ protoc_insertion_point ( parse_failure : helloworld . 
HelloReply ) <nl> + return false ; <nl> + # undef DO_ <nl> + } <nl> + <nl> + void HelloReply : : SerializeWithCachedSizes ( <nl> + : : google : : protobuf : : io : : CodedOutputStream * output ) const { <nl> + / / @ @ protoc_insertion_point ( serialize_start : helloworld . HelloReply ) <nl> + : : google : : protobuf : : uint32 cached_has_bits = 0 ; <nl> + ( void ) cached_has_bits ; <nl> + <nl> + / / string message = 1 ; <nl> + if ( this - > message ( ) . size ( ) > 0 ) { <nl> + : : google : : protobuf : : internal : : WireFormatLite : : VerifyUtf8String ( <nl> + this - > message ( ) . data ( ) , static_cast < int > ( this - > message ( ) . length ( ) ) , <nl> + : : google : : protobuf : : internal : : WireFormatLite : : SERIALIZE , <nl> + " helloworld . HelloReply . message " ) ; <nl> + : : google : : protobuf : : internal : : WireFormatLite : : WriteStringMaybeAliased ( <nl> + 1 , this - > message ( ) , output ) ; <nl> + } <nl> + <nl> + if ( ( _internal_metadata_ . have_unknown_fields ( ) & & : : google : : protobuf : : internal : : GetProto3PreserveUnknownsDefault ( ) ) ) { <nl> + : : google : : protobuf : : internal : : WireFormat : : SerializeUnknownFields ( <nl> + ( : : google : : protobuf : : internal : : GetProto3PreserveUnknownsDefault ( ) ? _internal_metadata_ . unknown_fields ( ) : _internal_metadata_ . default_instance ( ) ) , output ) ; <nl> + } <nl> + / / @ @ protoc_insertion_point ( serialize_end : helloworld . HelloReply ) <nl> + } <nl> + <nl> + : : google : : protobuf : : uint8 * HelloReply : : InternalSerializeWithCachedSizesToArray ( <nl> + bool deterministic , : : google : : protobuf : : uint8 * target ) const { <nl> + ( void ) deterministic ; / / Unused <nl> + / / @ @ protoc_insertion_point ( serialize_to_array_start : helloworld . HelloReply ) <nl> + : : google : : protobuf : : uint32 cached_has_bits = 0 ; <nl> + ( void ) cached_has_bits ; <nl> + <nl> + / / string message = 1 ; <nl> + if ( this - > message ( ) . size ( ) > 0 ) { <nl> + : : google : : protobuf : : internal : : WireFormatLite : : VerifyUtf8String ( <nl> + this - > message ( ) . data ( ) , static_cast < int > ( this - > message ( ) . length ( ) ) , <nl> + : : google : : protobuf : : internal : : WireFormatLite : : SERIALIZE , <nl> + " helloworld . HelloReply . message " ) ; <nl> + target = <nl> + : : google : : protobuf : : internal : : WireFormatLite : : WriteStringToArray ( <nl> + 1 , this - > message ( ) , target ) ; <nl> + } <nl> + <nl> + if ( ( _internal_metadata_ . have_unknown_fields ( ) & & : : google : : protobuf : : internal : : GetProto3PreserveUnknownsDefault ( ) ) ) { <nl> + target = : : google : : protobuf : : internal : : WireFormat : : SerializeUnknownFieldsToArray ( <nl> + ( : : google : : protobuf : : internal : : GetProto3PreserveUnknownsDefault ( ) ? _internal_metadata_ . unknown_fields ( ) : _internal_metadata_ . default_instance ( ) ) , target ) ; <nl> + } <nl> + / / @ @ protoc_insertion_point ( serialize_to_array_end : helloworld . HelloReply ) <nl> + return target ; <nl> + } <nl> + <nl> + size_t HelloReply : : ByteSizeLong ( ) const { <nl> + / / @ @ protoc_insertion_point ( message_byte_size_start : helloworld . HelloReply ) <nl> + size_t total_size = 0 ; <nl> + <nl> + if ( ( _internal_metadata_ . 
have_unknown_fields ( ) & & : : google : : protobuf : : internal : : GetProto3PreserveUnknownsDefault ( ) ) ) { <nl> + total_size + = <nl> + : : google : : protobuf : : internal : : WireFormat : : ComputeUnknownFieldsSize ( <nl> + ( : : google : : protobuf : : internal : : GetProto3PreserveUnknownsDefault ( ) ? _internal_metadata_ . unknown_fields ( ) : _internal_metadata_ . default_instance ( ) ) ) ; <nl> + } <nl> + / / string message = 1 ; <nl> + if ( this - > message ( ) . size ( ) > 0 ) { <nl> + total_size + = 1 + <nl> + : : google : : protobuf : : internal : : WireFormatLite : : StringSize ( <nl> + this - > message ( ) ) ; <nl> + } <nl> + <nl> + int cached_size = : : google : : protobuf : : internal : : ToCachedSize ( total_size ) ; <nl> + SetCachedSize ( cached_size ) ; <nl> + return total_size ; <nl> + } <nl> + <nl> + void HelloReply : : MergeFrom ( const : : google : : protobuf : : Message & from ) { <nl> + / / @ @ protoc_insertion_point ( generalized_merge_from_start : helloworld . HelloReply ) <nl> + GOOGLE_DCHECK_NE ( & from , this ) ; <nl> + const HelloReply * source = <nl> + : : google : : protobuf : : internal : : DynamicCastToGenerated < const HelloReply > ( <nl> + & from ) ; <nl> + if ( source = = NULL ) { <nl> + / / @ @ protoc_insertion_point ( generalized_merge_from_cast_fail : helloworld . HelloReply ) <nl> + : : google : : protobuf : : internal : : ReflectionOps : : Merge ( from , this ) ; <nl> + } else { <nl> + / / @ @ protoc_insertion_point ( generalized_merge_from_cast_success : helloworld . HelloReply ) <nl> + MergeFrom ( * source ) ; <nl> + } <nl> + } <nl> + <nl> + void HelloReply : : MergeFrom ( const HelloReply & from ) { <nl> + / / @ @ protoc_insertion_point ( class_specific_merge_from_start : helloworld . HelloReply ) <nl> + GOOGLE_DCHECK_NE ( & from , this ) ; <nl> + _internal_metadata_ . MergeFrom ( from . _internal_metadata_ ) ; <nl> + : : google : : protobuf : : uint32 cached_has_bits = 0 ; <nl> + ( void ) cached_has_bits ; <nl> + <nl> + if ( from . message ( ) . size ( ) > 0 ) { <nl> + <nl> + message_ . AssignWithDefault ( & : : google : : protobuf : : internal : : GetEmptyStringAlreadyInited ( ) , from . message_ ) ; <nl> + } <nl> + } <nl> + <nl> + void HelloReply : : CopyFrom ( const : : google : : protobuf : : Message & from ) { <nl> + / / @ @ protoc_insertion_point ( generalized_copy_from_start : helloworld . HelloReply ) <nl> + if ( & from = = this ) return ; <nl> + Clear ( ) ; <nl> + MergeFrom ( from ) ; <nl> + } <nl> + <nl> + void HelloReply : : CopyFrom ( const HelloReply & from ) { <nl> + / / @ @ protoc_insertion_point ( class_specific_copy_from_start : helloworld . HelloReply ) <nl> + if ( & from = = this ) return ; <nl> + Clear ( ) ; <nl> + MergeFrom ( from ) ; <nl> + } <nl> + <nl> + bool HelloReply : : IsInitialized ( ) const { <nl> + return true ; <nl> + } <nl> + <nl> + void HelloReply : : Swap ( HelloReply * other ) { <nl> + if ( other = = this ) return ; <nl> + InternalSwap ( other ) ; <nl> + } <nl> + void HelloReply : : InternalSwap ( HelloReply * other ) { <nl> + using std : : swap ; <nl> + message_ . Swap ( & other - > message_ , & : : google : : protobuf : : internal : : GetEmptyStringAlreadyInited ( ) , <nl> + GetArenaNoVirtual ( ) ) ; <nl> + _internal_metadata_ . 
Swap ( & other - > _internal_metadata_ ) ; <nl> + } <nl> + <nl> + : : google : : protobuf : : Metadata HelloReply : : GetMetadata ( ) const { <nl> + protobuf_helloworld_2eproto : : protobuf_AssignDescriptorsOnce ( ) ; <nl> + return : : protobuf_helloworld_2eproto : : file_level_metadata [ kIndexInFileMessages ] ; <nl> + } <nl> + <nl> + <nl> + / / @ @ protoc_insertion_point ( namespace_scope ) <nl> + } / / namespace helloworld <nl> + namespace google { <nl> + namespace protobuf { <nl> + template < > GOOGLE_PROTOBUF_ATTRIBUTE_NOINLINE : : helloworld : : HelloRequest * Arena : : CreateMaybeMessage < : : helloworld : : HelloRequest > ( Arena * arena ) { <nl> + return Arena : : CreateInternal < : : helloworld : : HelloRequest > ( arena ) ; <nl> + } <nl> + template < > GOOGLE_PROTOBUF_ATTRIBUTE_NOINLINE : : helloworld : : HelloReply * Arena : : CreateMaybeMessage < : : helloworld : : HelloReply > ( Arena * arena ) { <nl> + return Arena : : CreateInternal < : : helloworld : : HelloReply > ( arena ) ; <nl> + } <nl> + } / / namespace protobuf <nl> + } / / namespace google <nl> + <nl> + / / @ @ protoc_insertion_point ( global_scope ) <nl> new file mode 100644 <nl> index 00000000000 . . 57f6817e310 <nl> mmm / dev / null <nl> ppp b / examples / cpp / metadata / helloworld . pb . h <nl> <nl> + / / Generated by the protocol buffer compiler . DO NOT EDIT ! <nl> + / / source : helloworld . proto <nl> + <nl> + # ifndef PROTOBUF_INCLUDED_helloworld_2eproto <nl> + # define PROTOBUF_INCLUDED_helloworld_2eproto <nl> + <nl> + # include < string > <nl> + <nl> + # include < google / protobuf / stubs / common . h > <nl> + <nl> + # if GOOGLE_PROTOBUF_VERSION < 3006001 <nl> + # error This file was generated by a newer version of protoc which is <nl> + # error incompatible with your Protocol Buffer headers . Please update <nl> + # error your headers . <nl> + # endif <nl> + # if 3006001 < GOOGLE_PROTOBUF_MIN_PROTOC_VERSION <nl> + # error This file was generated by an older version of protoc which is <nl> + # error incompatible with your Protocol Buffer headers . Please <nl> + # error regenerate this file with a newer version of protoc . <nl> + # endif <nl> + <nl> + # include < google / protobuf / io / coded_stream . h > <nl> + # include < google / protobuf / arena . h > <nl> + # include < google / protobuf / arenastring . h > <nl> + # include < google / protobuf / generated_message_table_driven . h > <nl> + # include < google / protobuf / generated_message_util . h > <nl> + # include < google / protobuf / inlined_string_field . h > <nl> + # include < google / protobuf / metadata . h > <nl> + # include < google / protobuf / message . h > <nl> + # include < google / protobuf / repeated_field . h > / / IWYU pragma : export <nl> + # include < google / protobuf / extension_set . h > / / IWYU pragma : export <nl> + # include < google / protobuf / unknown_field_set . h > <nl> + / / @ @ protoc_insertion_point ( includes ) <nl> + # define PROTOBUF_INTERNAL_EXPORT_protobuf_helloworld_2eproto <nl> + <nl> + namespace protobuf_helloworld_2eproto { <nl> + / / Internal implementation detail - - do not use these members . 
<nl> + struct TableStruct { <nl> + static const : : google : : protobuf : : internal : : ParseTableField entries [ ] ; <nl> + static const : : google : : protobuf : : internal : : AuxillaryParseTableField aux [ ] ; <nl> + static const : : google : : protobuf : : internal : : ParseTable schema [ 2 ] ; <nl> + static const : : google : : protobuf : : internal : : FieldMetadata field_metadata [ ] ; <nl> + static const : : google : : protobuf : : internal : : SerializationTable serialization_table [ ] ; <nl> + static const : : google : : protobuf : : uint32 offsets [ ] ; <nl> + } ; <nl> + void AddDescriptors ( ) ; <nl> + } / / namespace protobuf_helloworld_2eproto <nl> + namespace helloworld { <nl> + class HelloReply ; <nl> + class HelloReplyDefaultTypeInternal ; <nl> + extern HelloReplyDefaultTypeInternal _HelloReply_default_instance_ ; <nl> + class HelloRequest ; <nl> + class HelloRequestDefaultTypeInternal ; <nl> + extern HelloRequestDefaultTypeInternal _HelloRequest_default_instance_ ; <nl> + } / / namespace helloworld <nl> + namespace google { <nl> + namespace protobuf { <nl> + template < > : : helloworld : : HelloReply * Arena : : CreateMaybeMessage < : : helloworld : : HelloReply > ( Arena * ) ; <nl> + template < > : : helloworld : : HelloRequest * Arena : : CreateMaybeMessage < : : helloworld : : HelloRequest > ( Arena * ) ; <nl> + } / / namespace protobuf <nl> + } / / namespace google <nl> + namespace helloworld { <nl> + <nl> + / / = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> + <nl> + class HelloRequest : public : : google : : protobuf : : Message / * @ @ protoc_insertion_point ( class_definition : helloworld . HelloRequest ) * / { <nl> + public : <nl> + HelloRequest ( ) ; <nl> + virtual ~ HelloRequest ( ) ; <nl> + <nl> + HelloRequest ( const HelloRequest & from ) ; <nl> + <nl> + inline HelloRequest & operator = ( const HelloRequest & from ) { <nl> + CopyFrom ( from ) ; <nl> + return * this ; <nl> + } <nl> + # if LANG_CXX11 <nl> + HelloRequest ( HelloRequest & & from ) noexcept <nl> + : HelloRequest ( ) { <nl> + * this = : : std : : move ( from ) ; <nl> + } <nl> + <nl> + inline HelloRequest & operator = ( HelloRequest & & from ) noexcept { <nl> + if ( GetArenaNoVirtual ( ) = = from . GetArenaNoVirtual ( ) ) { <nl> + if ( this ! = & from ) InternalSwap ( & from ) ; <nl> + } else { <nl> + CopyFrom ( from ) ; <nl> + } <nl> + return * this ; <nl> + } <nl> + # endif <nl> + static const : : google : : protobuf : : Descriptor * descriptor ( ) ; <nl> + static const HelloRequest & default_instance ( ) ; <nl> + <nl> + static void InitAsDefaultInstance ( ) ; / / FOR INTERNAL USE ONLY <nl> + static inline const HelloRequest * internal_default_instance ( ) { <nl> + return reinterpret_cast < const HelloRequest * > ( <nl> + & _HelloRequest_default_instance_ ) ; <nl> + } <nl> + static constexpr int kIndexInFileMessages = <nl> + 0 ; <nl> + <nl> + void Swap ( HelloRequest * other ) ; <nl> + friend void swap ( HelloRequest & a , HelloRequest & b ) { <nl> + a . 
Swap ( & b ) ; <nl> + } <nl> + <nl> + / / implements Message mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + <nl> + inline HelloRequest * New ( ) const final { <nl> + return CreateMaybeMessage < HelloRequest > ( NULL ) ; <nl> + } <nl> + <nl> + HelloRequest * New ( : : google : : protobuf : : Arena * arena ) const final { <nl> + return CreateMaybeMessage < HelloRequest > ( arena ) ; <nl> + } <nl> + void CopyFrom ( const : : google : : protobuf : : Message & from ) final ; <nl> + void MergeFrom ( const : : google : : protobuf : : Message & from ) final ; <nl> + void CopyFrom ( const HelloRequest & from ) ; <nl> + void MergeFrom ( const HelloRequest & from ) ; <nl> + void Clear ( ) final ; <nl> + bool IsInitialized ( ) const final ; <nl> + <nl> + size_t ByteSizeLong ( ) const final ; <nl> + bool MergePartialFromCodedStream ( <nl> + : : google : : protobuf : : io : : CodedInputStream * input ) final ; <nl> + void SerializeWithCachedSizes ( <nl> + : : google : : protobuf : : io : : CodedOutputStream * output ) const final ; <nl> + : : google : : protobuf : : uint8 * InternalSerializeWithCachedSizesToArray ( <nl> + bool deterministic , : : google : : protobuf : : uint8 * target ) const final ; <nl> + int GetCachedSize ( ) const final { return _cached_size_ . Get ( ) ; } <nl> + <nl> + private : <nl> + void SharedCtor ( ) ; <nl> + void SharedDtor ( ) ; <nl> + void SetCachedSize ( int size ) const final ; <nl> + void InternalSwap ( HelloRequest * other ) ; <nl> + private : <nl> + inline : : google : : protobuf : : Arena * GetArenaNoVirtual ( ) const { <nl> + return NULL ; <nl> + } <nl> + inline void * MaybeArenaPtr ( ) const { <nl> + return NULL ; <nl> + } <nl> + public : <nl> + <nl> + : : google : : protobuf : : Metadata GetMetadata ( ) const final ; <nl> + <nl> + / / nested types mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + <nl> + / / accessors mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + <nl> + / / string name = 1 ; <nl> + void clear_name ( ) ; <nl> + static const int kNameFieldNumber = 1 ; <nl> + const : : std : : string & name ( ) const ; <nl> + void set_name ( const : : std : : string & value ) ; <nl> + # if LANG_CXX11 <nl> + void set_name ( : : std : : string & & value ) ; <nl> + # endif <nl> + void set_name ( const char * value ) ; <nl> + void set_name ( const char * value , size_t size ) ; <nl> + : : std : : string * mutable_name ( ) ; <nl> + : : std : : string * release_name ( ) ; <nl> + void set_allocated_name ( : : std : : string * name ) ; <nl> + <nl> + / / @ @ protoc_insertion_point ( class_scope : helloworld . HelloRequest ) <nl> + private : <nl> + <nl> + : : google : : protobuf : : internal : : InternalMetadataWithArena _internal_metadata_ ; <nl> + : : google : : protobuf : : internal : : ArenaStringPtr name_ ; <nl> + mutable : : google : : protobuf : : internal : : CachedSize _cached_size_ ; <nl> + friend struct : : protobuf_helloworld_2eproto : : TableStruct ; <nl> + } ; <nl> + / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + <nl> + class HelloReply : public : : google : : protobuf : : Message / * @ @ protoc_insertion_point ( class_definition : helloworld . 
HelloReply ) * / { <nl> + public : <nl> + HelloReply ( ) ; <nl> + virtual ~ HelloReply ( ) ; <nl> + <nl> + HelloReply ( const HelloReply & from ) ; <nl> + <nl> + inline HelloReply & operator = ( const HelloReply & from ) { <nl> + CopyFrom ( from ) ; <nl> + return * this ; <nl> + } <nl> + # if LANG_CXX11 <nl> + HelloReply ( HelloReply & & from ) noexcept <nl> + : HelloReply ( ) { <nl> + * this = : : std : : move ( from ) ; <nl> + } <nl> + <nl> + inline HelloReply & operator = ( HelloReply & & from ) noexcept { <nl> + if ( GetArenaNoVirtual ( ) = = from . GetArenaNoVirtual ( ) ) { <nl> + if ( this ! = & from ) InternalSwap ( & from ) ; <nl> + } else { <nl> + CopyFrom ( from ) ; <nl> + } <nl> + return * this ; <nl> + } <nl> + # endif <nl> + static const : : google : : protobuf : : Descriptor * descriptor ( ) ; <nl> + static const HelloReply & default_instance ( ) ; <nl> + <nl> + static void InitAsDefaultInstance ( ) ; / / FOR INTERNAL USE ONLY <nl> + static inline const HelloReply * internal_default_instance ( ) { <nl> + return reinterpret_cast < const HelloReply * > ( <nl> + & _HelloReply_default_instance_ ) ; <nl> + } <nl> + static constexpr int kIndexInFileMessages = <nl> + 1 ; <nl> + <nl> + void Swap ( HelloReply * other ) ; <nl> + friend void swap ( HelloReply & a , HelloReply & b ) { <nl> + a . Swap ( & b ) ; <nl> + } <nl> + <nl> + / / implements Message mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + <nl> + inline HelloReply * New ( ) const final { <nl> + return CreateMaybeMessage < HelloReply > ( NULL ) ; <nl> + } <nl> + <nl> + HelloReply * New ( : : google : : protobuf : : Arena * arena ) const final { <nl> + return CreateMaybeMessage < HelloReply > ( arena ) ; <nl> + } <nl> + void CopyFrom ( const : : google : : protobuf : : Message & from ) final ; <nl> + void MergeFrom ( const : : google : : protobuf : : Message & from ) final ; <nl> + void CopyFrom ( const HelloReply & from ) ; <nl> + void MergeFrom ( const HelloReply & from ) ; <nl> + void Clear ( ) final ; <nl> + bool IsInitialized ( ) const final ; <nl> + <nl> + size_t ByteSizeLong ( ) const final ; <nl> + bool MergePartialFromCodedStream ( <nl> + : : google : : protobuf : : io : : CodedInputStream * input ) final ; <nl> + void SerializeWithCachedSizes ( <nl> + : : google : : protobuf : : io : : CodedOutputStream * output ) const final ; <nl> + : : google : : protobuf : : uint8 * InternalSerializeWithCachedSizesToArray ( <nl> + bool deterministic , : : google : : protobuf : : uint8 * target ) const final ; <nl> + int GetCachedSize ( ) const final { return _cached_size_ . 
Get ( ) ; } <nl> + <nl> + private : <nl> + void SharedCtor ( ) ; <nl> + void SharedDtor ( ) ; <nl> + void SetCachedSize ( int size ) const final ; <nl> + void InternalSwap ( HelloReply * other ) ; <nl> + private : <nl> + inline : : google : : protobuf : : Arena * GetArenaNoVirtual ( ) const { <nl> + return NULL ; <nl> + } <nl> + inline void * MaybeArenaPtr ( ) const { <nl> + return NULL ; <nl> + } <nl> + public : <nl> + <nl> + : : google : : protobuf : : Metadata GetMetadata ( ) const final ; <nl> + <nl> + / / nested types mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + <nl> + / / accessors mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + <nl> + / / string message = 1 ; <nl> + void clear_message ( ) ; <nl> + static const int kMessageFieldNumber = 1 ; <nl> + const : : std : : string & message ( ) const ; <nl> + void set_message ( const : : std : : string & value ) ; <nl> + # if LANG_CXX11 <nl> + void set_message ( : : std : : string & & value ) ; <nl> + # endif <nl> + void set_message ( const char * value ) ; <nl> + void set_message ( const char * value , size_t size ) ; <nl> + : : std : : string * mutable_message ( ) ; <nl> + : : std : : string * release_message ( ) ; <nl> + void set_allocated_message ( : : std : : string * message ) ; <nl> + <nl> + / / @ @ protoc_insertion_point ( class_scope : helloworld . HelloReply ) <nl> + private : <nl> + <nl> + : : google : : protobuf : : internal : : InternalMetadataWithArena _internal_metadata_ ; <nl> + : : google : : protobuf : : internal : : ArenaStringPtr message_ ; <nl> + mutable : : google : : protobuf : : internal : : CachedSize _cached_size_ ; <nl> + friend struct : : protobuf_helloworld_2eproto : : TableStruct ; <nl> + } ; <nl> + / / = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> + <nl> + <nl> + / / = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> + <nl> + # ifdef __GNUC__ <nl> + # pragma GCC diagnostic push <nl> + # pragma GCC diagnostic ignored " - Wstrict - aliasing " <nl> + # endif / / __GNUC__ <nl> + / / HelloRequest <nl> + <nl> + / / string name = 1 ; <nl> + inline void HelloRequest : : clear_name ( ) { <nl> + name_ . ClearToEmptyNoArena ( & : : google : : protobuf : : internal : : GetEmptyStringAlreadyInited ( ) ) ; <nl> + } <nl> + inline const : : std : : string & HelloRequest : : name ( ) const { <nl> + / / @ @ protoc_insertion_point ( field_get : helloworld . HelloRequest . name ) <nl> + return name_ . GetNoArena ( ) ; <nl> + } <nl> + inline void HelloRequest : : set_name ( const : : std : : string & value ) { <nl> + <nl> + name_ . SetNoArena ( & : : google : : protobuf : : internal : : GetEmptyStringAlreadyInited ( ) , value ) ; <nl> + / / @ @ protoc_insertion_point ( field_set : helloworld . HelloRequest . name ) <nl> + } <nl> + # if LANG_CXX11 <nl> + inline void HelloRequest : : set_name ( : : std : : string & & value ) { <nl> + <nl> + name_ . SetNoArena ( <nl> + & : : google : : protobuf : : internal : : GetEmptyStringAlreadyInited ( ) , : : std : : move ( value ) ) ; <nl> + / / @ @ protoc_insertion_point ( field_set_rvalue : helloworld . HelloRequest . name ) <nl> + } <nl> + # endif <nl> + inline void HelloRequest : : set_name ( const char * value ) { <nl> + GOOGLE_DCHECK ( value ! = NULL ) ; <nl> + <nl> + name_ . 
SetNoArena ( & : : google : : protobuf : : internal : : GetEmptyStringAlreadyInited ( ) , : : std : : string ( value ) ) ; <nl> + / / @ @ protoc_insertion_point ( field_set_char : helloworld . HelloRequest . name ) <nl> + } <nl> + inline void HelloRequest : : set_name ( const char * value , size_t size ) { <nl> + <nl> + name_ . SetNoArena ( & : : google : : protobuf : : internal : : GetEmptyStringAlreadyInited ( ) , <nl> + : : std : : string ( reinterpret_cast < const char * > ( value ) , size ) ) ; <nl> + / / @ @ protoc_insertion_point ( field_set_pointer : helloworld . HelloRequest . name ) <nl> + } <nl> + inline : : std : : string * HelloRequest : : mutable_name ( ) { <nl> + <nl> + / / @ @ protoc_insertion_point ( field_mutable : helloworld . HelloRequest . name ) <nl> + return name_ . MutableNoArena ( & : : google : : protobuf : : internal : : GetEmptyStringAlreadyInited ( ) ) ; <nl> + } <nl> + inline : : std : : string * HelloRequest : : release_name ( ) { <nl> + / / @ @ protoc_insertion_point ( field_release : helloworld . HelloRequest . name ) <nl> + <nl> + return name_ . ReleaseNoArena ( & : : google : : protobuf : : internal : : GetEmptyStringAlreadyInited ( ) ) ; <nl> + } <nl> + inline void HelloRequest : : set_allocated_name ( : : std : : string * name ) { <nl> + if ( name ! = NULL ) { <nl> + <nl> + } else { <nl> + <nl> + } <nl> + name_ . SetAllocatedNoArena ( & : : google : : protobuf : : internal : : GetEmptyStringAlreadyInited ( ) , name ) ; <nl> + / / @ @ protoc_insertion_point ( field_set_allocated : helloworld . HelloRequest . name ) <nl> + } <nl> + <nl> + / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + <nl> + / / HelloReply <nl> + <nl> + / / string message = 1 ; <nl> + inline void HelloReply : : clear_message ( ) { <nl> + message_ . ClearToEmptyNoArena ( & : : google : : protobuf : : internal : : GetEmptyStringAlreadyInited ( ) ) ; <nl> + } <nl> + inline const : : std : : string & HelloReply : : message ( ) const { <nl> + / / @ @ protoc_insertion_point ( field_get : helloworld . HelloReply . message ) <nl> + return message_ . GetNoArena ( ) ; <nl> + } <nl> + inline void HelloReply : : set_message ( const : : std : : string & value ) { <nl> + <nl> + message_ . SetNoArena ( & : : google : : protobuf : : internal : : GetEmptyStringAlreadyInited ( ) , value ) ; <nl> + / / @ @ protoc_insertion_point ( field_set : helloworld . HelloReply . message ) <nl> + } <nl> + # if LANG_CXX11 <nl> + inline void HelloReply : : set_message ( : : std : : string & & value ) { <nl> + <nl> + message_ . SetNoArena ( <nl> + & : : google : : protobuf : : internal : : GetEmptyStringAlreadyInited ( ) , : : std : : move ( value ) ) ; <nl> + / / @ @ protoc_insertion_point ( field_set_rvalue : helloworld . HelloReply . message ) <nl> + } <nl> + # endif <nl> + inline void HelloReply : : set_message ( const char * value ) { <nl> + GOOGLE_DCHECK ( value ! = NULL ) ; <nl> + <nl> + message_ . SetNoArena ( & : : google : : protobuf : : internal : : GetEmptyStringAlreadyInited ( ) , : : std : : string ( value ) ) ; <nl> + / / @ @ protoc_insertion_point ( field_set_char : helloworld . HelloReply . message ) <nl> + } <nl> + inline void HelloReply : : set_message ( const char * value , size_t size ) { <nl> + <nl> + message_ . SetNoArena ( & : : google : : protobuf : : internal : : GetEmptyStringAlreadyInited ( ) , <nl> + : : std : : string ( reinterpret_cast < const char * > ( value ) , size ) ) ; <nl> + / / @ @ protoc_insertion_point ( field_set_pointer : helloworld . HelloReply . 
message ) <nl> + } <nl> + inline : : std : : string * HelloReply : : mutable_message ( ) { <nl> + <nl> + / / @ @ protoc_insertion_point ( field_mutable : helloworld . HelloReply . message ) <nl> + return message_ . MutableNoArena ( & : : google : : protobuf : : internal : : GetEmptyStringAlreadyInited ( ) ) ; <nl> + } <nl> + inline : : std : : string * HelloReply : : release_message ( ) { <nl> + / / @ @ protoc_insertion_point ( field_release : helloworld . HelloReply . message ) <nl> + <nl> + return message_ . ReleaseNoArena ( & : : google : : protobuf : : internal : : GetEmptyStringAlreadyInited ( ) ) ; <nl> + } <nl> + inline void HelloReply : : set_allocated_message ( : : std : : string * message ) { <nl> + if ( message ! = NULL ) { <nl> + <nl> + } else { <nl> + <nl> + } <nl> + message_ . SetAllocatedNoArena ( & : : google : : protobuf : : internal : : GetEmptyStringAlreadyInited ( ) , message ) ; <nl> + / / @ @ protoc_insertion_point ( field_set_allocated : helloworld . HelloReply . message ) <nl> + } <nl> + <nl> + # ifdef __GNUC__ <nl> + # pragma GCC diagnostic pop <nl> + # endif / / __GNUC__ <nl> + / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + <nl> + <nl> + / / @ @ protoc_insertion_point ( namespace_scope ) <nl> + <nl> + } / / namespace helloworld <nl> + <nl> + / / @ @ protoc_insertion_point ( global_scope ) <nl> + <nl> + # endif / / PROTOBUF_INCLUDED_helloworld_2eproto <nl> new file mode 100644 <nl> index 00000000000 . . 85671e8aaa3 <nl> Binary files / dev / null and b / examples / cpp / metadata / helloworld . pb . o differ <nl>
Metadata tutorial
grpc/grpc
63e73e428fdfaf940074bb59753cd26ad24d3b49
2018-12-14T00:51:58Z
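The generated C++ in this record is protoc's standard string-field surface: clear_/set_/mutable_/release_/set_allocated_ accessors, with the last three existing to let C++ callers avoid copies by transferring ownership. For orientation, here is a minimal sketch of the same two messages driven from Python, assuming the same helloworld.proto (package helloworld, with HelloRequest.name and HelloReply.message) compiled with protoc's Python plugin; the module name helloworld_pb2 is the protoc default, not something shown in this record.

# Sketch only: exercises the fields visible in the generated C++ above.
import helloworld_pb2

req = helloworld_pb2.HelloRequest(name="world")   # cf. set_name(value)
assert req.name == "world"                        # cf. name()
req.ClearField("name")                            # cf. clear_name()

reply = helloworld_pb2.HelloReply()
reply.message = "Hello world"                     # cf. set_message(value)
data = reply.SerializeToString()                  # wire-format bytes
assert helloworld_pb2.HelloReply.FromString(data).message == reply.message

Python has no mutable_/release_/set_allocated_ equivalents because string ownership is managed for you; those accessors are a C++-specific optimization.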
mmm a / android / sdk / src / main / java / com / taobao / weex / bridge / WXBridgeManager . java <nl> ppp b / android / sdk / src / main / java / com / taobao / weex / bridge / WXBridgeManager . java <nl> public void createInstance ( final String instanceId , final String template , <nl> return ; <nl> } <nl> <nl> - if ( ! isJSFrameworkInit ( ) & & reInitCount = = 1 ) { <nl> + if ( ! isJSFrameworkInit ( ) & & reInitCount = = 1 & & ! WXEnvironment . sDebugServerConnectable ) { <nl> instance . onRenderError ( WXRenderErrorCode . WX_CREATE_INSTANCE_ERROR , " createInstance fail ! " ) ; <nl> post ( new Runnable ( ) { <nl> @ Override <nl>
* [ android ] exception handling opt : on a debug apk connected to the debug server , createInstance holds on and waits for the JS framework to init ; on a release build it returns the error immediately and falls back to H5 render
apache/incubator-weex
6e413271d3e07775c6a9dcc189ee6404327fefd0
2017-10-25T13:26:25Z
mmm a / tools / autograd / gen_python_functions . py <nl> ppp b / tools / autograd / gen_python_functions . py <nl> <nl> # <nl> <nl> from collections import defaultdict <nl> + import itertools <nl> import re <nl> - from . gen_variable_type import should_trace <nl> + from . gen_variable_type import DONT_RECORD_TRACE <nl> from . utils import write , is_tensor_method <nl> <nl> from tools . codegen . code_template import CodeTemplate <nl> <nl> from tools . codegen . gen import cpp_string , with_native_function <nl> from tools . codegen . model import * <nl> <nl> - from typing import Dict , Optional , List , Any <nl> + from typing import Dict , Optional , List , Any , Tuple , Set <nl> <nl> # <nl> # declarations blocklist <nl> <nl> ' div ( Tensor , Scalar ) ' , ' div_ ( Tensor , Scalar ) ' , <nl> ] <nl> <nl> - NATIVE_NAMESPACE_MAPPING = { <nl> - " torch " : " THPVariableFunctionsModule " , <nl> - " torch . nn " : " THPNNVariableFunctionsModule " , <nl> - " torch . fft " : " THPFFTVariableFunctionsModule " , <nl> - " torch . linalg " : " THPLinalgVariableFunctionsModule " , <nl> - } <nl> - <nl> def should_generate_python_binding ( declaration ) : <nl> name = declaration [ ' name ' ] <nl> for pattern in SKIP_PYTHON_BINDINGS : <nl> def group_declarations_by_op_name ( declarations ) : <nl> <nl> def create_python_bindings ( python_functions , is_python_method , module ) : <nl> " " " Generates Python bindings to ATen functions " " " <nl> - py_methods = [ ] <nl> - py_method_defs = [ ] <nl> - py_forwards = [ ] <nl> + py_methods : List [ str ] = [ ] <nl> + py_method_defs : List [ str ] = [ ] <nl> + py_forwards : List [ str ] = [ ] <nl> <nl> for name in sorted ( python_functions . keys ( ) ) : <nl> - overload_decls = python_functions [ name ] <nl> - <nl> - for declaration in overload_decls : <nl> - # TODO : change all methods to directly process python signatures instead of decls . <nl> - declaration [ ' python_signature ' ] = decl_to_python_signature ( declaration , method = is_python_method ) <nl> - declaration [ ' native_function ' ] = decl_to_native_function ( declaration ) <nl> - <nl> - py_methods . append ( method_impl ( name , overload_decls , is_python_method , module ) ) <nl> - py_method_defs . append ( method_def ( name , overload_decls , is_python_method , module ) ) <nl> - py_forwards . extend ( forward_decls ( name , overload_decls , is_python_method , module ) ) <nl> + overloads = list ( decl_to_signature_function_pair ( decl , method = is_python_method ) <nl> + for decl in python_functions [ name ] ) <nl> + py_methods . append ( method_impl ( name , module , overloads , method = is_python_method ) ) <nl> + py_method_defs . append ( method_def ( name , module , overloads , method = is_python_method ) ) <nl> + py_forwards . extend ( forward_decls ( name , overloads , method = is_python_method ) ) <nl> <nl> return { <nl> ' py_forwards ' : py_forwards , <nl> def create_python_bindings ( python_functions , is_python_method , module ) : <nl> ' py_method_defs ' : py_method_defs , <nl> } <nl> <nl> + # <nl> + # declaration derived props , utils , etc . <nl> + # declarations are dicts loaded from Declarations . yaml , <nl> + # passed to our codegen methods by callers in gen_autograd <nl> + # <nl> <nl> - # handler for output / no - output overload pair <nl> - # ( plugged into PY_VARIABLE_CASE as $ { call_dispatch } ) <nl> - PY_VARIABLE_OUT = CodeTemplate ( " " " \ <nl> - if ( _r . 
isNone ( $ { out_idx } ) ) { <nl> - $ { call_dispatch } <nl> - } else { <nl> - $ { call_dispatch_out } <nl> - } <nl> - " " " ) <nl> + def get_pycname ( name : str ) - > str : <nl> + return f ' THPVariable_ { name } ' <nl> <nl> - # handler for a single parsed signature - may be a single overload or <nl> - # a pair of overloads that whose signatures only differ in output params <nl> - PY_VARIABLE_CASE = CodeTemplate ( " " " \ <nl> - case $ { i } : { <nl> - $ { body } <nl> - } <nl> - " " " ) <nl> <nl> + def is_noarg ( overloads : Sequence [ PythonSignatureNativeFunctionPair ] ) - > bool : <nl> + return len ( overloads ) = = 1 and overloads [ 0 ] . signature . arguments_count ( ) = = 0 <nl> <nl> - def emit_dispatch_case ( i , dictionary , is_python_method ) : <nl> - " " " <nl> - Emit dispatch code for a single parsed signature . This corresponds to either <nl> - a single overload , or a pair that differ only in output params . In the latter <nl> - case , a single signature is used for both and dispatching switches on the <nl> - presence / absence of passed output args . <nl> - - i : this signature ' s position in generated binding ' s signature list if number of <nl> - signatures > 1 , otherwise None <nl> - - dictionary : contains a no - output overload declaration under ' base ' , and optionally <nl> - a second overload with outputs under ' out ' <nl> - - true if we ' re generating a python method , in which case self is not parsed but <nl> - passed directly <nl> - " " " <nl> - base_decl = dictionary [ ' base ' ] <nl> - python_sig = base_decl [ ' python_signature ' ] <nl> <nl> - if ' out ' in dictionary : <nl> - # dispatch to output or no - output variant based on arg test <nl> - out_decl = dictionary [ ' out ' ] <nl> - python_sig = out_decl [ ' python_signature ' ] # prefer output variant <nl> + def is_output ( arg ) : <nl> + return arg . get ( ' output ' , False ) <nl> + <nl> <nl> - out_idx = get_python_output_index ( out_decl ) <nl> + def has_outputs ( declaration ) : <nl> + return any ( [ is_output ( arg ) for arg in declaration [ ' arguments ' ] ] ) <nl> <nl> - call_dispatch = emit_single_dispatch ( python_sig , base_decl , is_python_method ) <nl> - call_dispatch_out = emit_single_dispatch ( python_sig , out_decl , is_python_method ) <nl> <nl> - # dispatch output and no - output variants , branch on _r . isNone ( < out_idx > ) <nl> - body = PY_VARIABLE_OUT . substitute ( <nl> - out_idx = out_idx , <nl> - call_dispatch = call_dispatch , <nl> - call_dispatch_out = call_dispatch_out , <nl> - ) <nl> - else : <nl> - # no - output version only <nl> - body = emit_single_dispatch ( python_sig , base_decl , is_python_method ) <nl> + def is_torch_function ( declaration ) : <nl> + return ' namespace ' in declaration [ ' method_of ' ] <nl> <nl> - if i is not None : <nl> - # generate case for ith overload <nl> - return PY_VARIABLE_CASE . substitute ( i = i , body = body ) <nl> + <nl> + def is_nn_module_function ( declaration ) : <nl> + return declaration . get ( ' python_module ' ) = = ' nn ' <nl> + <nl> + <nl> + def is_fft_module_function ( declaration ) : <nl> + return declaration . get ( ' python_module ' ) = = ' fft ' <nl> + <nl> + <nl> + def is_linalg_module_function ( declaration ) : <nl> + return declaration . get ( ' python_module ' ) = = ' linalg ' <nl> + <nl> + <nl> + def op_name ( declaration ) : <nl> + name = declaration [ ' name ' ] <nl> + if has_outputs ( declaration ) : <nl> + if not name . 
endswith ( " _out " ) : <nl> + raise RuntimeError ( <nl> + ' { } has output params , expecting name ending with \ ' _out \ ' ' . <nl> + format ( declaration [ ' name ' ] ) ) <nl> + return name [ : - 4 ] <nl> else : <nl> - # only one overload , omit case wrapper <nl> - return body <nl> + if name . endswith ( " _out " ) : <nl> + raise RuntimeError ( <nl> + ' { } : name ends with \ ' _out \ ' , expecting output params ' . <nl> + format ( declaration [ ' name ' ] ) ) <nl> + return name <nl> <nl> + # ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ # <nl> # <nl> - # named tuple codegen <nl> + # Named Tuple Codegen <nl> # <nl> + # ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ # <nl> <nl> - def namedtuple_fieldnames ( declaration ) : <nl> - returns = declaration [ ' returns ' ] <nl> - if len ( returns ) < = 1 or all ( [ ' field_name ' not in x for x in returns ] ) : <nl> + # TODO : remove the copy of this method in ' tools / pyi / gen_pyi . py ' . <nl> + @ with_native_function <nl> + def namedtuple_fieldnames ( f : NativeFunction ) - > List [ str ] : <nl> + returns = f . func . returns <nl> + if len ( returns ) < = 1 or all ( map ( lambda r : r . name is None , returns ) ) : <nl> return [ ] <nl> else : <nl> - def get_field_name ( x ) : <nl> - # See Note [ field_name versus name ] <nl> - if ' field_name ' not in x : <nl> - # When building on Windows , ` PyStructSequence_UnnamedField ` could not be <nl> - # resolved by the linker for some reason , which cause error in building : <nl> - # <nl> - # python_nn_functions . cpp . obj : error LNK2001 : unresolved external symbol <nl> - # PyStructSequence_UnnamedField <nl> - # <nl> - # Thus , at this point in time , we do not support unnamed <nl> - # fields in namedtuple ; you must either name all fields , <nl> - # or none of them . <nl> - raise ValueError ( " Unnamed field is not supported by codegen " ) <nl> - else : <nl> - return x [ ' field_name ' ] <nl> - return [ get_field_name ( x ) for x in returns ] <nl> - <nl> - PY_NAMEDTUPLE_FIELDSDEF = CodeTemplate ( " " " \ <nl> - static PyStructSequence_Field $ { fieldsname } [ ] = { $ { fields , } { nullptr } } ; <nl> - " " " ) <nl> - <nl> - PY_NAMEDTUPLE_TYPEDEF = CodeTemplate ( " " " \ <nl> - static PyTypeObject $ { typename } ; <nl> - static bool $ { typename } _initialized = false ; <nl> - if ( ! $ { typename } _initialized ) { <nl> - $ { typename } _initialized = true ; <nl> - static PyStructSequence_Desc desc = { " torch . return_types . $ { name } " , nullptr , $ { fieldsname } , $ { size } } ; <nl> - PyStructSequence_InitType ( & $ { typename } , & desc ) ; <nl> - $ { typename } . tp_repr = ( reprfunc ) torch : : utils : : returned_structseq_repr ; <nl> - } <nl> - " " " ) <nl> - <nl> - <nl> - def emit_namedtuple_typedefs ( declarations ) : <nl> + if any ( map ( lambda r : r . name is None , returns ) ) : <nl> + # When building on Windows , ` PyStructSequence_UnnamedField ` could not be <nl> + # resolved by the linker for some reason , which cause error in building : <nl> + # <nl> + # python_nn_functions . cpp . obj : error LNK2001 : unresolved external symbol <nl> + # PyStructSequence_UnnamedField <nl> + # <nl> + # Thus , at this point in time , we do not support unnamed <nl> + # fields in namedtuple ; you must either name all fields , <nl> + # or none of them . 
<nl> + raise ValueError ( " Unnamed field is not supported by codegen " ) <nl> + <nl> + return list ( map ( lambda r : str ( r . name ) , returns ) ) <nl> + <nl> + @ with_native_function <nl> + def gen_namedtuple_typename_key ( f : NativeFunction ) - > str : <nl> + name = cpp . name ( f . func ) <nl> + fieldnames = namedtuple_fieldnames ( f ) <nl> + return ' _ ' . join ( [ name ] + fieldnames ) <nl> + <nl> + def emit_namedtuple_typedefs ( <nl> + overloads : Sequence [ PythonSignatureNativeFunctionPair ] <nl> + ) - > Tuple [ List [ str ] , Dict [ str , str ] ] : <nl> " " " <nl> Generate block of named tuple type def inits , and add typeref snippets <nl> to declarations that use them <nl> " " " <nl> - flddefnames = { } # map from unique field name lists to field def name <nl> - flddefs = [ ] # field def declarations <nl> - typenames = { } # map from unique name + field name lists to typedef name <nl> - typedefs = [ ] # typedef declarations and init code <nl> - <nl> - for decl in declarations : <nl> - fieldnames = namedtuple_fieldnames ( decl ) <nl> - if fieldnames = = [ ] : <nl> - decl [ ' namedtuple_typeref ' ] = ' ' <nl> + flddefnames : Dict [ str , str ] = { } # map from unique field name lists to field def name <nl> + flddefs : List [ str ] = [ ] # field def declarations <nl> + typenames : Dict [ str , str ] = { } # map from unique name + field name lists to typedef name <nl> + typedefs : List [ str ] = [ ] # typedef declarations and init code <nl> + <nl> + for overload in overloads : <nl> + fieldnames = namedtuple_fieldnames ( overload . function ) <nl> + if not fieldnames : <nl> continue <nl> <nl> fn_key = ' _ ' . join ( fieldnames ) <nl> fieldsname = flddefnames . get ( fn_key ) <nl> if fieldsname is None : <nl> - fieldsname = ' NamedTuple_fields { } ' . format ( ' ' if flddefs = = [ ] else len ( flddefs ) ) <nl> - fields = [ ' { { " { } " , " " } } ' . format ( fn ) for fn in fieldnames ] <nl> - fieldsdef = PY_NAMEDTUPLE_FIELDSDEF . substitute ( <nl> - fieldsname = fieldsname , <nl> - fields = fields <nl> - ) <nl> + fieldsname = f ' NamedTuple_fields { " " if not flddefs else len ( flddefs ) } ' <nl> flddefnames [ fn_key ] = fieldsname <nl> - flddefs . append ( fieldsdef ) <nl> + fields = ' , ' . join ( f ' { { " { fn } " , " " } } ' for fn in fieldnames ) <nl> + flddefs . append ( f " " " \ <nl> + static PyStructSequence_Field { fieldsname } [ ] = { { { fields } , { { nullptr } } } } ; <nl> + " " " ) <nl> <nl> - name = decl [ ' name ' ] <nl> - key = ' { } _ { } ' . format ( name , ' _ ' . join ( fieldnames ) ) <nl> - typename = typenames . get ( key ) <nl> + name = cpp . name ( overload . function . func ) # use @ with_native_function ? <nl> + tn_key = gen_namedtuple_typename_key ( overload . function ) <nl> + typename = typenames . get ( tn_key ) <nl> if typename is None : <nl> - typename = ' NamedTuple { } ' . format ( ' ' if typedefs = = [ ] else len ( typedefs ) ) <nl> - typedef = PY_NAMEDTUPLE_TYPEDEF . substitute ( <nl> - name = name , <nl> - typename = typename , <nl> - size = len ( fieldnames ) , <nl> - fieldsname = fieldsname <nl> - ) <nl> - typenames [ key ] = typename <nl> - typedefs . append ( typedef ) <nl> - <nl> - decl [ ' namedtuple_typeref ' ] = ' & { } , ' . format ( typename ) <nl> + typename = f ' NamedTuple { " " if not typedefs else len ( typedefs ) } ' <nl> + typenames [ tn_key ] = typename <nl> + typedefs . append ( f " " " \ <nl> + static PyTypeObject { typename } ; <nl> + static bool { typename } _initialized = false ; <nl> + if ( ! 
{ typename } _initialized ) { { <nl> + { typename } _initialized = true ; <nl> + static PyStructSequence_Desc desc = { { " torch . return_types . { name } " , nullptr , { fieldsname } , { len ( fieldnames ) } } } ; <nl> + PyStructSequence_InitType ( & { typename } , & desc ) ; <nl> + { typename } . tp_repr = ( reprfunc ) torch : : utils : : returned_structseq_repr ; <nl> + } } <nl> + " " " ) <nl> <nl> - return flddefs + typedefs <nl> + return flddefs + typedefs , typenames <nl> <nl> + # ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ # <nl> # <nl> - # method impl codegen <nl> + # Method Impl Codegen <nl> # <nl> - <nl> - def get_pycname ( name ) : <nl> - return ' THPVariable_ { } ' . format ( name ) <nl> - <nl> - <nl> - def is_noarg_binding ( overloads ) : <nl> - return len ( overloads ) = = 1 and get_python_argc ( overloads [ 0 ] ) = = 0 <nl> - <nl> + # ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ # <nl> <nl> # python binding for all overloads of a particular function / method <nl> PY_VARIABLE_METHOD_VARARGS = CodeTemplate ( r " " " \ <nl> def is_noarg_binding ( overloads ) : <nl> <nl> " " " ) <nl> <nl> + # handler for a single parsed signature - may be a single overload or <nl> + # a pair of overloads that whose signatures only differ in output params <nl> + # ( plugged into PY_VARIABLE_METHOD_VARARGS as an item in $ { dispatch } ) <nl> + PY_VARIABLE_CASE = CodeTemplate ( " " " \ <nl> + case $ { overload_index } : { <nl> + $ { body } <nl> + } <nl> + " " " ) <nl> + <nl> # python binding for single - overload function / method <nl> PY_VARIABLE_METHOD_VARARGS_SINGLETON = CodeTemplate ( " " " \ <nl> / / $ { name } <nl> def is_noarg_binding ( overloads ) : <nl> <nl> " " " ) <nl> <nl> - TORCH_FUNCTION_CHECK = CodeTemplate ( " " " \ <nl> - if ( _r . has_torch_function ( ) ) { <nl> - return handle_torch_function ( _r , $ { self_ } , args , kwargs , $ { namespace } , $ { modulename } ) ; <nl> - } <nl> - " " " ) <nl> - <nl> - TORCH_FUNCTION_CHECK_NOARGS = CodeTemplate ( " " " \ <nl> - if ( check_has_torch_function ( self_ ) ) { <nl> - return handle_torch_function ( self_ , $ { name } ) ; <nl> - } <nl> - " " " ) <nl> - <nl> - # NOTE : we type the unpacked self as Tensor not Variable to avoid return type <nl> - # discrepancies on method resolution ( e . g . Variable : : detach_ returns void <nl> - # rather than Tensor & ) <nl> - UNPACK_SELF = " Tensor & self = reinterpret_cast < THPVariable * > ( self_ ) - > cdata ; " <nl> - <nl> - <nl> - def method_impl ( name , declarations , is_python_method , module ) : <nl> + def method_impl ( <nl> + name : str , <nl> + module : str , <nl> + overloads : Sequence [ PythonSignatureNativeFunctionPair ] , <nl> + * , <nl> + method : bool <nl> + ) - > str : <nl> " " " <nl> Generate a python binding for all overloads of an op . 
<nl> " " " <nl> pycname = get_pycname ( name ) <nl> + noarg = is_noarg ( overloads ) <nl> + namedtuple_inits , namedtuple_typenames = emit_namedtuple_typedefs ( overloads ) <nl> <nl> method_header = [ ' HANDLE_TH_ERRORS ' ] <nl> - method_header + = emit_namedtuple_typedefs ( declarations ) <nl> - method_header + = [ UNPACK_SELF ] if is_python_method else [ ] <nl> + method_header + = namedtuple_inits <nl> + method_header + = [ <nl> + " Tensor & self = reinterpret_cast < THPVariable * > ( self_ ) - > cdata ; " <nl> + ] if method else [ ] <nl> <nl> - method_footer = [ ' END_HANDLE_TH_ERRORS ' ] <nl> + method_footer = ( [ ] if noarg else [ ' Py_RETURN_NONE ; ' ] ) + [ ' END_HANDLE_TH_ERRORS ' ] <nl> <nl> - check_has_torch_function = TORCH_FUNCTION_CHECK_NOARGS . substitute ( <nl> - name = ' " ' + name + ' " ' , <nl> - ) if is_python_method else ' ' <nl> + traceable = ' true ' if all ( should_trace ( o . function ) for o in overloads ) else ' false ' <nl> <nl> - # emit dispatch <nl> - if is_noarg_binding ( declarations ) : <nl> - python_sig = declarations [ 0 ] [ ' python_signature ' ] <nl> - dispatch = emit_single_dispatch ( python_sig , declarations [ 0 ] , is_python_method ) <nl> - return PY_VARIABLE_METHOD_NOARGS . substitute ( <nl> - name = name , <nl> - pycname = pycname , <nl> - method_header = method_header , <nl> - dispatch = dispatch , <nl> - method_footer = method_footer , <nl> - check_has_torch_function = check_has_torch_function , <nl> - ) <nl> - <nl> - method_footer = [ ' Py_RETURN_NONE ; ' ] + method_footer <nl> - <nl> - grouped = group_overloads ( declarations , is_python_method ) <nl> - is_singleton = len ( grouped ) = = 1 <nl> - <nl> - signatures = [ ] <nl> - dispatch = [ ] <nl> - for i , dictionary in enumerate ( grouped ) : <nl> - signature = dictionary [ ' signature ' ] <nl> + grouped_overloads : Sequence [ PythonSignatureGroup ] = group_overloads ( overloads ) <nl> + is_singleton = len ( grouped_overloads ) = = 1 <nl> + signatures : List [ str ] = [ ] <nl> + dispatch : List [ str ] = [ ] <nl> + for overload_index , overload in enumerate ( grouped_overloads ) : <nl> + signature = overload . signature . signature_str ( ) <nl> signatures . append ( f ' { cpp_string ( str ( signature ) ) } , ' ) <nl> - overload_index = i if not is_singleton else None <nl> - dispatch . append ( emit_dispatch_case ( overload_index , dictionary , is_python_method ) ) <nl> - <nl> - if is_singleton : <nl> + dispatch_body = emit_dispatch_case ( overload , namedtuple_typenames ) <nl> + dispatch . append ( <nl> + PY_VARIABLE_CASE . substitute ( overload_index = overload_index , body = dispatch_body ) <nl> + if not is_singleton else dispatch_body ) <nl> + <nl> + if noarg : <nl> + template = PY_VARIABLE_METHOD_NOARGS <nl> + elif is_singleton : <nl> template = PY_VARIABLE_METHOD_VARARGS_SINGLETON <nl> else : <nl> template = PY_VARIABLE_METHOD_VARARGS <nl> <nl> - if module : <nl> - check_has_torch_function = TORCH_FUNCTION_CHECK . substitute ( <nl> - namespace = NATIVE_NAMESPACE_MAPPING [ module ] , <nl> - modulename = ' " ' + module + ' " ' , <nl> - self_ = " self_ " if is_python_method else " nullptr " , <nl> - ) <nl> - else : <nl> - check_has_torch_function = TORCH_FUNCTION_CHECK . substitute ( <nl> - namespace = " THPVariableClass " , <nl> - modulename = ' " torch . 
Tensor " ' , <nl> - self_ = " self_ " if is_python_method else " nullptr " , <nl> - ) <nl> - <nl> - max_args = max ( [ get_python_argc ( decl ) for decl in declarations ] ) <nl> - traceable = ' true ' if all ( should_trace ( d ) for d in declarations ) else ' false ' <nl> - <nl> return template . substitute ( <nl> name = name , <nl> pycname = pycname , <nl> method_header = method_header , <nl> - max_args = max_args , <nl> + max_args = max ( map ( lambda o : o . signature . arguments_count ( ) , overloads ) ) , <nl> signatures = signatures , <nl> traceable = traceable , <nl> - check_has_torch_function = check_has_torch_function , <nl> + check_has_torch_function = gen_has_torch_function_check ( <nl> + name = name , <nl> + module = module , <nl> + noarg = noarg , <nl> + method = method , <nl> + ) , <nl> dispatch = dispatch , <nl> method_footer = method_footer , <nl> - self_ = " self_ " if is_python_method else " nullptr " , <nl> + self_ = " self_ " if method else " nullptr " , <nl> ) <nl> <nl> + def gen_has_torch_function_check ( name : str , module : str , * , noarg : bool , method : bool ) - > str : <nl> + if noarg : <nl> + if method : <nl> + return f " " " \ <nl> + if ( check_has_torch_function ( self_ ) ) { { <nl> + return handle_torch_function ( self_ , " { name } " ) ; <nl> + } } <nl> + " " " <nl> + else : <nl> + return ' ' <nl> + <nl> + self_ = " self_ " if method else " nullptr " <nl> + namespace = { <nl> + " torch " : " THPVariableFunctionsModule " , <nl> + " torch . nn " : " THPNNVariableFunctionsModule " , <nl> + " torch . fft " : " THPFFTVariableFunctionsModule " , <nl> + " torch . linalg " : " THPLinalgVariableFunctionsModule " , <nl> + } [ module ] if module else " THPVariableClass " <nl> + <nl> + return f " " " \ <nl> + if ( _r . has_torch_function ( ) ) { { <nl> + return handle_torch_function ( _r , { self_ } , args , kwargs , { namespace } , " { module or " torch . Tensor " } " ) ; <nl> + } } <nl> + " " " <nl> <nl> - # <nl> - # forward declarations <nl> - # <nl> - <nl> - PY_VARIABLE_FUNCTION_VARARGS_FORWARD_DECLARATION = CodeTemplate ( " " " \ <nl> - static PyObject * $ { pycname } ( PyObject * self_ , PyObject * args , PyObject * kwargs ) ; <nl> - " " " ) <nl> - <nl> - PY_VARIABLE_FUNCTION_NOARGS_FORWARD_DECLARATION = CodeTemplate ( " " " \ <nl> - static PyObject * $ { pycname } ( PyObject * self_ , PyObject * args ) ; <nl> + # handler for output / no - output overload pair <nl> + PY_VARIABLE_OUT = CodeTemplate ( " " " \ <nl> + if ( _r . isNone ( $ { out_idx } ) ) { <nl> + $ { call_dispatch } <nl> + } else { <nl> + $ { call_dispatch_out } <nl> + } <nl> " " " ) <nl> <nl> + def emit_dispatch_case ( <nl> + overload : PythonSignatureGroup , <nl> + namedtuple_typenames : Dict [ str , str ] , <nl> + ) - > str : <nl> + " " " <nl> + Emit dispatch code for a single parsed signature . This corresponds to either <nl> + a single native function , or a pair that differ only in output params . In the <nl> + latter case , a single python signature is used for both and dispatching <nl> + switches on the presence / absence of passed output args . <nl> + " " " <nl> + if overload . outplace is not None : <nl> + # dispatch output and no - output variants , branch on _r . isNone ( < out_idx > ) <nl> + return PY_VARIABLE_OUT . substitute ( <nl> + out_idx = overload . signature . output_idx ( ) , <nl> + call_dispatch = emit_single_dispatch ( <nl> + overload . signature , overload . base , namedtuple_typenames ) , <nl> + call_dispatch_out = emit_single_dispatch ( <nl> + overload . 
signature , overload . outplace , namedtuple_typenames ) , <nl> + ) <nl> + else : <nl> + # no - output version only <nl> + return emit_single_dispatch ( <nl> + overload . signature , overload . base , namedtuple_typenames ) <nl> + <nl> + # Copied from ' gen_variable_type . should_trace ( ) ' . <nl> + # TODO : consolidate after migrating autograd codegen . <nl> + @ with_native_function <nl> + def should_trace ( f : NativeFunction ) - > bool : <nl> + # Operations involving Storage or Type are not traceable at the moment <nl> + if any ( str ( arg . type ) in { ' Storage ' , ' Type ' , ' ConstQuantizerPtr ' } <nl> + for arg in f . func . schema_order_arguments ( ) ) : <nl> + return False <nl> + # We can ' t trace functions which don ' t have any Tensor or TensorList returns <nl> + if not any ( r . type . is_tensor_like ( ) for r in f . func . returns ) : <nl> + return False <nl> + name = cpp . name ( f . func ) <nl> + base_name = f . func . name . name . base <nl> + if base_name in DONT_RECORD_TRACE or name in DONT_RECORD_TRACE : <nl> + return False <nl> + return True <nl> <nl> - def forward_decls ( name , declarations , is_python_method , module ) : <nl> - if is_python_method : <nl> - return [ ] <nl> + # ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ # <nl> + # <nl> + # Forward Declarations Codegen <nl> + # <nl> + # ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ # <nl> <nl> - if is_noarg_binding ( declarations ) : <nl> - template = PY_VARIABLE_FUNCTION_NOARGS_FORWARD_DECLARATION <nl> - else : <nl> - template = PY_VARIABLE_FUNCTION_VARARGS_FORWARD_DECLARATION <nl> + def forward_decls ( <nl> + name : str , <nl> + overloads : Sequence [ PythonSignatureNativeFunctionPair ] , <nl> + * , <nl> + method : bool <nl> + ) - > Tuple [ str , . . . ] : <nl> + if method : <nl> + return ( ) <nl> <nl> pycname = get_pycname ( name ) <nl> - return [ template . 
substitute ( pycname = pycname ) ] <nl> - <nl> + if is_noarg ( overloads ) : <nl> + return ( f " " " \ <nl> + static PyObject * { pycname } ( PyObject * self_ , PyObject * args ) ; <nl> + " " " , ) <nl> + else : <nl> + return ( f " " " \ <nl> + static PyObject * { pycname } ( PyObject * self_ , PyObject * args , PyObject * kwargs ) ; <nl> + " " " , ) <nl> <nl> + # ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ # <nl> # <nl> - # method def ( binding table entry ) codegen <nl> + # Method Def ( Binding Table Entry ) Codegen <nl> # <nl> + # ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ # <nl> <nl> # Python binary operator dunder methods <nl> BINARY_OP_NAMES = [ <nl> def forward_decls ( name , declarations , is_python_method , module ) : <nl> ' __or__ ' , ' __ror__ ' , ' __ior__ ' , <nl> ] <nl> <nl> - # PyMethodDef entry for binary op , throws not implemented error <nl> - PY_VARIABLE_METHOD_BINOP_DEF = CodeTemplate ( " " " \ <nl> - { " $ { name } " , $ { pyfunc_cast } ( TypeError_to_NotImplemented_ < $ { pycname } > ) , $ { flags } , NULL } , " " " ) <nl> - <nl> - # PyMethodDef entry <nl> - PY_VARIABLE_METHOD_DEF = CodeTemplate ( " " " \ <nl> - { " $ { name } " , $ { pyfunc_cast } ( $ { pycname } ) , $ { flags } , NULL } , " " " ) <nl> - <nl> - <nl> - def method_def ( name , declarations , is_python_method , module ) : <nl> + def method_def ( <nl> + name : str , <nl> + module : str , <nl> + overloads : Sequence [ PythonSignatureNativeFunctionPair ] , <nl> + * , <nl> + method : bool <nl> + ) - > str : <nl> " " " <nl> Generate method def entry . <nl> " " " <nl> pycname = get_pycname ( name ) <nl> <nl> - if is_noarg_binding ( declarations ) : <nl> + if is_noarg ( overloads ) : <nl> pyfunc_cast = ' ' <nl> - flags = ' METH_NOARGS ' if is_python_method else ' METH_VARARGS | METH_KEYWORDS ' <nl> + flags = ' METH_NOARGS ' if method else ' METH_VARARGS | METH_KEYWORDS ' <nl> else : <nl> pyfunc_cast = ' castPyCFunctionWithKeywords ' <nl> flags = ' METH_VARARGS | METH_KEYWORDS ' <nl> def method_def ( name , declarations , is_python_method , module ) : <nl> flags + = ' | METH_STATIC ' <nl> <nl> if name in BINARY_OP_NAMES : <nl> - def_template = PY_VARIABLE_METHOD_BINOP_DEF <nl> + # PyMethodDef entry for binary op , throws not implemented error <nl> + return f " " " \ <nl> + { { " { name } " , { pyfunc_cast } ( TypeError_to_NotImplemented_ < { pycname } > ) , { flags } , NULL } } , " " " <nl> else : <nl> - def_template = PY_VARIABLE_METHOD_DEF <nl> - <nl> - return def_template . substitute ( <nl> - name = name , <nl> - pycname = pycname , <nl> - pyfunc_cast = pyfunc_cast , <nl> - flags = flags , <nl> - ) <nl> + # PyMethodDef entry <nl> + return f " " " \ <nl> + { { " { name } " , { pyfunc_cast } ( { pycname } ) , { flags } , NULL } } , " " " <nl> <nl> + # ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ # <nl> # <nl> - # overload sorting and grouping <nl> + # Overload Sorting and Grouping <nl> # <nl> + # ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ # <nl> <nl> - def group_overloads ( declarations , is_python_method ) : <nl> - " " " Returns a list of dictionaries containing the optional keys : <nl> - <nl> - " base " : the regular ATen declaration ( e . g . 
conv2d ) <nl> - " out " : the out variant ( e . g . conv2d_out ) <nl> - " signature " : the signature used for Python argument parsing <nl> - <nl> - Note that we merge pairs of declarations with signatures that <nl> - are equivalent mod output arguments , and use a single entry in <nl> - the python_arg_parser sig list for both ( output arguments become <nl> - optional ) <nl> - " " " <nl> - grouped = defaultdict ( dict ) <nl> + def group_overloads ( <nl> + overloads : Sequence [ PythonSignatureNativeFunctionPair ] <nl> + ) - > Sequence [ PythonSignatureGroup ] : <nl> + bases : Dict [ str , PythonSignatureNativeFunctionPair ] = { } <nl> + outplaces : Dict [ str , PythonSignatureNativeFunctionPair ] = { } <nl> <nl> # first group by signature ignoring out arguments <nl> - for declaration in declarations : <nl> - signature = get_python_signature ( declaration , is_python_method , skip_outputs = True ) <nl> - v = grouped [ signature ] <nl> - if declaration [ ' name ' ] . endswith ( ' _out ' ) : <nl> - v [ ' out ' ] = declaration <nl> - # prefer the signature with optional out = . . . arguments <nl> - v [ ' signature ' ] = get_python_signature ( declaration , is_python_method ) <nl> + for overload in overloads : <nl> + sig = overload . signature . signature_str ( skip_outputs = True ) <nl> + if overload . function . func . is_out_fn ( ) : <nl> + if sig in outplaces : <nl> + raise RuntimeError ( <nl> + f ' Found duplicated function definition : \ n - { overload . function . func } . \ n ' <nl> + f ' Existing definition : \ n - { outplaces [ sig ] . function . func } . ' <nl> + ) <nl> + outplaces [ sig ] = overload <nl> else : <nl> - v [ ' base ' ] = declaration <nl> - if ' signature ' not in v : <nl> - v [ ' signature ' ] = signature <nl> - <nl> - result = [ ] <nl> - for x , dictionary in sorted ( grouped . items ( ) ) : <nl> - if ' base ' not in dictionary : <nl> - candidates = [ ] <nl> - non_out_name = dictionary [ ' out ' ] [ ' operator_name ' ] <nl> - for declaration in declarations : <nl> - if declaration [ ' name ' ] = = non_out_name and not declaration [ ' deprecated ' ] : <nl> - signature = get_python_signature ( declaration , is_python_method , skip_outputs = True ) <nl> - candidates . append ( signature ) <nl> + if sig in bases : <nl> + raise RuntimeError ( <nl> + f ' Found duplicated function definition : \ n - { overload . function . func } . \ n ' <nl> + f ' Existing definition : \ n - { bases [ sig ] . function . func } . ' <nl> + ) <nl> + bases [ sig ] = overload <nl> + <nl> + for sig , out in outplaces . items ( ) : <nl> + if sig not in bases : <nl> + candidates : List [ str ] = [ ] <nl> + for overload in overloads : <nl> + if str ( overload . function . func . name . name ) = = str ( out . function . func . name . name ) \ <nl> + and not overload . function . func . is_out_fn ( ) \ <nl> + and not overload . signature . deprecated : <nl> + candidates . append ( overload . signature . signature_str ( skip_outputs = True ) ) <nl> + out_sig = out . signature . signature_str ( ) <nl> raise RuntimeError ( <nl> - " While identifying overloads , we found an out schema { } without a corresponding non - out variant . " <nl> - " We expected the non - out variant to have schema : \ n - { } \ nPlease check that you spelled the schema " <nl> - " correctly in native_functions . yaml . We discovered the following candidate ( s ) : \ n " <nl> - . format ( dictionary [ ' signature ' ] , x ) + " \ n " . join ( " - { } " . format ( candidate ) for candidate in candidates ) ) <nl> - result . 
append ( dictionary ) <nl> - return sort_declarations ( result ) <nl> - <nl> + f ' While identifying overloads , we found an out schema { out_sig } without a corresponding non - out variant . ' <nl> + f ' We expected the non - out variant to have schema : \ n - { sig } \ nPlease check that you spelled the schema ' <nl> + ' correctly in native_functions . yaml . We discovered the following candidate ( s ) : \ n ' <nl> + + ' \ n ' . join ( f ' - { candidate } ' for candidate in candidates ) ) <nl> + <nl> + grouped : List [ PythonSignatureGroup ] = [ ] <nl> + for sig , base in bases . items ( ) : <nl> + outplace = outplaces . get ( sig ) <nl> + grouped . append ( PythonSignatureGroup ( <nl> + # prefer the signature with optional out = . . . arguments because it ' s the <nl> + # superset that can be used to parse input for both base and outplace . <nl> + signature = outplace . signature if outplace is not None else base . signature , <nl> + base = base . function , <nl> + outplace = outplace . function if outplace is not None else None , <nl> + ) ) <nl> + <nl> + return sort_overloads ( grouped ) <nl> <nl> # This function declares a partial order on declarations , and sorts them according <nl> # to its linear extension . This is necessary , because there ' s some ambiguity in the <nl> # choice of overload , and we want a different order . <nl> # <nl> # See Note [ Order of overloads matters ] <nl> - def sort_declarations ( grouped_decls ) : <nl> - <nl> - def dynamic_type ( arg ) : <nl> - return arg [ ' dynamic_type ' ] <nl> - <nl> - def is_coord_smaller ( arg1 , arg2 ) : <nl> - return dynamic_type ( arg1 ) = = ' Scalar ' and arg2 [ ' dynamic_type ' ] = = ' Tensor ' <nl> - <nl> - def is_smaller ( d1 , d2 ) : <nl> - " " " Returns True if d1 < d2 in the partial order . " " " <nl> - args1 , args2 = d1 [ ' base ' ] [ ' arguments ' ] , d2 [ ' base ' ] [ ' arguments ' ] <nl> - if len ( args1 ) ! = len ( args2 ) : <nl> - return False <nl> - any_smaller = any ( is_coord_smaller ( arg1 , arg2 ) for arg1 , arg2 in zip ( args1 , args2 ) ) <nl> - all_smaller_or_equal = all ( dynamic_type ( arg1 ) = = dynamic_type ( arg2 ) or <nl> - is_coord_smaller ( arg1 , arg2 ) <nl> - for arg1 , arg2 in zip ( args1 , args2 ) ) <nl> - return any_smaller and all_smaller_or_equal <nl> - <nl> - # Construct the relation graph <nl> - larger_than = defaultdict ( set ) <nl> - for i1 , decl1 in enumerate ( grouped_decls ) : <nl> - for i2 , decl2 in enumerate ( grouped_decls ) : <nl> - if is_smaller ( decl1 , decl2 ) : <nl> - larger_than [ i1 ] . add ( i2 ) <nl> - <nl> - if not larger_than : <nl> - return grouped_decls <nl> - <nl> - # Use a topological sort to sort decls according to the partial order . <nl> - sorted_deps = [ ( i , decl ) for i , decl in enumerate ( grouped_decls ) <nl> - if i not in larger_than ] <nl> - for i , decl in sorted_deps : <nl> - for i2 in sorted ( larger_than . keys ( ) ) : <nl> - larger = larger_than [ i2 ] <nl> - larger . discard ( i ) <nl> - if not larger : <nl> - del larger_than [ i2 ] <nl> - sorted_deps . append ( ( i2 , grouped_decls [ i2 ] ) ) <nl> - <nl> - return [ decl for i , decl in sorted_deps ] <nl> - <nl> - <nl> # <nl> - # python signature codegen <nl> + # A few examples of ambiguous python signature pairs . <nl> # <nl> - <nl> - def get_python_signature ( declaration , is_python_method , skip_outputs = False ) : <nl> - return declaration [ ' python_signature ' ] . 
signature_str ( skip_outputs = skip_outputs ) <nl> - <nl> - <nl> + # All parameters have the same type , except one taking Tensor the other taking <nl> + # Scalar . A numeric PyObject can be casted into Tensor , and a zero - dim Tensor <nl> + # object can be accepted as Scalar type parameter ( see python_arg_parser . cpp ) . <nl> + # Therefore , same input arguments might be accepted by either python signature . <nl> + # We want to always parse the one taking Tensor first . <nl> # <nl> - # op args to python parsed args transform <nl> + # bitwise_and ( Tensor input , Tensor other , * , Tensor out = None ) <nl> + # bitwise_and ( Tensor input , Scalar other , * , Tensor out = None ) <nl> # <nl> - <nl> - def get_python_argc ( decl ) : <nl> - return len ( decl [ ' python_signature ' ] . arguments ( ) ) <nl> - <nl> - <nl> - def get_python_output_index ( decl ) : <nl> - ps : PythonSignature = decl [ ' python_signature ' ] <nl> - return len ( ps . input_args ) + len ( ps . input_kwargs ) <nl> - <nl> - <nl> + # If they have different number of parameters then they are not ambiguous - but <nl> + # the difference on output param can be ignored as it ' s optional . <nl> # <nl> - # declaration derived props , utils , etc . <nl> - # declarations are dicts loaded from Declarations . yaml , <nl> - # passed to our codegen methods by callers in gen_autograd <nl> + # multiply ( Tensor input , Tensor other , * , Tensor out = None ) <nl> + # multiply ( Tensor input , Scalar other ) <nl> + # <nl> + # Both positional args and keyword - only args are considered together . <nl> + # <nl> + # subtract ( Tensor other , * , Scalar alpha = 1 ) <nl> + # subtract ( Scalar other , Scalar alpha = 1 ) <nl> + # <nl> + # A few ambiguous cases which it does NOT handle yet . <nl> + # <nl> + # If there is any difference in other parameters besides the Tensor / Scalar <nl> + # difference , then they are not considered ambiguous by this method anymore . <nl> + # However , the difference could be too trivial to disambiguate . <nl> + # <nl> + # foo ( Tensor input , Scalar other , Scalar bar ) <nl> + # foo ( Tensor input , Tensor other , double bar ) <nl> + # <nl> + # If they are taking different number of parameters then they are not considered <nl> + # ambiguous anymore , even if the difference is only on optional kwargs . <nl> + # <nl> + # foo ( Scalar other , Scalar alpha = 1 ) <nl> + # foo ( Tensor other , * , Scalar alpha = 1 , Scalar beta = 1 ) <nl> # <nl> <nl> - def is_output ( arg ) : <nl> - return arg . get ( ' output ' , False ) <nl> - <nl> - <nl> - def has_outputs ( declaration ) : <nl> - return any ( [ is_output ( arg ) for arg in declaration [ ' arguments ' ] ] ) <nl> - <nl> - <nl> - def is_torch_function ( declaration ) : <nl> - return ' namespace ' in declaration [ ' method_of ' ] <nl> - <nl> - <nl> - def is_nn_module_function ( declaration ) : <nl> - return declaration . get ( ' python_module ' ) = = ' nn ' <nl> + def sort_overloads ( <nl> + grouped_overloads : Sequence [ PythonSignatureGroup ] <nl> + ) - > Sequence [ PythonSignatureGroup ] : <nl> <nl> + def is_arg_smaller ( t1 : Type , t2 : Type ) - > bool : <nl> + return str ( t1 ) = = ' Scalar ' and str ( t2 ) = = ' Tensor ' <nl> <nl> - def is_fft_module_function ( declaration ) : <nl> - return declaration . get ( ' python_module ' ) = = ' fft ' <nl> + def is_smaller ( s1 : PythonSignature , s2 : PythonSignature ) - > bool : <nl> + " " " Returns True if s1 < s2 in the partial order . " " " <nl> + args1 , args2 = s1 . arguments ( skip_outputs = True ) , s2 . 
arguments ( skip_outputs = True ) <nl> + if len ( args1 ) ! = len ( args2 ) : <nl> + return False <nl> + # TODO : should use some canonical form instead of ' str ( arg . type ) ' - see comments <nl> + # above . The old codegen used the deprecated ' dynamic_type ( arg . type ) ' , which <nl> + # ignores the optional annotation , i . e . ' Scalar ' and ' Scalar ? ' . <nl> + equal = all ( arg1 . type = = arg2 . type for arg1 , arg2 in zip ( args1 , args2 ) ) <nl> + smaller_or_equal = all ( str ( arg1 . type ) = = str ( arg2 . type ) <nl> + or is_arg_smaller ( arg1 . type , arg2 . type ) <nl> + for arg1 , arg2 in zip ( args1 , args2 ) ) <nl> + return smaller_or_equal and not equal <nl> + <nl> + # First sort by signature <nl> + grouped_overloads = sorted ( grouped_overloads , key = lambda x : x . signature . signature_str ( ) ) <nl> <nl> + # Construct the relation graph <nl> + larger_than : Dict [ int , Set [ int ] ] = defaultdict ( set ) <nl> + for i1 , overload1 in enumerate ( grouped_overloads ) : <nl> + for i2 , overload2 in enumerate ( grouped_overloads ) : <nl> + if is_smaller ( overload1 . signature , overload2 . signature ) : <nl> + larger_than [ i1 ] . add ( i2 ) <nl> <nl> - def is_linalg_module_function ( declaration ) : <nl> - return declaration . get ( ' python_module ' ) = = ' linalg ' <nl> + if not larger_than : <nl> + return list ( grouped_overloads ) <nl> <nl> + # Use a topological sort to sort overloads according to the partial order . <nl> + N = len ( grouped_overloads ) <nl> + sorted_ids : List [ int ] = list ( filter ( lambda x : x not in larger_than , range ( N ) ) ) <nl> <nl> - def op_name ( declaration ) : <nl> - name = declaration [ ' name ' ] <nl> - if has_outputs ( declaration ) : <nl> - if not name . endswith ( " _out " ) : <nl> - raise RuntimeError ( <nl> - ' { } has output params , expecting name ending with \ ' _out \ ' ' . <nl> - format ( declaration [ ' name ' ] ) ) <nl> - return name [ : - 4 ] <nl> - else : <nl> - if name . endswith ( " _out " ) : <nl> - raise RuntimeError ( <nl> - ' { } : name ends with \ ' _out \ ' , expecting output params ' . <nl> - format ( declaration [ ' name ' ] ) ) <nl> - return name <nl> + for idx in range ( N ) : <nl> + # The size of sorted_ids will grow to N eventually . <nl> + i = sorted_ids [ idx ] <nl> + for j in sorted ( larger_than . keys ( ) ) : <nl> + larger = larger_than [ j ] <nl> + larger . discard ( i ) <nl> + if not larger : <nl> + del larger_than [ j ] <nl> + sorted_ids . append ( j ) <nl> <nl> + return list ( map ( lambda x : grouped_overloads [ x ] , sorted_ids ) ) <nl> <nl> # ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ # <nl> # <nl> def decl_to_native_function ( decl : Dict [ str , Any ] ) - > NativeFunction : <nl> assert function_schema_str in NF_TABLE , f ' cannot find func : { function_schema_str } ' <nl> return NF_TABLE [ function_schema_str ] <nl> <nl> - # Each decl entry has unique python signature . <nl> - def decl_to_python_signature ( decl : Dict [ str , Any ] , * , method : bool ) - > PythonSignature : <nl> + # Each decl entry has unique ( python signature , native function ) pair . 
<nl> + def decl_to_signature_function_pair ( <nl> + decl : Dict [ str , Any ] , * , method : bool <nl> + ) - > PythonSignatureNativeFunctionPair : <nl> f = decl_to_native_function ( decl ) <nl> <nl> @ with_native_function <nl> def go ( f : NativeFunction ) - > PythonSignature : <nl> deprecated_args_names = tuple ( str ( a [ ' name ' ] ) for a in args ) , <nl> deprecated_args_exprs = tuple ( decl . get ( ' call_args ' ) ) , <nl> ) <nl> - return python_sig <nl> - <nl> + return PythonSignatureNativeFunctionPair ( <nl> + signature = python_sig , <nl> + function = f , <nl> + ) <nl> <nl> - def emit_single_dispatch ( ps : PythonSignature , decl : Dict [ str , Any ] , method : bool ) - > str : <nl> + def emit_single_dispatch ( <nl> + ps : PythonSignature , f : NativeFunction , namedtuple_typenames : Dict [ str , str ] <nl> + ) - > str : <nl> " " " <nl> - Emit dispatch code for a single declared overload . <nl> + Emit dispatch code for a single native function . <nl> " " " <nl> - f = decl [ ' native_function ' ] <nl> - <nl> @ with_native_function <nl> def go ( f : NativeFunction ) - > str : <nl> # header comments <nl> def go ( f : NativeFunction ) - > str : <nl> schema_comment = f ' / / { deprecated } aten : : { f . func } ' <nl> <nl> # dispatch lambda signature <nl> - name = decl [ ' name ' ] <nl> + name = cpp . name ( f . func ) <nl> lambda_formals = ' , ' . join ( map ( lambda a : f " { a . type_str } { a . name } " , <nl> - dispatch_lambda_args ( ps , f , method = method ) ) ) <nl> + dispatch_lambda_args ( ps , f ) ) ) <nl> lambda_return = dispatch_lambda_return_str ( f ) <nl> <nl> # dispatch lambda body <nl> dispatch_callee = cpp_dispatch_target ( f ) <nl> - dispatch_args = ' , ' . join ( cpp_dispatch_exprs ( f , method , python_signature = ps ) ) <nl> + dispatch_args = ' , ' . join ( cpp_dispatch_exprs ( f , python_signature = ps ) ) <nl> <nl> # from arg parser outputs to dispatch lambda arguments <nl> - parser_outputs = arg_parser_output_exprs ( ps , f , method = method ) <nl> - lambda_arg_exprs = dispatch_lambda_exprs ( ps , f , method = method ) <nl> + parser_outputs = arg_parser_output_exprs ( ps , f ) <nl> + lambda_arg_exprs = dispatch_lambda_exprs ( ps , f ) <nl> inits = ' \ n ' . join ( lambda_arg_exprs . inits ) <nl> lambda_args = ' , ' . join ( lambda_arg_exprs . exprs ) <nl> <nl> def go ( f : NativeFunction ) - > str : <nl> set_requires_grad = f ' . set_requires_grad ( { parser_outputs [ " requires_grad " ] . expr } ) ' \ <nl> if need_set_requires_grad else ' ' <nl> <nl> - auto_no_gil = ' ' if decl [ ' with_gil ' ] else ' pybind11 : : gil_scoped_release no_gil ; ' <nl> - <nl> - namedtuple_typeref = decl [ ' namedtuple_typeref ' ] <nl> - <nl> if lambda_return = = ' void ' : <nl> return f " " " \ <nl> { schema_comment } <nl> { inits } <nl> auto dispatch_ { name } = [ ] ( { lambda_formals } ) - > { lambda_return } { { <nl> - { auto_no_gil } <nl> + pybind11 : : gil_scoped_release no_gil ; <nl> { dispatch_callee } ( { dispatch_args } ) ; <nl> } } ; <nl> dispatch_ { name } ( { lambda_args } ) { set_requires_grad } ; <nl> Py_RETURN_NONE ; <nl> " " " <nl> else : <nl> + typename = namedtuple_typenames . 
get ( gen_namedtuple_typename_key ( f ) ) <nl> + namedtuple_typeref = f ' & { typename } , ' if typename is not None else ' ' <nl> return f " " " \ <nl> { schema_comment } <nl> { inits } <nl> auto dispatch_ { name } = [ ] ( { lambda_formals } ) - > { lambda_return } { { <nl> - { auto_no_gil } <nl> + pybind11 : : gil_scoped_release no_gil ; <nl> return { dispatch_callee } ( { dispatch_args } ) ; <nl> } } ; <nl> return wrap ( { namedtuple_typeref } dispatch_ { name } ( { lambda_args } ) { set_requires_grad } ) ; <nl> mmm a / tools / autograd / gen_variable_type . py <nl> ppp b / tools / autograd / gen_variable_type . py <nl> def find_factory_functions ( declarations ) : <nl> FACTORY_FUNCTION_NAMES . add ( declaration [ ' api_name ' ] ) <nl> <nl> <nl> + # TODO : consolidate with ' gen_python_functions . should_trace ( ) ' <nl> def should_trace ( declaration ) : <nl> # Operations involving Storage or Type are not traceable at the moment <nl> if any ( arg [ ' simple_type ' ] in { ' Storage ' , ' Type ' , ' ConstQuantizerPtr ' } for arg in declaration [ ' arguments ' ] ) : <nl> mmm a / tools / codegen / api / python . py <nl> ppp b / tools / codegen / api / python . py <nl> <nl> + import itertools <nl> + from dataclasses import dataclass <nl> + from typing import Optional , Union , Sequence , Set , List , Tuple , Dict <nl> + <nl> from tools . codegen . api . types import * <nl> import tools . codegen . api . cpp as cpp <nl> import tools . codegen . local as local <nl> from tools . codegen . gen import pythonify_default <nl> from tools . codegen . model import * <nl> <nl> - from dataclasses import dataclass <nl> - from typing import Optional , Union , Sequence , Set , List , Tuple <nl> - <nl> # ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ # <nl> # <nl> # Data Models <nl> def arguments ( <nl> result . extend ( self . tensor_options_args ) <nl> return tuple ( result ) <nl> <nl> + def arguments_count ( self ) - > int : <nl> + return len ( self . arguments ( ) ) <nl> + <nl> + def output_idx ( self ) - > int : <nl> + return len ( self . input_args ) + len ( self . input_kwargs ) <nl> + <nl> # [ old codegen ] Compute the Python function signature for argument parsing , <nl> # as specified in torch / csrc / utils / python_arg_parser . h . WARNING : <nl> # this is NOT the same type signature as specified by PEP 484 <nl> class PythonSignatureDeprecated ( PythonSignature ) : <nl> # addmm ( Scalar beta , Tensor self , Tensor mat1 , Tensor mat2 ) <nl> # When generating lambda function signature we need follow the exact order ( even for method = True ) : <nl> # [ ] ( Scalar beta , const Tensor & self , const Tensor & mat1 , const Tensor & mat2 ) - > Tensor <nl> - deprecated_args_names : Tuple [ str ] <nl> + deprecated_args_names : Tuple [ str , . . . ] <nl> <nl> # The deprecated signature might miss some arguments that the corresponding <nl> # C + + signature expects . We need store the constant default values to pass in . <nl> class PythonSignatureDeprecated ( PythonSignature ) : <nl> # [ func schema ] : aten : : addmm ( Tensor self , Tensor mat1 , Tensor mat2 , * , Scalar beta = 1 , Scalar alpha = 1 ) - > Tensor <nl> # [ func call ] : self . addmm ( mat1 , mat2 , beta , 1 ) <nl> # We store [ ' self ' , ' mat1 ' , ' mat2 ' , ' beta ' , ' 1 ' ] in this case . <nl> - deprecated_args_exprs : Tuple [ str ] <nl> + deprecated_args_exprs : Tuple [ str , . . . 
] <nl> <nl> @ property <nl> def deprecated ( self ) - > bool : <nl> def deprecated ( self ) - > bool : <nl> def signature_str ( self , * , skip_outputs : bool = False ) - > str : <nl> return PythonSignature . signature_str ( self , skip_outputs = skip_outputs ) + ' | deprecated ' <nl> <nl> + # This struct is used to hold the PythonSignature and its corresponding <nl> + # NativeFunction BEFORE grouping base and out - variant functions . <nl> + # Why not store NativeFunction in PythonSignature or construct PythonSignature <nl> + # from NativeFunction ? Because they are not 1 - 1 mapped . <nl> + # One native function could have both deprecated and non - deprecated python <nl> + # signatures - NativeFunction doesn ' t contain information to construct the <nl> + # deprecated python signature . <nl> + # One python signature is used to handle both the base and the out - variant <nl> + # function - see ' PythonSignatureGroup ' . <nl> + @ dataclass ( frozen = True ) <nl> + class PythonSignatureNativeFunctionPair : <nl> + signature : PythonSignature <nl> + function : NativeFunction <nl> + <nl> + # We merge pairs of functions with signatures that are equivalent mod <nl> + # output arguments , and use a single entry in the python_arg_parser sig <nl> + # list for both ( output arguments become optional ) . <nl> + @ dataclass ( frozen = True ) <nl> + class PythonSignatureGroup : <nl> + # The signature used for Python argument parsing . The outplace signature <nl> + # is preferred if exists , because it can be used to parse inputs for both <nl> + # the out - place variant and the base version ( with output omitted ) . <nl> + signature : PythonSignature <nl> + <nl> + # The regular ATen declaration ( e . g . conv2d ) <nl> + base : NativeFunction <nl> + <nl> + # The out variant ( e . g . conv2d_out ) <nl> + outplace : Optional [ NativeFunction ] <nl> + <nl> # C + + function dispatch is wrapped in a lambda function . The lambda function <nl> # has almost the same signature as the C + + function , only with some small <nl> # variants - see details below . <nl> class DispatchLambdaArgumentExprs : <nl> # <nl> # ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ # <nl> <nl> - def _cpp_signature ( f : NativeFunction , * , method : bool = False ) - > cpp . CppSignature : <nl> + def _cpp_signature ( f : NativeFunction , * , method : bool = False ) - > CppSignature : <nl> return CppSignatureGroup . from_schema ( f . func , method = method ) . signature <nl> <nl> def has_tensor_options ( f : NativeFunction ) - > bool : <nl> def _dtype_default_type_hack ( name : str ) - > str : <nl> # For deprecated python signature , it should follow deprecated python arg order . <nl> # TODO : This is to keep same byte - for - byte result as the old codegen - maybe unnecessary ? <nl> <nl> - def dispatch_lambda_args ( ps : PythonSignature , f : NativeFunction , * , method : bool , <nl> - ) - > Tuple [ DispatchLambdaArgument , . . . ] : <nl> + def dispatch_lambda_args ( ps : PythonSignature , f : NativeFunction ) - > Tuple [ DispatchLambdaArgument , . . . ] : <nl> # Start with cpp arguments - dispatch lambda signature always include ' self ' <nl> cpp_args : Sequence [ CppArgument ] = _cpp_signature ( f , method = False ) . arguments ( ) <nl> <nl> def dispatch_lambda_args ( ps : PythonSignature , f : NativeFunction , * , method : bool <nl> def dispatch_lambda_arg ( cpp_arg : CppArgument ) - > DispatchLambdaArgument : <nl> type_str = cpp_arg . 
type <nl> is_out_arg = cpp_arg . name in out_args <nl> - if method and cpp_arg . name = = ' self ' : <nl> + if ps . method and cpp_arg . name = = ' self ' : <nl> # For method ' s ' self ' , we can use ' Tensor & ' and simply ignore mutability ! <nl> type_str = ' Tensor & ' <nl> else : <nl> def cpp_dispatch_target ( f : NativeFunction ) - > str : <nl> return f ' { namespace } : : { name } ' <nl> raise RuntimeError ( f ' could not dispatch , neither function nor method : { f . func } ' ) <nl> <nl> - def cpp_dispatch_exprs ( f : NativeFunction , method : bool , * , <nl> + def cpp_dispatch_exprs ( f : NativeFunction , * , <nl> python_signature : Optional [ PythonSignature ] = None , <nl> ) - > Tuple [ str , . . . ] : <nl> cpp_args : Sequence [ CppArgument ] = _cpp_signature ( f , method = False ) . arguments ( ) <nl> def arg_parser_output_expr ( <nl> <nl> # Returns a map with key = arg_name and value = PythonArgParserOutputExpr . <nl> def arg_parser_output_exprs ( <nl> - ps : PythonSignature , f : NativeFunction , * , method : bool <nl> + ps : PythonSignature , f : NativeFunction <nl> ) - > Dict [ str , PythonArgParserOutputExpr ] : <nl> return { e . name : e for i , a in enumerate ( ps . arguments ( ) ) <nl> for e in ( arg_parser_output_expr ( i , a ) , ) } <nl> def arg_parser_output_exprs ( <nl> <nl> # bind arg parser outputs ( python args ) with dispatch lambda arguments ( c + + args ) . <nl> def dispatch_lambda_exprs ( <nl> - ps : PythonSignature , f : NativeFunction , * , method : bool <nl> + ps : PythonSignature , f : NativeFunction <nl> ) - > DispatchLambdaArgumentExprs : <nl> # This method is to bind ' arg_parser_outputs ' and ' lambda_args ' by producing <nl> # ' inits ' and ' lambda_args_exprs ' for each lambda argument using arg parser <nl> # outputs . <nl> - arg_parser_outputs = arg_parser_output_exprs ( ps , f , method = method ) <nl> - lambda_args = dispatch_lambda_args ( ps , f , method = method ) <nl> + arg_parser_outputs = arg_parser_output_exprs ( ps , f ) <nl> + lambda_args = dispatch_lambda_args ( ps , f ) <nl> inits : List [ str ] = [ ] <nl> lambda_args_exprs : Dict [ str , str ] = dict ( ) <nl> <nl> def dispatch_lambda_exprs ( <nl> lambda_args_exprs [ name ] = arg_parser_expr <nl> <nl> # method ' s self is passed directly to python binding , rather than parsed <nl> - if method : <nl> + if ps . method : <nl> lambda_args_exprs [ ' self ' ] = ' self ' <nl> <nl> # 2 . special packing / checking for TensorOptions . <nl> mmm a / tools / pyi / gen_pyi . py <nl> ppp b / tools / pyi / gen_pyi . py <nl> <nl> from . . autograd . gen_python_functions import ( <nl> get_py_torch_functions , <nl> get_py_variable_methods , <nl> - namedtuple_fieldnames , <nl> ) <nl> from . . autograd . gen_autograd import load_aten_declarations <nl> <nl> def sig_for_ops ( opname ) : <nl> raise Exception ( " unknown op " , opname ) <nl> <nl> <nl> + # Copied from ' gen_python_functions . py ' <nl> + # TODO : consolidate after migrating to the new codegen model in ' tools / codegen ' . 
<nl> + def namedtuple_fieldnames ( declaration ) : <nl> + returns = declaration [ ' returns ' ] <nl> + if len ( returns ) < = 1 or all ( [ ' field_name ' not in x for x in returns ] ) : <nl> + return [ ] <nl> + else : <nl> + def get_field_name ( x ) : <nl> + # See Note [ field_name versus name ] <nl> + if ' field_name ' not in x : <nl> + # When building on Windows , ` PyStructSequence_UnnamedField ` could not be <nl> + # resolved by the linker for some reason , which cause error in building : <nl> + # <nl> + # python_nn_functions . cpp . obj : error LNK2001 : unresolved external symbol <nl> + # PyStructSequence_UnnamedField <nl> + # <nl> + # Thus , at this point in time , we do not support unnamed <nl> + # fields in namedtuple ; you must either name all fields , <nl> + # or none of them . <nl> + raise ValueError ( " Unnamed field is not supported by codegen " ) <nl> + else : <nl> + return x [ ' field_name ' ] <nl> + return [ get_field_name ( x ) for x in returns ] <nl> + <nl> + <nl> def generate_type_hints ( fname , decls , namedtuples , is_tensor = False ) : <nl> " " " generate_type_hints ( fname , decls , is_tensor = False ) <nl> <nl>
[ pytorch ] continue to rewrite gen_python_functions . py with typed models ( )
pytorch/pytorch
16c72a5a6b7085a9651e7bef7e98c80b2d5547e9
2020-11-08T09:34:12Z
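The pytorch commit above introduces a two-stage model: first pair each generated Python signature with its NativeFunction (they are not 1-1, since one native function can yield a deprecated signature as well), then merge base and out-variant pairs into a single parser entry. A minimal C++ sketch of that data shape, purely for orientation (all type and field names here are illustrative stand-ins, not the codegen's real types):

```cpp
#include <optional>
#include <string>

// Stand-ins for the codegen's PythonSignature / NativeFunction; the real
// types carry far more information.
struct PythonSignature { std::string name; };
struct NativeFunction  { std::string schema; };

// Mirrors PythonSignatureNativeFunctionPair: paired but kept separate,
// because one native function can produce more than one Python signature.
struct SignatureFunctionPair {
    PythonSignature signature;
    NativeFunction function;
};

// Mirrors PythonSignatureGroup: one parser entry serves both the base
// function and, when present, its out= variant.
struct SignatureGroup {
    PythonSignature signature;              // outplace signature preferred
    NativeFunction base;                    // e.g. conv2d
    std::optional<NativeFunction> outplace; // e.g. conv2d_out, if any
};

int main() {
    SignatureGroup g{{"conv2d"}, {"conv2d(Tensor, ...)"}, std::nullopt};
    return g.outplace.has_value() ? 1 : 0;
}
```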
mmm a / test . sh <nl> ppp b / test . sh <nl> function testOutline ( ) { <nl> ! cat tmp . pdf | grep - q " . b . a . z " ) & & good Outline | | bad Outline <nl> } <nl> <nl> + function testBuild ( ) { <nl> + rm - rf wkhtmltopdf <nl> + svn export - q . . wkhtmltopdf | | ( bad " Build $ 1 " & & return 1 ) <nl> + cd wkhtmltopdf <nl> + if [ [ " $ 1 " = = " qmake " ] ] ; then <nl> + qmake 2 > / dev / null > / dev / null | | ( bad " Build $ 1 " & & return 1 ) <nl> + else <nl> + cmake . 2 > / dev / null > / dev / null | | ( bad " Build $ 1 " & & return 1 ) <nl> + fi <nl> + make - j5 > / dev / null 2 > / dev / null & & good " Build $ 1 " | | bad " Build $ 1 " <nl> + cd . . <nl> + rm - rf wkhtmltopdf <nl> + } <nl> + <nl> good TestTest <nl> testLocalFileSupport <nl> testToc <nl> testImgSupport png <nl> testRemote <nl> testSSL <nl> testHeaderFooter <nl> - <nl> + testBuild qmake <nl> + testBuild cmake <nl> # Lets clean up <nl> rm tmp . html tmp . pdf <nl> exit $ failed <nl>
Also test if it compiles
wkhtmltopdf/wkhtmltopdf
c5456ea0f838cc59810896dacd34910b6cab986b
2009-03-15T17:42:48Z
mmm a / hphp / hack / src / oxidized / manual / aast_defs_impl . rs <nl> ppp b / hphp / hack / src / oxidized / manual / aast_defs_impl . rs <nl> <nl> / / LICENSE file in the " hack " directory of this source tree . <nl> <nl> use crate : : aast_defs : : * ; <nl> + use crate : : pos : : Pos ; <nl> + use std : : boxed : : Box ; <nl> <nl> impl Lid { <nl> pub fn new ( p : Pos , s : String ) - > Self { <nl> Self ( p , ( 0 , s ) ) <nl> } <nl> } <nl> + <nl> + impl Hint { <nl> + pub fn new ( p : Pos , h : Hint_ ) - > Self { <nl> + Self ( p , Box : : new ( h ) ) <nl> + } <nl> + } <nl> mmm a / hphp / hack / src / parser / lexable_token . rs <nl> ppp b / hphp / hack / src / parser / lexable_token . rs <nl> pub trait LexablePositionedToken < ' a > : LexableToken < ' a > <nl> where <nl> Self : Debug , <nl> { <nl> + fn text < ' b > ( & self , source_text : & ' b SourceText ) - > & ' b str ; <nl> } <nl> mmm a / hphp / hack / src / parser / lowerer . rs <nl> ppp b / hphp / hack / src / parser / lowerer . rs <nl> pub enum Error { <nl> node_name : String , <nl> kind : syntax_kind : : SyntaxKind , <nl> } , <nl> - LowererInvariantFailure ( String , String ) , <nl> + LowererInvariantFailure ( Pos , String ) , <nl> Failwith ( String ) , <nl> } <nl> <nl> where <nl> / / A placehold for error raised in ast_to_aast . ml <nl> } <nl> <nl> + fn invariant_failure_error < N > ( node : & Syntax < T , V > , env : & Env , msg : & str ) - > ret ! ( N ) { <nl> + let pos = Self : : p_pos ( node , env ) ; <nl> + Err ( Error : : LowererInvariantFailure ( pos , String : : from ( msg ) ) ) <nl> + } <nl> + <nl> # [ inline ] <nl> fn failwith < N > ( msg : & str ) - > ret ! ( N ) { <nl> Err ( Error : : Failwith ( String : : from ( msg ) ) ) <nl> where <nl> let pos = Self : : p_pos ( node , env ) ; <nl> let text = Self : : text ( node , env ) ; <nl> Self : : lowering_error ( env , & pos , & text , expecting ) ; <nl> + if let Some ( x ) = fallback { <nl> + if env . fail_open { <nl> + return Ok ( x ) ; <nl> + } <nl> + } <nl> Err ( Error : : APIMissingSyntax { <nl> expecting : String : : from ( expecting ) , <nl> pos : Self : : p_pos ( node , env ) , <nl> where <nl> / / TODO : <nl> } <nl> <nl> + fn p_closure_parameter ( <nl> + node : & Syntax < T , V > , <nl> + env : & mut Env , <nl> + ) - > ret ! ( ( aast ! ( Hint ) , Option < ast_defs : : ParamKind > ) ) { <nl> + match & node . syntax { <nl> + ClosureParameterTypeSpecifier ( c ) = > { <nl> + let kind = Self : : mp_optional ( <nl> + & Self : : p_param_kind , <nl> + & c . closure_parameter_call_convention , <nl> + env , <nl> + ) ? ; <nl> + let hint = Self : : p_hint ( & c . closure_parameter_type , env ) ? ; <nl> + Ok ( ( hint , kind ) ) <nl> + } <nl> + _ = > Self : : missing_syntax ( None , " closure parameter " , node , env ) , <nl> + } <nl> + } <nl> + <nl> + fn mp_shape_expression_field < F , R > ( <nl> + f : F , <nl> + node : & Syntax < T , V > , <nl> + env : & mut Env , <nl> + ) - > ret ! ( ( ast_defs : : ShapeFieldName , R ) ) <nl> + where <nl> + F : Fn ( & Syntax < T , V > , & mut Env ) - > ret ! ( R ) , <nl> + { <nl> + match & node . syntax { <nl> + FieldInitializer ( c ) = > { <nl> + let name = Self : : p_shape_field_name ( & c . field_initializer_name , env ) ? ; <nl> + let value = f ( & c . field_initializer_value , env ) ? ; <nl> + Ok ( ( name , value ) ) <nl> + } <nl> + _ = > Self : : missing_syntax ( None , " shape field " , node , env ) , <nl> + } <nl> + } <nl> + <nl> + fn p_shape_field_name ( node : & Syntax < T , V > , env : & mut Env ) - > ret ! 
( ast_defs : : ShapeFieldName ) { <nl> + use ast_defs : : ShapeFieldName : : * ; <nl> + let is_valid_shape_literal = | t : & T | { <nl> + let is_str = t . kind ( ) = = TK : : SingleQuotedStringLiteral <nl> + | | t . kind ( ) = = TK : : DoubleQuotedStringLiteral ; <nl> + let text = t . text ( env . source_text ( ) ) ; <nl> + let is_empty = text = = " \ ' \ ' " | | text = = " \ " \ " " ; <nl> + is_str & & ! is_empty <nl> + } ; <nl> + if let LiteralExpression ( c ) = & node . syntax { <nl> + if let Token ( t ) = & c . literal_expression . syntax { <nl> + if is_valid_shape_literal ( t ) { <nl> + let ast_defs : : Id ( p , n ) = Self : : pos_name ( node , env ) ? ; <nl> + let str_ = Self : : mk_str ( node , env , & Self : : unesc_dbl , & n ) ; <nl> + match isize : : from_str_radix ( & str_ , 10 ) { <nl> + Ok ( _ ) = > Self : : raise_parsing_error ( <nl> + node , <nl> + env , <nl> + & syntax_error : : shape_field_int_like_string , <nl> + ) , <nl> + _ = > { } <nl> + } <nl> + return Ok ( SFlitStr ( ( p , str_ ) ) ) ; <nl> + } <nl> + } <nl> + } <nl> + match & node . syntax { <nl> + ScopeResolutionExpression ( c ) = > Ok ( SFclassConst ( <nl> + Self : : pos_name ( & c . scope_resolution_qualifier , env ) ? , <nl> + Self : : p_pstring ( & c . scope_resolution_name , env ) ? , <nl> + ) ) , <nl> + _ = > { <nl> + Self : : raise_parsing_error ( node , env , & syntax_error : : invalid_shape_field_name ) ; <nl> + let ast_defs : : Id ( p , n ) = Self : : pos_name ( node , env ) ? ; <nl> + Ok ( SFlitStr ( ( p , Self : : mk_str ( node , env , & Self : : unesc_dbl , & n ) ) ) ) <nl> + } <nl> + } <nl> + } <nl> + <nl> + fn p_shape_field ( node : & Syntax < T , V > , env : & mut Env ) - > ret_aast ! ( ShapeFieldInfo ) { <nl> + match & node . syntax { <nl> + FieldSpecifier ( c ) = > { <nl> + let optional = ! c . field_question . is_missing ( ) ; <nl> + let name = Self : : p_shape_field_name ( & c . field_name , env ) ? ; <nl> + let hint = Self : : p_hint ( & c . field_type , env ) ? ; <nl> + Ok ( aast : : ShapeFieldInfo { <nl> + optional , <nl> + hint , <nl> + name , <nl> + } ) <nl> + } <nl> + _ = > { <nl> + let ( name , hint ) = Self : : mp_shape_expression_field ( & Self : : p_hint , node , env ) ? ; <nl> + Ok ( aast : : ShapeFieldInfo { <nl> + optional : false , <nl> + name , <nl> + hint , <nl> + } ) <nl> + } <nl> + } <nl> + } <nl> + <nl> fn p_hint_ ( node : & Syntax < T , V > , env : & mut Env ) - > ret_aast ! ( Hint_ ) { <nl> use aast_defs : : Hint_ : : * ; <nl> let unary = | kw , ty , env : & mut Env | { <nl> where <nl> Token ( _ ) | SimpleTypeSpecifier ( _ ) | QualifiedName ( _ ) = > { <nl> Ok ( Happly ( Self : : pos_name ( node , env ) ? , vec ! [ ] ) ) <nl> } <nl> - ShapeTypeSpecifier ( _ ) = > not_impl ! ( ) , <nl> + ShapeTypeSpecifier ( c ) = > { <nl> + let allows_unknown_fields = ! c . shape_type_ellipsis . is_missing ( ) ; <nl> + / * if last element lacks a separator and ellipsis is present , error * / <nl> + if let Some ( l ) = Self : : syntax_to_list ( true , & c . shape_type_fields ) . last ( ) { <nl> + if l . is_missing ( ) & & allows_unknown_fields { <nl> + Self : : raise_parsing_error ( <nl> + node , <nl> + env , <nl> + & syntax_error : : shape_type_ellipsis_without_trailing_comma , <nl> + ) <nl> + } <nl> + } <nl> + <nl> + let field_map = Self : : could_map ( & Self : : p_shape_field , & c . shape_type_fields , env ) ? 
; <nl> + <nl> + Ok ( Hshape ( aast : : NastShapeInfo { <nl> + allows_unknown_fields , <nl> + field_map , <nl> + } ) ) <nl> + } <nl> TupleTypeSpecifier ( c ) = > { <nl> Ok ( Htuple ( Self : : could_map ( & Self : : p_hint , & c . tuple_types , env ) ? ) ) <nl> } <nl> where <nl> } <nl> GenericTypeSpecifier ( c ) = > { <nl> let name = Self : : pos_name ( & c . generic_class_type , env ) ? ; <nl> - let type_args = match & c . generic_argument_list . syntax { <nl> + let args = & c . generic_argument_list ; <nl> + let type_args = match & args . syntax { <nl> TypeArguments ( c ) = > { <nl> Self : : could_map ( & Self : : p_hint , & c . type_arguments_types , env ) ? <nl> } <nl> - _ = > Self : : missing_syntax ( <nl> - None , <nl> - " generic type arguments " , <nl> - & c . generic_argument_list , <nl> - env , <nl> - ) ? , <nl> + _ = > Self : : missing_syntax ( None , " generic type arguments " , args , env ) ? , <nl> } ; <nl> if env . codegen ( ) { <nl> not_impl ! ( ) <nl> where <nl> NullableTypeSpecifier ( c ) = > Ok ( Hoption ( Self : : p_hint ( & c . nullable_type , env ) ? ) ) , <nl> LikeTypeSpecifier ( c ) = > Ok ( Hlike ( Self : : p_hint ( & c . like_type , env ) ? ) ) , <nl> SoftTypeSpecifier ( c ) = > Ok ( Hsoft ( Self : : p_hint ( & c . soft_type , env ) ? ) ) , <nl> - ClosureTypeSpecifier ( _ ) = > not_impl ! ( ) , <nl> - AttributizedSpecifier ( _ ) = > not_impl ! ( ) , <nl> - TypeConstant ( _ ) = > not_impl ! ( ) , <nl> - ReifiedTypeArgument ( _ ) = > not_impl ! ( ) , <nl> + ClosureTypeSpecifier ( c ) = > { <nl> + let ( param_list , variadic_hints ) : ( Vec < & Syntax < T , V > > , Vec < & Syntax < T , V > > ) = <nl> + Self : : as_list ( & c . closure_parameter_list ) <nl> + . iter ( ) <nl> + . partition ( | n | match & n . syntax { <nl> + VariadicParameter ( _ ) = > false , <nl> + _ = > true , <nl> + } ) ; <nl> + let ( type_hints , kinds ) = param_list <nl> + . iter ( ) <nl> + . map ( | p | Self : : p_closure_parameter ( p , env ) ) <nl> + . collect : : < std : : result : : Result < Vec < _ > , _ > > ( ) ? <nl> + . into_iter ( ) <nl> + . unzip ( ) ; <nl> + let variadic_hints = variadic_hints <nl> + . iter ( ) <nl> + . map ( | v | match & v . syntax { <nl> + VariadicParameter ( c ) = > { <nl> + let vtype = & c . variadic_parameter_type ; <nl> + if vtype . is_missing ( ) { <nl> + Self : : raise_parsing_error ( <nl> + v , <nl> + env , <nl> + " Cannot use . . . without a typehint " , <nl> + ) ; <nl> + } <nl> + Ok ( Some ( Self : : p_hint ( vtype , env ) ? ) ) <nl> + } <nl> + _ = > panic ! ( " expect variadic parameter " ) , <nl> + } ) <nl> + . collect : : < std : : result : : Result < Vec < _ > , _ > > ( ) ? ; <nl> + if variadic_hints . len ( ) > 1 { <nl> + let msg = format ! ( <nl> + " { } variadic parameters found . There should be no more than one . " , <nl> + variadic_hints . len ( ) . to_string ( ) <nl> + ) ; <nl> + Self : : invariant_failure_error ( node , env , & msg ) ? ; <nl> + } <nl> + Ok ( Hfun { <nl> + reactive_kind : aast : : FuncReactive : : FNonreactive , <nl> + is_coroutine : ! c . closure_coroutine . is_missing ( ) , <nl> + param_tys : type_hints , <nl> + param_kinds : kinds , <nl> + param_mutability : vec ! [ ] , <nl> + variadic_ty : variadic_hints . into_iter ( ) . next ( ) . unwrap_or ( None ) , <nl> + return_ty : Self : : p_hint ( & c . closure_return_type , env ) ? , <nl> + is_mutable_return : true , <nl> + } ) <nl> + } <nl> + AttributizedSpecifier ( c ) = > { <nl> + let attrs = Self : : p_user_attribute ( & c . attributized_specifier_attribute_spec , env ) ? 
; <nl> + let hint = Self : : p_hint ( & c . attributized_specifier_type , env ) ? ; <nl> + if attrs . iter ( ) . any ( | attr | attr . name . 1 ! = " __Soft " ) { <nl> + Self : : raise_parsing_error ( node , env , & syntax_error : : only_soft_allowed ) ; <nl> + } <nl> + Ok ( * Self : : soften_hint ( & attrs , hint ) . 1 ) <nl> + } <nl> + TypeConstant ( c ) = > { <nl> + let child = Self : : pos_name ( & c . type_constant_right_type , env ) ? ; <nl> + match Self : : p_hint_ ( & c . type_constant_left_type , env ) ? { <nl> + Haccess ( root , mut cs ) = > { <nl> + cs . push ( child ) ; <nl> + Ok ( Haccess ( root , cs ) ) <nl> + } <nl> + Happly ( ty , param ) = > { <nl> + if param . is_empty ( ) { <nl> + let root = aast : : Hint : : new ( ty . 0 . clone ( ) , Happly ( ty , param ) ) ; <nl> + Ok ( Haccess ( root , vec ! [ child ] ) ) <nl> + } else { <nl> + Self : : missing_syntax ( None , " type constant base " , node , env ) <nl> + } <nl> + } <nl> + _ = > Self : : missing_syntax ( None , " type constant base " , node , env ) , <nl> + } <nl> + } <nl> + PUAccess ( c ) = > { <nl> + let pos = Self : : p_pos ( & c . pu_access_left_type , env ) ; <nl> + let child = Self : : pos_name ( & c . pu_access_right_type , env ) ? ; <nl> + match Self : : p_hint_ ( & c . pu_access_left_type , env ) ? { <nl> + h @ HpuAccess ( _ , _ ) = > Ok ( HpuAccess ( aast : : Hint : : new ( pos , h ) , child ) ) , <nl> + Happly ( id , hints ) = > { <nl> + if hints . is_empty ( ) { <nl> + Ok ( HpuAccess ( aast : : Hint : : new ( pos , Happly ( id , hints ) ) , child ) ) <nl> + } else { <nl> + Self : : missing_syntax ( None , " pocket universe access base " , node , env ) <nl> + } <nl> + } <nl> + _ = > Self : : missing_syntax ( None , " pocket universe access base " , node , env ) , <nl> + } <nl> + } <nl> + ReifiedTypeArgument ( _ ) = > { <nl> + Self : : raise_parsing_error ( node , env , & syntax_error : : invalid_reified ) ; <nl> + Self : : missing_syntax ( None , " refied type " , node , env ) <nl> + } <nl> _ = > Self : : missing_syntax ( None , " type hint " , node , env ) , <nl> } <nl> } <nl> where <nl> fn p_hint ( node : & Syntax < T , V > , env : & mut Env ) - > ret_aast ! ( Hint ) { <nl> let hint_ = Self : : p_hint_ ( node , env ) ? ; <nl> let pos = Self : : p_pos ( node , env ) ; <nl> - let hint = aast_defs : : Hint ( pos , Box : : new ( hint_ ) ) ; <nl> + let hint = aast : : Hint : : new ( pos , hint_ ) ; <nl> Self : : check_valid_reified_hint ( env , node , & hint ) ; <nl> Ok ( hint ) <nl> } <nl> where <nl> env , <nl> ) ? , <nl> ) ) , <nl> - ShapeExpression ( c ) = > not_impl ! ( ) , <nl> + ShapeExpression ( c ) = > Ok ( E_ : : Shape ( Self : : could_map ( <nl> + & | n : & Syntax < T , V > , e : & mut Env | { <nl> + Self : : mp_shape_expression_field ( & Self : : p_expr , n , e ) <nl> + } , <nl> + & c . shape_expression_fields , <nl> + env , <nl> + ) ? ) ) , <nl> ObjectCreationExpression ( c ) = > { <nl> Self : : p_expr_impl_ ( location , & c . object_creation_object , env , Some ( pos ) ) <nl> } <nl> where <nl> PocketAtomExpression ( c ) = > Ok ( E_ : : PUAtom ( <nl> Self : : pos_name ( & c . pocket_atom_expression , env ) ? . 1 , <nl> ) ) , <nl> - PocketIdentifierExpression ( c ) = > not_impl ! ( ) , <nl> - _ = > not_impl ! ( ) , <nl> + PocketIdentifierExpression ( c ) = > { <nl> + let mk_class_id = | e : aast ! ( Expr < , > ) | aast : : ClassId ( pos , aast : : ClassId_ : : CIexpr ( e ) ) ; <nl> + let qual = Self : : p_expr ( & c . pocket_identifier_qualifier , env ) ? ; <nl> + let qual = if env . 
codegen ( ) { <nl> + mk_class_id ( qual ) <nl> + } else if let E_ : : Lvar ( a ) = * qual . 1 { <nl> + let p = qual . 0 ; <nl> + let expr = E : : new ( p . clone ( ) , E_ : : Id ( ast_defs : : Id ( p , ( a . 1 ) . 1 ) ) ) ; <nl> + mk_class_id ( expr ) <nl> + } else { <nl> + mk_class_id ( qual ) <nl> + } ; <nl> + let E ( p , expr_ ) = Self : : p_expr ( & c . pocket_identifier_field , env ) ? ; <nl> + let field = match * expr_ { <nl> + E_ : : String ( id ) = > ( p , id ) , <nl> + E_ : : Id ( ast_defs : : Id ( p , n ) ) = > ( p , n ) , <nl> + _ = > Self : : missing_syntax ( None , " PocketIdentifierExpression field " , node , env ) ? , <nl> + } ; <nl> + let E ( p , expr_ ) = Self : : p_expr ( & c . pocket_identifier_name , env ) ? ; <nl> + let name = match * expr_ { <nl> + E_ : : String ( id ) = > ( p , id ) , <nl> + E_ : : Id ( ast_defs : : Id ( p , n ) ) = > ( p , n ) , <nl> + _ = > Self : : missing_syntax ( None , " PocketIdentifierExpression name " , node , env ) ? , <nl> + } ; <nl> + Ok ( E_ : : PUIdentifier ( qual , field , name ) ) <nl> + } <nl> + _ = > Self : : missing_syntax ( Some ( E_ : : Null ) , " expression " , node , env ) , <nl> } <nl> } <nl> <nl> where <nl> } <nl> GotoLabel ( c ) = > { <nl> if env . is_typechecker ( ) & & ! env . parser_options . po_allow_goto { <nl> - Self : : raise_parsing_error ( node , env , " & syntax_error : : goto_label " ) ; <nl> + Self : : raise_parsing_error ( node , env , & syntax_error : : goto_label ) ; <nl> } <nl> Ok ( S : : new ( <nl> pos , <nl> where <nl> } <nl> GotoStatement ( c ) = > { <nl> if env . is_typechecker ( ) & & ! env . parser_options . po_allow_goto { <nl> - Self : : raise_parsing_error ( node , env , " & syntax_error : : goto_label " ) ; <nl> + Self : : raise_parsing_error ( node , env , & syntax_error : : goto_label ) ; <nl> } <nl> Ok ( S : : new ( <nl> pos , <nl> where <nl> . map_or ( S_ : : Continue , S_ : : TempContinue ) ; <nl> Ok ( S : : new ( pos , ctn ) ) <nl> } <nl> + ConcurrentStatement ( c ) = > not_impl ! ( ) , <nl> MarkupSection ( _ ) = > Self : : p_markup ( node , env ) , <nl> - _ = > not_impl ! ( ) , <nl> + _ = > Self : : missing_syntax ( <nl> + Some ( S : : new ( Pos : : make_none ( ) , S_ : : Noop ) ) , <nl> + " statement " , <nl> + node , <nl> + env , <nl> + ) , <nl> } <nl> } <nl> <nl> where <nl> match * ( e . 1 ) { <nl> aast : : Expr_ : : ArrayGet ( ref e , Some ( _ ) ) = > Self : : check_mutate_class_const ( e , node , env ) , <nl> aast : : Expr_ : : ClassConst ( _ , _ ) = > { <nl> - Self : : raise_parsing_error ( node , env , " & syntax_error : : const_mutation " ) <nl> + Self : : raise_parsing_error ( node , env , & syntax_error : : const_mutation ) <nl> } <nl> _ = > { } <nl> } <nl> where <nl> <nl> fn soften_hint ( attrs : & [ aast ! ( UserAttribute < , > ) ] , hint : aast ! ( Hint ) ) - > aast ! ( Hint ) { <nl> if Self : : has_soft ( attrs ) { <nl> - aast : : Hint ( hint . 0 . clone ( ) , Box : : new ( aast : : Hint_ : : Hsoft ( hint ) ) ) <nl> + aast : : Hint : : new ( hint . 0 . clone ( ) , aast : : Hint_ : : Hsoft ( hint ) ) <nl> } else { <nl> hint <nl> } <nl> where <nl> fh_return_type , <nl> } ) <nl> } <nl> - LambdaSignature ( _ ) = > not_impl ! ( ) , <nl> + LambdaSignature ( c ) = > { <nl> + let mut header = FunHdr : : make_empty ( ) ; <nl> + header . fh_parameters = <nl> + Self : : could_map ( & Self : : p_fun_param , & c . lambda_parameters , env ) ? ; <nl> + header . fh_return_type = Self : : mp_optional ( & Self : : p_hint , & c . lambda_type , env ) ? 
; <nl> + Ok ( header ) <nl> + } <nl> Token ( _ ) = > Ok ( FunHdr : : make_empty ( ) ) , <nl> _ = > Self : : missing_syntax ( None , " function header " , node , env ) , <nl> } <nl> where <nl> } <nl> } <nl> } <nl> - _ = > not_impl ! ( ) , <nl> + _ = > { <nl> + let f = | e : & mut Env | { <nl> + let expr = Self : : p_expr ( node , e ) ? ; <nl> + Ok ( aast : : Stmt : : new ( <nl> + expr . 0 . clone ( ) , <nl> + aast : : Stmt_ : : Return ( Some ( expr ) ) , <nl> + ) ) <nl> + } ; <nl> + Ok ( vec ! [ Self : : lift_awaits_in_statement ( f , node , env ) ? ] ) <nl> + } <nl> } <nl> } <nl> <nl> mmm a / hphp / hack / src / parser / positioned_token . rs <nl> ppp b / hphp / hack / src / parser / positioned_token . rs <nl> impl PartialEq for PositionedToken { <nl> } <nl> impl Eq for PositionedToken { } <nl> <nl> - impl < ' a > LexablePositionedToken < ' a > for PositionedToken { } <nl> + impl < ' a > LexablePositionedToken < ' a > for PositionedToken { <nl> + fn text < ' b > ( & self , source_text : & ' b SourceText ) - > & ' b str { <nl> + source_text . sub_as_str ( self . start_offset ( ) , self . width ( ) ) <nl> + } <nl> + } <nl> mmm a / hphp / hack / src / utils / escaper . rs <nl> ppp b / hphp / hack / src / utils / escaper . rs <nl> fn unescape_literal ( literal_kind : LiteralKind , s : & str ) - > Result < String , Invali <nl> if literal_kind = = LiteralKind : : LiteralBacktick { <nl> push ( & mut output , ' ` ' ) <nl> } else { <nl> - push_str ( & mut output , " \ \ ' " ) <nl> + push_str ( & mut output , " \ \ ` " ) <nl> } <nl> } <nl> ' \ " ' = > { <nl> pub fn extract_unquoted_string ( <nl> / / quoted ) and end with a line containing only the terminator and a <nl> / / semicolon followed by a blank line . We need to drop the opening line <nl> / / as well as the blank line and preceding terminator line . <nl> - match ( content . find ( ' \ n ' ) , content . rfind ( ' \ n ' ) ) { <nl> + match ( content . find ( ' \ n ' ) , content [ . . start + len - 1 ] . rfind ( ' \ n ' ) ) { <nl> ( Some ( start_ ) , Some ( end_ ) ) = > <nl> / / An empty heredoc , this way , will have start > = end <nl> { <nl> pub fn extract_unquoted_string ( <nl> Ok ( content [ start_ + 1 . . end_ ] . to_string ( ) ) <nl> } <nl> } <nl> - _ = > Err ( InvalidString { <nl> - msg : String : : from ( " out of bounds " ) , <nl> - } ) , <nl> + _ = > Ok ( String : : from ( content ) ) , <nl> } <nl> } else { <nl> static SINGLE_QUOTE : u8 = ' \ ' ' as u8 ; <nl> mod tests { <nl> assert_eq ! ( unescape_nowdoc ( " home \ \ $ " ) . unwrap ( ) , " home \ \ $ " . to_string ( ) ) ; <nl> assert_eq ! ( unescape_single ( " home \ \ ' " ) . unwrap ( ) , " home ' " . to_string ( ) ) ; <nl> assert_eq ! ( unescape_nowdoc ( " home \ \ ' " ) . unwrap ( ) , " home \ \ ' " . to_string ( ) ) ; <nl> + assert_eq ! ( unescape_nowdoc ( " \ \ ` " ) . unwrap ( ) , " \ \ ` " ) ; <nl> assert_eq ! ( unescape_single ( " \ \ a \ \ \ ' " ) . unwrap ( ) , " \ \ a ' " ) ; <nl> assert_eq ! ( unescape_long_string ( " \ \ a " ) . unwrap ( ) , " \ x07 " ) ; <nl> assert_eq ! ( unescape_long_string ( " \ \ v " ) . unwrap ( ) , " \ x0b " ) ; <nl> mod tests { <nl> assert_eq ! ( unescape_long_string ( " \ \ e " ) . unwrap ( ) , " \ x1b " ) ; <nl> assert_eq ! ( unescape_long_string ( " \ \ f " ) . unwrap ( ) , " \ x0c " ) ; <nl> assert_eq ! ( unescape_long_string ( " \ \ \ " " ) . unwrap ( ) , " \ " " ) ; <nl> + assert_eq ! ( unescape_long_string ( " \ \ ` " ) . unwrap ( ) , " \ \ ` " ) ; <nl> assert_eq ! ( unescape_heredoc ( " \ \ \ " " ) . 
unwrap ( ) , " \ \ \ " " ) ; <nl> assert_eq ! ( unescape_heredoc ( " \ \ p " ) . unwrap ( ) , " \ \ p " ) ; <nl> assert_eq ! ( unescape_long_string ( " \ \ r " ) . unwrap ( ) , " " ) ; <nl> mod tests { <nl> ) ; <nl> assert_eq ! ( unescape_long_string ( " \ \ xb1 " ) . unwrap ( ) . as_bytes ( ) , & [ 177u8 ] ) ; <nl> <nl> + let euro = " \ u { 20AC } " ; / / as bytes [ 226 , 130 , 172 ] <nl> + assert_eq ! ( <nl> + unescape_long_string ( euro ) . unwrap ( ) . as_bytes ( ) , <nl> + & [ 226u8 , 130u8 , 172u8 ] <nl> + ) ; <nl> + assert_eq ! ( unescape_backtick ( " \ \ ` " ) . unwrap ( ) , " ` " ) ; <nl> + assert_eq ! ( unescape_long_string ( " \ \ xb1 " ) . unwrap ( ) . as_bytes ( ) , & [ 177u8 ] ) ; <nl> + <nl> let euro = " \ u { 20AC } " ; / / as bytes [ 226 , 130 , 172 ] <nl> assert_eq ! ( <nl> unescape_long_string ( euro ) . unwrap ( ) . as_bytes ( ) , <nl> mod tests { <nl> extract_unquoted_string ( " < < < EOT \ na \ nEOT ; " , 0 , 13 ) . unwrap ( ) , <nl> " a " <nl> ) ; <nl> + assert_eq ! ( <nl> + extract_unquoted_string ( " < < < EOT \ n \ nEOT ; \ n " , 0 , 13 ) . unwrap ( ) , <nl> + " " <nl> + ) ; <nl> + assert_eq ! ( <nl> + extract_unquoted_string ( " < < < EOT \ na \ nEOT ; \ n " , 0 , 14 ) . unwrap ( ) , <nl> + " a " <nl> + ) ; <nl> } <nl> <nl> } <nl> mmm a / hphp / hack / test / rust / rust_ocaml_test . ml <nl> ppp b / hphp / hack / test / rust / rust_ocaml_test . ml <nl> let get_files_in_path ~ args path = <nl> @ @ String_utils . string_ends_with <nl> f <nl> " runs_out_of_retries_in_line_splitter . php " <nl> + & & not <nl> + @ @ String_utils . string_ends_with <nl> + f <nl> + " ffp / tests / const_initializers . php " <nl> | _ - > true ) <nl> files <nl> <nl>
p_hint 2 / 2
facebook/hhvm
ed06fd57c6469fe0be77c3cab424176307fa5290
2019-09-20T07:46:37Z
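Among the hint-lowering cases added in the hhvm diff above, TypeConstant builds an access chain like A::B::C by recursing on the left type and pushing the right-hand name onto an existing Haccess, so the chain stays one node with a root plus an ordered list of names. A small C++ sketch of that accumulation, under illustrative types (the real lowerer returns Result-wrapped AAST nodes, not plain values):

```cpp
#include <iostream>
#include <string>
#include <vector>

// A type-constant access such as A::B::C, stored as root "A" plus
// ["B", "C"], mirroring how the lowerer grows Haccess(root, cs).
struct Haccess {
    std::string root;
    std::vector<std::string> names;
};

// Push the right-hand name onto an existing chain, as the TypeConstant
// case does: Haccess(root, cs) -> cs.push(child).
Haccess extend(Haccess base, std::string child) {
    base.names.push_back(std::move(child));
    return base;
}

int main() {
    Haccess h{"A", {}};
    h = extend(std::move(h), "B");
    h = extend(std::move(h), "C");
    std::cout << h.root;
    for (const auto& n : h.names) std::cout << "::" << n;
    std::cout << '\n';  // prints: A::B::C
}
```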
mmm a / src / validation . cpp <nl> ppp b / src / validation . cpp <nl> bool RewindBlockIndex ( const CChainParams & params ) { <nl> return false ; <nl> } <nl> <nl> + LOCK ( cs_main ) ; <nl> if ( : : ChainActive ( ) . Tip ( ) ! = nullptr ) { <nl> / / FlushStateToDisk can possibly read : : ChainActive ( ) . Be conservative <nl> / / and skip it here , we ' re about to - reindex - chainstate anyway , so <nl>
validation : Hold cs_main when reading chainActive in RewindBlockIndex
bitcoin/bitcoin
1609809fb2a4c2ec15b7c26dc328e2e666bd5d57
2019-05-15T12:58:15Z
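The one-line bitcoin fix above is the lock-before-read pattern: ::ChainActive() is guarded by cs_main, so even a null-check of the tip must happen under the lock. A minimal sketch of the same discipline using standard C++ primitives (g_state_mutex, g_tip, and TipIsSet are illustrative names, not Bitcoin Core's API):

```cpp
#include <mutex>
#include <optional>

std::mutex g_state_mutex;   // plays the role of cs_main
std::optional<int> g_tip;   // stand-in for ::ChainActive().Tip()

// Read the shared state only while holding the guarding mutex, which is
// what the added LOCK(cs_main) guarantees in RewindBlockIndex.
bool TipIsSet() {
    std::lock_guard<std::mutex> lock(g_state_mutex);
    return g_tip.has_value();
}

int main() {
    {
        std::lock_guard<std::mutex> lock(g_state_mutex);
        g_tip = 7;  // writers take the same lock
    }
    return TipIsSet() ? 0 : 1;
}
```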
mmm a / . circleci / Dockerfile . cpu <nl> ppp b / . circleci / Dockerfile . cpu <nl> sudo apt - get install - y python3 - dev python3 - pip swig libmkl - dev <nl> # Install recent CMake . <nl> RUN wget - nv - O - https : / / github . com / Kitware / CMake / releases / download / v3 . 17 . 1 / cmake - 3 . 17 . 1 - Linux - x86_64 . tar . gz | sudo tar xzf - - - strip - components = 1 - C / usr <nl> <nl> - # Install numpy / scipy for python tests . <nl> - RUN pip3 install numpy scipy <nl> + # Install numpy / scipy / pytorch for python tests . <nl> + RUN pip3 install numpy scipy torch <nl> mmm a / . circleci / config . yml <nl> ppp b / . circleci / config . yml <nl> jobs : <nl> - run : <nl> name : Test python extension <nl> command : | <nl> - pip3 install pytest <nl> + pip3 install pytest torch <nl> export PYTHONPATH = " $ ( ls - d . / build / faiss / python / build / lib * / ) " <nl> pytest - - junitxml = test - results / pytest / results . xml . / tests <nl> - store_test_results : <nl> jobs : <nl> name : Build / test <nl> command : | <nl> conda install conda - build <nl> + conda install - c pytorch pytorch <nl> cd conda <nl> conda build faiss - - python 3 . 7 <nl> <nl>
Make pytorch available in CircleCI . ( )
facebookresearch/faiss
bab6db84e05b1218eb5e690153d77becf26cbb94
2020-10-23T04:10:32Z
mmm a / src / mongo / db / cst / bson_lexer . cpp <nl> ppp b / src / mongo / db / cst / bson_lexer . cpp <nl> const StringMap < ParserGen : : token_type > reservedKeyFieldnameLookup = { <nl> { " $ concatArrays " , ParserGen : : token : : CONCAT_ARRAYS } , <nl> { " $ filter " , ParserGen : : token : : FILTER } , <nl> { " $ first " , ParserGen : : token : : FIRST } , <nl> - { " $ in " , ParserGen : : token : : IN } , <nl> + { " $ in " , ParserGen : : token : : IN_ } , <nl> { " $ indexOfArray " , ParserGen : : token : : INDEX_OF_ARRAY } , <nl> { " $ isArray " , ParserGen : : token : : IS_ARRAY } , <nl> { " as " , ParserGen : : token : : ARG_AS } , <nl> mmm a / src / mongo / db / cst / parser_gen . hpp <nl> ppp b / src / mongo / db / cst / parser_gen . hpp <nl> class ParserGen { <nl> GTE = 90 , / / GTE <nl> HOUR = 91 , / / HOUR <nl> ID = 92 , / / ID <nl> - IN = 93 , / / IN <nl> + IN_ = 93 , / / IN <nl> INDEX_KEY = 94 , / / " indexKey " <nl> INDEX_OF_ARRAY = 95 , / / INDEX_OF_ARRAY <nl> INDEX_OF_BYTES = 96 , / / INDEX_OF_BYTES <nl> class ParserGen { <nl> tok = = token : : EXPONENT | | tok = = token : : EXPR | | tok = = token : : FILTER | | <nl> tok = = token : : FIRST | | tok = = token : : FLOOR | | tok = = token : : GEO_NEAR_DISTANCE | | <nl> tok = = token : : GEO_NEAR_POINT | | tok = = token : : GT | | tok = = token : : GTE | | <nl> - tok = = token : : HOUR | | tok = = token : : ID | | tok = = token : : IN | | <nl> + tok = = token : : HOUR | | tok = = token : : ID | | tok = = token : : IN_ | | <nl> tok = = token : : INDEX_KEY | | tok = = token : : INDEX_OF_ARRAY | | <nl> tok = = token : : INDEX_OF_BYTES | | tok = = token : : INDEX_OF_CP | | <nl> tok = = token : : INT_NEGATIVE_ONE | | tok = = token : : INT_ONE | | tok = = token : : INT_ZERO | | <nl> class ParserGen { <nl> tok = = token : : EXPONENT | | tok = = token : : EXPR | | tok = = token : : FILTER | | <nl> tok = = token : : FIRST | | tok = = token : : FLOOR | | tok = = token : : GEO_NEAR_DISTANCE | | <nl> tok = = token : : GEO_NEAR_POINT | | tok = = token : : GT | | tok = = token : : GTE | | <nl> - tok = = token : : HOUR | | tok = = token : : ID | | tok = = token : : IN | | <nl> + tok = = token : : HOUR | | tok = = token : : ID | | tok = = token : : IN_ | | <nl> tok = = token : : INDEX_KEY | | tok = = token : : INDEX_OF_ARRAY | | <nl> tok = = token : : INDEX_OF_BYTES | | tok = = token : : INDEX_OF_CP | | <nl> tok = = token : : INT_NEGATIVE_ONE | | tok = = token : : INT_ONE | | tok = = token : : INT_ZERO | | <nl> class ParserGen { <nl> # endif <nl> # if 201103L < = YY_CPLUSPLUS <nl> static symbol_type make_IN ( location_type l ) { <nl> - return symbol_type ( token : : IN , std : : move ( l ) ) ; <nl> + return symbol_type ( token : : IN_ , std : : move ( l ) ) ; <nl> } <nl> # else <nl> static symbol_type make_IN ( const location_type & l ) { <nl> - return symbol_type ( token : : IN , l ) ; <nl> + return symbol_type ( token : : IN_ , l ) ; <nl> } <nl> # endif <nl> # if 201103L < = YY_CPLUSPLUS <nl>
SERVER - 48863 Fix broken Windows compile
mongodb/mongo
f6174fa2dcec85dd1ac79d184a90557e101a4b3f
2020-10-08T10:44:32Z
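The mongodb rename above (IN -> IN_) works around a macro collision: Windows platform headers define IN (and OUT) as empty annotation macros, so an enumerator spelled IN is erased by the preprocessor before the compiler ever parses it. A compilable sketch of the failure mode and the fix (the #define here simulates the Windows header on any OS; the token value is illustrative):

```cpp
// Simulate what the Windows SDK header effectively does.
#define IN

enum class token {
    // IN = 93,   // would expand to '= 93,' and fail to parse on Windows
    IN_ = 93,     // the rename applied in the diff: keyword "$in" -> IN_
};

int main() {
    token t = token::IN_;  // unaffected: the macro matches IN exactly
    return t == token::IN_ ? 0 : 1;
}
```

Keeping the public spelling ("$in" in the lexer table) while renaming only the C++ enumerator means the grammar is untouched; only the generated token identifier changes.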
mmm a / xbmc / epg / GUIEPGGridContainer . cpp <nl> ppp b / xbmc / epg / GUIEPGGridContainer . cpp <nl> CGUIEPGGridContainer : : CGUIEPGGridContainer ( const CGUIEPGGridContainer & other ) <nl> m_blocksPerPage ( other . m_blocksPerPage ) , <nl> m_blockCursor ( other . m_blockCursor ) , <nl> m_blockOffset ( other . m_blockOffset ) , <nl> + m_blockTravelAxis ( other . m_blockTravelAxis ) , <nl> m_cacheChannelItems ( other . m_cacheChannelItems ) , <nl> m_cacheProgrammeItems ( other . m_cacheProgrammeItems ) , <nl> m_cacheRulerItems ( other . m_cacheRulerItems ) , <nl>
Merge pull request from ksooo / pvr - cid1401587
xbmc/xbmc
c5983f3781991de6f7c2b3fa2d2bedea80f23145
2017-03-01T17:30:27Z
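The xbmc change above adds m_blockTravelAxis to the copy constructor's initializer list; the Coverity CID referenced in the merge title flags exactly this defect class, where a hand-written copy constructor leaves one member uninitialized in the copy. A minimal sketch of the pattern (GridState and its members are illustrative stand-ins for CGUIEPGGridContainer):

```cpp
// A hand-written copy constructor must initialize every member; omitting
// one leaves it indeterminate in the copy, which static analysis reports.
struct GridState {
    int m_blockOffset;
    int m_blockTravelAxis;

    GridState() : m_blockOffset(0), m_blockTravelAxis(0) {}

    // Copying every member explicitly, as the diff now does; dropping
    // m_blockTravelAxis from this list would be the reported defect.
    GridState(const GridState& other)
        : m_blockOffset(other.m_blockOffset),
          m_blockTravelAxis(other.m_blockTravelAxis) {}
};

int main() {
    GridState a;
    a.m_blockTravelAxis = 3;
    GridState b(a);
    return b.m_blockTravelAxis == 3 ? 0 : 1;
}
```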
mmm a / BUILD <nl> ppp b / BUILD <nl> grpc_cc_library ( <nl> ] , <nl> ) <nl> <nl> - grpc_cc_library ( <nl> - name = " grpc_http_util " , <nl> - srcs = [ <nl> - " src / core / ext / filters / http / client / util . cc " , <nl> - ] , <nl> - hdrs = [ <nl> - " src / core / ext / filters / http / client / util . h " , <nl> - ] , <nl> - language = " c + + " , <nl> - deps = [ <nl> - " grpc_base " , <nl> - ] , <nl> - ) <nl> - <nl> grpc_cc_library ( <nl> name = " grpc_http_filters " , <nl> srcs = [ <nl> grpc_cc_library ( <nl> language = " c + + " , <nl> deps = [ <nl> " grpc_base " , <nl> - " grpc_http_util " , <nl> " grpc_message_size_filter " , <nl> ] , <nl> ) <nl> grpc_cc_library ( <nl> deps = [ <nl> " grpc_base " , <nl> " grpc_client_channel " , <nl> - " grpc_http_util " , <nl> " grpc_resolver_xds_header " , <nl> " grpc_xds_api_header " , <nl> ] , <nl> mmm a / BUILD . gn <nl> ppp b / BUILD . gn <nl> config ( " grpc_config " ) { <nl> " src / core / ext / filters / deadline / deadline_filter . h " , <nl> " src / core / ext / filters / http / client / http_client_filter . cc " , <nl> " src / core / ext / filters / http / client / http_client_filter . h " , <nl> - " src / core / ext / filters / http / client / util . cc " , <nl> - " src / core / ext / filters / http / client / util . h " , <nl> " src / core / ext / filters / http / client_authority_filter . cc " , <nl> " src / core / ext / filters / http / client_authority_filter . h " , <nl> " src / core / ext / filters / http / http_filters_plugin . cc " , <nl> mmm a / CMakeLists . txt <nl> ppp b / CMakeLists . txt <nl> add_library ( grpc <nl> src / core / ext / filters / client_idle / client_idle_filter . cc <nl> src / core / ext / filters / deadline / deadline_filter . cc <nl> src / core / ext / filters / http / client / http_client_filter . cc <nl> - src / core / ext / filters / http / client / util . cc <nl> src / core / ext / filters / http / client_authority_filter . cc <nl> src / core / ext / filters / http / http_filters_plugin . cc <nl> src / core / ext / filters / http / message_compress / message_compress_filter . cc <nl> add_library ( grpc_unsecure <nl> src / core / ext / filters / client_idle / client_idle_filter . cc <nl> src / core / ext / filters / deadline / deadline_filter . cc <nl> src / core / ext / filters / http / client / http_client_filter . cc <nl> - src / core / ext / filters / http / client / util . cc <nl> src / core / ext / filters / http / client_authority_filter . cc <nl> src / core / ext / filters / http / http_filters_plugin . cc <nl> src / core / ext / filters / http / message_compress / message_compress_filter . cc <nl> mmm a / Makefile <nl> ppp b / Makefile <nl> LIBGRPC_SRC = \ <nl> src / core / ext / filters / client_idle / client_idle_filter . cc \ <nl> src / core / ext / filters / deadline / deadline_filter . cc \ <nl> src / core / ext / filters / http / client / http_client_filter . cc \ <nl> - src / core / ext / filters / http / client / util . cc \ <nl> src / core / ext / filters / http / client_authority_filter . cc \ <nl> src / core / ext / filters / http / http_filters_plugin . cc \ <nl> src / core / ext / filters / http / message_compress / message_compress_filter . cc \ <nl> LIBGRPC_UNSECURE_SRC = \ <nl> src / core / ext / filters / client_idle / client_idle_filter . cc \ <nl> src / core / ext / filters / deadline / deadline_filter . cc \ <nl> src / core / ext / filters / http / client / http_client_filter . cc \ <nl> - src / core / ext / filters / http / client / util . 
cc \ <nl> src / core / ext / filters / http / client_authority_filter . cc \ <nl> src / core / ext / filters / http / http_filters_plugin . cc \ <nl> src / core / ext / filters / http / message_compress / message_compress_filter . cc \ <nl> mmm a / build_autogenerated . yaml <nl> ppp b / build_autogenerated . yaml <nl> libs : <nl> - src / core / ext / filters / client_channel / xds / xds_client_stats . h <nl> - src / core / ext / filters / deadline / deadline_filter . h <nl> - src / core / ext / filters / http / client / http_client_filter . h <nl> - - src / core / ext / filters / http / client / util . h <nl> - src / core / ext / filters / http / client_authority_filter . h <nl> - src / core / ext / filters / http / message_compress / message_compress_filter . h <nl> - src / core / ext / filters / http / message_compress / message_decompress_filter . h <nl> libs : <nl> - src / core / ext / filters / client_idle / client_idle_filter . cc <nl> - src / core / ext / filters / deadline / deadline_filter . cc <nl> - src / core / ext / filters / http / client / http_client_filter . cc <nl> - - src / core / ext / filters / http / client / util . cc <nl> - src / core / ext / filters / http / client_authority_filter . cc <nl> - src / core / ext / filters / http / http_filters_plugin . cc <nl> - src / core / ext / filters / http / message_compress / message_compress_filter . cc <nl> libs : <nl> - src / core / ext / filters / client_channel / xds / xds_client_stats . h <nl> - src / core / ext / filters / deadline / deadline_filter . h <nl> - src / core / ext / filters / http / client / http_client_filter . h <nl> - - src / core / ext / filters / http / client / util . h <nl> - src / core / ext / filters / http / client_authority_filter . h <nl> - src / core / ext / filters / http / message_compress / message_compress_filter . h <nl> - src / core / ext / filters / http / message_compress / message_decompress_filter . h <nl> libs : <nl> - src / core / ext / filters / client_idle / client_idle_filter . cc <nl> - src / core / ext / filters / deadline / deadline_filter . cc <nl> - src / core / ext / filters / http / client / http_client_filter . cc <nl> - - src / core / ext / filters / http / client / util . cc <nl> - src / core / ext / filters / http / client_authority_filter . cc <nl> - src / core / ext / filters / http / http_filters_plugin . cc <nl> - src / core / ext / filters / http / message_compress / message_compress_filter . cc <nl> mmm a / config . m4 <nl> ppp b / config . m4 <nl> if test " $ PHP_GRPC " ! = " no " ; then <nl> src / core / ext / filters / client_idle / client_idle_filter . cc \ <nl> src / core / ext / filters / deadline / deadline_filter . cc \ <nl> src / core / ext / filters / http / client / http_client_filter . cc \ <nl> - src / core / ext / filters / http / client / util . cc \ <nl> src / core / ext / filters / http / client_authority_filter . cc \ <nl> src / core / ext / filters / http / http_filters_plugin . cc \ <nl> src / core / ext / filters / http / message_compress / message_compress_filter . cc \ <nl> mmm a / config . w32 <nl> ppp b / config . w32 <nl> if ( PHP_GRPC ! = " no " ) { <nl> " src \ \ core \ \ ext \ \ filters \ \ client_idle \ \ client_idle_filter . cc " + <nl> " src \ \ core \ \ ext \ \ filters \ \ deadline \ \ deadline_filter . cc " + <nl> " src \ \ core \ \ ext \ \ filters \ \ http \ \ client \ \ http_client_filter . cc " + <nl> - " src \ \ core \ \ ext \ \ filters \ \ http \ \ client \ \ util . 
cc " + <nl> " src \ \ core \ \ ext \ \ filters \ \ http \ \ client_authority_filter . cc " + <nl> " src \ \ core \ \ ext \ \ filters \ \ http \ \ http_filters_plugin . cc " + <nl> " src \ \ core \ \ ext \ \ filters \ \ http \ \ message_compress \ \ message_compress_filter . cc " + <nl> mmm a / gRPC - C + + . podspec <nl> ppp b / gRPC - C + + . podspec <nl> Pod : : Spec . new do | s | <nl> ' src / core / ext / filters / client_channel / xds / xds_client_stats . h ' , <nl> ' src / core / ext / filters / deadline / deadline_filter . h ' , <nl> ' src / core / ext / filters / http / client / http_client_filter . h ' , <nl> - ' src / core / ext / filters / http / client / util . h ' , <nl> ' src / core / ext / filters / http / client_authority_filter . h ' , <nl> ' src / core / ext / filters / http / message_compress / message_compress_filter . h ' , <nl> ' src / core / ext / filters / http / message_compress / message_decompress_filter . h ' , <nl> Pod : : Spec . new do | s | <nl> ' src / core / ext / filters / client_channel / xds / xds_client_stats . h ' , <nl> ' src / core / ext / filters / deadline / deadline_filter . h ' , <nl> ' src / core / ext / filters / http / client / http_client_filter . h ' , <nl> - ' src / core / ext / filters / http / client / util . h ' , <nl> ' src / core / ext / filters / http / client_authority_filter . h ' , <nl> ' src / core / ext / filters / http / message_compress / message_compress_filter . h ' , <nl> ' src / core / ext / filters / http / message_compress / message_decompress_filter . h ' , <nl> mmm a / gRPC - Core . podspec <nl> ppp b / gRPC - Core . podspec <nl> Pod : : Spec . new do | s | <nl> ' src / core / ext / filters / deadline / deadline_filter . h ' , <nl> ' src / core / ext / filters / http / client / http_client_filter . cc ' , <nl> ' src / core / ext / filters / http / client / http_client_filter . h ' , <nl> - ' src / core / ext / filters / http / client / util . cc ' , <nl> - ' src / core / ext / filters / http / client / util . h ' , <nl> ' src / core / ext / filters / http / client_authority_filter . cc ' , <nl> ' src / core / ext / filters / http / client_authority_filter . h ' , <nl> ' src / core / ext / filters / http / http_filters_plugin . cc ' , <nl> Pod : : Spec . new do | s | <nl> ' src / core / ext / filters / client_channel / xds / xds_client_stats . h ' , <nl> ' src / core / ext / filters / deadline / deadline_filter . h ' , <nl> ' src / core / ext / filters / http / client / http_client_filter . h ' , <nl> - ' src / core / ext / filters / http / client / util . h ' , <nl> ' src / core / ext / filters / http / client_authority_filter . h ' , <nl> ' src / core / ext / filters / http / message_compress / message_compress_filter . h ' , <nl> ' src / core / ext / filters / http / message_compress / message_decompress_filter . h ' , <nl> mmm a / grpc . gemspec <nl> ppp b / grpc . gemspec <nl> Gem : : Specification . new do | s | <nl> s . files + = % w ( src / core / ext / filters / deadline / deadline_filter . h ) <nl> s . files + = % w ( src / core / ext / filters / http / client / http_client_filter . cc ) <nl> s . files + = % w ( src / core / ext / filters / http / client / http_client_filter . h ) <nl> - s . files + = % w ( src / core / ext / filters / http / client / util . cc ) <nl> - s . files + = % w ( src / core / ext / filters / http / client / util . h ) <nl> s . files + = % w ( src / core / ext / filters / http / client_authority_filter . cc ) <nl> s . 
files + = % w ( src / core / ext / filters / http / client_authority_filter . h ) <nl> s . files + = % w ( src / core / ext / filters / http / http_filters_plugin . cc ) <nl> mmm a / grpc . gyp <nl> ppp b / grpc . gyp <nl> <nl> ' src / core / ext / filters / client_idle / client_idle_filter . cc ' , <nl> ' src / core / ext / filters / deadline / deadline_filter . cc ' , <nl> ' src / core / ext / filters / http / client / http_client_filter . cc ' , <nl> - ' src / core / ext / filters / http / client / util . cc ' , <nl> ' src / core / ext / filters / http / client_authority_filter . cc ' , <nl> ' src / core / ext / filters / http / http_filters_plugin . cc ' , <nl> ' src / core / ext / filters / http / message_compress / message_compress_filter . cc ' , <nl> <nl> ' src / core / ext / filters / client_idle / client_idle_filter . cc ' , <nl> ' src / core / ext / filters / deadline / deadline_filter . cc ' , <nl> ' src / core / ext / filters / http / client / http_client_filter . cc ' , <nl> - ' src / core / ext / filters / http / client / util . cc ' , <nl> ' src / core / ext / filters / http / client_authority_filter . cc ' , <nl> ' src / core / ext / filters / http / http_filters_plugin . cc ' , <nl> ' src / core / ext / filters / http / message_compress / message_compress_filter . cc ' , <nl> mmm a / package . xml <nl> ppp b / package . xml <nl> <nl> < file baseinstalldir = " / " name = " src / core / ext / filters / deadline / deadline_filter . h " role = " src " / > <nl> < file baseinstalldir = " / " name = " src / core / ext / filters / http / client / http_client_filter . cc " role = " src " / > <nl> < file baseinstalldir = " / " name = " src / core / ext / filters / http / client / http_client_filter . h " role = " src " / > <nl> - < file baseinstalldir = " / " name = " src / core / ext / filters / http / client / util . cc " role = " src " / > <nl> - < file baseinstalldir = " / " name = " src / core / ext / filters / http / client / util . h " role = " src " / > <nl> < file baseinstalldir = " / " name = " src / core / ext / filters / http / client_authority_filter . cc " role = " src " / > <nl> < file baseinstalldir = " / " name = " src / core / ext / filters / http / client_authority_filter . h " role = " src " / > <nl> < file baseinstalldir = " / " name = " src / core / ext / filters / http / http_filters_plugin . cc " role = " src " / > <nl> mmm a / src / core / ext / filters / client_channel / lb_policy / xds / xds_routing . cc <nl> ppp b / src / core / ext / filters / client_channel / lb_policy / xds / xds_routing . cc <nl> <nl> # include " src / core / ext / filters / client_channel / lb_policy_registry . h " <nl> # include " src / core / ext / filters / client_channel / resolver / xds / xds_resolver . h " <nl> # include " src / core / ext / filters / client_channel / xds / xds_api . h " <nl> - # include " src / core / ext / filters / http / client / util . h " <nl> # include " src / core / lib / channel / channel_args . h " <nl> # include " src / core / lib / gpr / string . h " <nl> # include " src / core / lib / gprpp / orphanable . h " <nl> class XdsRoutingLb : public LoadBalancingPolicy { <nl> / / Maintains an ordered xds route table as provided by RDS response . 
<nl> using RouteTable = std : : vector < Route > ; <nl> <nl> - RoutePicker ( RouteTable route_table , std : : string user_agent , <nl> - RefCountedPtr < XdsRoutingLbConfig > config ) <nl> - : route_table_ ( std : : move ( route_table ) ) , <nl> - user_agent_ ( std : : move ( user_agent ) ) , <nl> - config_ ( std : : move ( config ) ) { } <nl> + explicit RoutePicker ( RouteTable route_table , <nl> + RefCountedPtr < XdsRoutingLbConfig > config ) <nl> + : route_table_ ( std : : move ( route_table ) ) , config_ ( std : : move ( config ) ) { } <nl> <nl> PickResult Pick ( PickArgs args ) override ; <nl> <nl> private : <nl> RouteTable route_table_ ; <nl> - / / Storing user_agent generated from args from http layer . <nl> - std : : string user_agent_ ; <nl> / / Take a reference to config so that we can use <nl> / / XdsApi : : RdsUpdate : : RdsRoute : : Matchers from it . <nl> RefCountedPtr < XdsRoutingLbConfig > config_ ; <nl> class XdsRoutingLb : public LoadBalancingPolicy { <nl> <nl> / / Children . <nl> std : : map < std : : string , OrphanablePtr < XdsRoutingChild > > actions_ ; <nl> - <nl> - / / Storing user_agent generated from args from http layer . <nl> - std : : string user_agent_ ; <nl> } ; <nl> <nl> / / <nl> absl : : optional < absl : : string_view > GetMetadataValue ( <nl> <nl> bool HeaderMatchHelper ( <nl> const XdsApi : : RdsUpdate : : RdsRoute : : Matchers : : HeaderMatcher & header_matcher , <nl> - LoadBalancingPolicy : : MetadataInterface * initial_metadata , <nl> - const std : : string & user_agent , absl : : string_view deadline ) { <nl> - std : : string concatenated_value ; <nl> - absl : : optional < absl : : string_view > value ; <nl> - if ( header_matcher . name = = " grpc - tags - bin " | | <nl> - header_matcher . name = = " grpc - trace - bin " | | <nl> - header_matcher . name = = " grpc - previous - rpc - attempts " ) { <nl> - value = absl : : nullopt ; <nl> - } else if ( header_matcher . name = = " content - type " ) { <nl> - value = " application / grpc " ; <nl> - } else if ( header_matcher . name = = " user - agent " ) { <nl> - value = user_agent ; <nl> - } else if ( header_matcher . name = = " grpc - timeout " ) { <nl> - value = deadline ; <nl> - } else { <nl> - value = GetMetadataValue ( header_matcher . name , initial_metadata , <nl> - & concatenated_value ) ; <nl> - } <nl> + LoadBalancingPolicy : : MetadataInterface * initial_metadata ) { <nl> + auto value = GetMetadataValue ( header_matcher . name , initial_metadata ) ; <nl> if ( ! value . has_value ( ) ) { <nl> if ( header_matcher . type = = XdsApi : : RdsUpdate : : RdsRoute : : Matchers : : <nl> HeaderMatcher : : HeaderMatcherType : : PRESENT ) { <nl> bool HeaderMatchHelper ( <nl> } <nl> <nl> bool HeadersMatch ( <nl> + LoadBalancingPolicy : : PickArgs args , <nl> const std : : vector < XdsApi : : RdsUpdate : : RdsRoute : : Matchers : : HeaderMatcher > & <nl> - header_matchers , <nl> - LoadBalancingPolicy : : MetadataInterface * initial_metadata , <nl> - const std : : string & user_agent , absl : : string_view deadline ) { <nl> + header_matchers ) { <nl> for ( const auto & header_matcher : header_matchers ) { <nl> - bool match = HeaderMatchHelper ( header_matcher , initial_metadata , user_agent , <nl> - deadline ) ; <nl> + bool match = HeaderMatchHelper ( header_matcher , args . initial_metadata ) ; <nl> if ( header_matcher . invert_match ) match = ! match ; <nl> if ( ! match ) return false ; <nl> } <nl> XdsRoutingLb : : PickResult XdsRoutingLb : : RoutePicker : : Pick ( PickArgs args ) { <nl> / / Path matching . 
<nl> if ( ! PathMatch ( args . path , route . matchers - > path_matcher ) ) continue ; <nl> / / Header Matching . <nl> - if ( ! HeadersMatch ( route . matchers - > header_matchers , args . initial_metadata , <nl> - user_agent_ , <nl> - args . call_state - > ExperimentalGetCallAttribute ( <nl> - kCallAttributeDeadline ) ) ) <nl> - continue ; <nl> + if ( ! HeadersMatch ( args , route . matchers - > header_matchers ) ) continue ; <nl> / / Match fraction check <nl> if ( route . matchers - > fraction_per_million . has_value ( ) & & <nl> - ! UnderFraction ( route . matchers - > fraction_per_million . value ( ) ) ) { <nl> + ! UnderFraction ( route . matchers - > fraction_per_million . value ( ) ) ) <nl> continue ; <nl> - } <nl> / / Found a match <nl> return route . picker - > Pick ( args ) ; <nl> } <nl> XdsRoutingLb : : PickResult XdsRoutingLb : : RoutePicker : : Pick ( PickArgs args ) { <nl> / / XdsRoutingLb <nl> / / <nl> <nl> - XdsRoutingLb : : XdsRoutingLb ( Args args ) <nl> - : LoadBalancingPolicy ( std : : move ( args ) ) , <nl> - user_agent_ ( GenerateUserAgentFromArgs ( args . args , " chttp2 " ) ) { } <nl> + XdsRoutingLb : : XdsRoutingLb ( Args args ) : LoadBalancingPolicy ( std : : move ( args ) ) { } <nl> <nl> XdsRoutingLb : : ~ XdsRoutingLb ( ) { <nl> if ( GRPC_TRACE_FLAG_ENABLED ( grpc_xds_routing_lb_trace ) ) { <nl> void XdsRoutingLb : : UpdateStateLocked ( ) { <nl> } <nl> route_table . push_back ( std : : move ( route ) ) ; <nl> } <nl> - picker = absl : : make_unique < RoutePicker > ( std : : move ( route_table ) , <nl> - user_agent_ , config_ ) ; <nl> + picker = absl : : make_unique < RoutePicker > ( std : : move ( route_table ) , config_ ) ; <nl> break ; <nl> } <nl> case GRPC_CHANNEL_CONNECTING : <nl> mmm a / src / core / ext / filters / http / client / http_client_filter . cc <nl> ppp b / src / core / ext / filters / http / client / http_client_filter . cc <nl> <nl> # include < grpc / support / log . h > <nl> <nl> # include " src / core / ext / filters / http / client / http_client_filter . h " <nl> - # include " src / core / ext / filters / http / client / util . h " <nl> # include " src / core / lib / gpr / string . h " <nl> # include " src / core / lib / gprpp / manual_constructor . h " <nl> # include " src / core / lib / profiling / timers . h " <nl> static size_t max_payload_size_from_args ( const grpc_channel_args * args ) { <nl> <nl> static grpc_core : : ManagedMemorySlice user_agent_from_args ( <nl> const grpc_channel_args * args , const char * transport_name ) { <nl> - return grpc_core : : ManagedMemorySlice ( <nl> - grpc_core : : GenerateUserAgentFromArgs ( args , transport_name ) . c_str ( ) ) ; <nl> + std : : vector < std : : string > user_agent_fields ; <nl> + <nl> + for ( size_t i = 0 ; args & & i < args - > num_args ; i + + ) { <nl> + if ( 0 = = strcmp ( args - > args [ i ] . key , GRPC_ARG_PRIMARY_USER_AGENT_STRING ) ) { <nl> + if ( args - > args [ i ] . type ! = GRPC_ARG_STRING ) { <nl> + gpr_log ( GPR_ERROR , " Channel argument ' % s ' should be a string " , <nl> + GRPC_ARG_PRIMARY_USER_AGENT_STRING ) ; <nl> + } else { <nl> + user_agent_fields . push_back ( args - > args [ i ] . value . string ) ; <nl> + } <nl> + } <nl> + } <nl> + <nl> + user_agent_fields . push_back ( <nl> + absl : : StrFormat ( " grpc - c / % s ( % s ; % s ) " , grpc_version_string ( ) , <nl> + GPR_PLATFORM_STRING , transport_name ) ) ; <nl> + <nl> + for ( size_t i = 0 ; args & & i < args - > num_args ; i + + ) { <nl> + if ( 0 = = strcmp ( args - > args [ i ] . 
key , GRPC_ARG_SECONDARY_USER_AGENT_STRING ) ) { <nl> + if ( args - > args [ i ] . type ! = GRPC_ARG_STRING ) { <nl> + gpr_log ( GPR_ERROR , " Channel argument ' % s ' should be a string " , <nl> + GRPC_ARG_SECONDARY_USER_AGENT_STRING ) ; <nl> + } else { <nl> + user_agent_fields . push_back ( args - > args [ i ] . value . string ) ; <nl> + } <nl> + } <nl> + } <nl> + <nl> + std : : string user_agent_string = absl : : StrJoin ( user_agent_fields , " " ) ; <nl> + return grpc_core : : ManagedMemorySlice ( user_agent_string . c_str ( ) ) ; <nl> } <nl> <nl> / * Constructor for channel_data * / <nl> deleted file mode 100644 <nl> index d79fb49e763 . . 00000000000 <nl> mmm a / src / core / ext / filters / http / client / util . cc <nl> ppp / dev / null <nl> <nl> - / / <nl> - / / Copyright 2015 gRPC authors . <nl> - / / <nl> - / / Licensed under the Apache License , Version 2 . 0 ( the " License " ) ; <nl> - / / you may not use this file except in compliance with the License . <nl> - / / You may obtain a copy of the License at <nl> - / / <nl> - / / http : / / www . apache . org / licenses / LICENSE - 2 . 0 <nl> - / / <nl> - / / Unless required by applicable law or agreed to in writing , software <nl> - / / distributed under the License is distributed on an " AS IS " BASIS , <nl> - / / WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . <nl> - / / See the License for the specific language governing permissions and <nl> - / / limitations under the License . <nl> - / / <nl> - <nl> - # include < grpc / support / port_platform . h > <nl> - <nl> - # include " absl / strings / str_format . h " <nl> - # include " absl / strings / str_join . h " <nl> - <nl> - # include " src / core / ext / filters / http / client / util . h " <nl> - <nl> - namespace grpc_core { <nl> - std : : string GenerateUserAgentFromArgs ( const grpc_channel_args * args , <nl> - const char * transport_name ) { <nl> - std : : vector < std : : string > user_agent_fields ; <nl> - for ( size_t i = 0 ; args & & i < args - > num_args ; i + + ) { <nl> - if ( 0 = = strcmp ( args - > args [ i ] . key , GRPC_ARG_PRIMARY_USER_AGENT_STRING ) ) { <nl> - if ( args - > args [ i ] . type ! = GRPC_ARG_STRING ) { <nl> - gpr_log ( GPR_ERROR , " Channel argument ' % s ' should be a string " , <nl> - GRPC_ARG_PRIMARY_USER_AGENT_STRING ) ; <nl> - } else { <nl> - user_agent_fields . push_back ( args - > args [ i ] . value . string ) ; <nl> - } <nl> - } <nl> - } <nl> - user_agent_fields . push_back ( <nl> - absl : : StrFormat ( " grpc - c / % s ( % s ; % s ) " , grpc_version_string ( ) , <nl> - GPR_PLATFORM_STRING , transport_name ) ) ; <nl> - for ( size_t i = 0 ; args & & i < args - > num_args ; i + + ) { <nl> - if ( 0 = = strcmp ( args - > args [ i ] . key , GRPC_ARG_SECONDARY_USER_AGENT_STRING ) ) { <nl> - if ( args - > args [ i ] . type ! = GRPC_ARG_STRING ) { <nl> - gpr_log ( GPR_ERROR , " Channel argument ' % s ' should be a string " , <nl> - GRPC_ARG_SECONDARY_USER_AGENT_STRING ) ; <nl> - } else { <nl> - user_agent_fields . push_back ( args - > args [ i ] . value . string ) ; <nl> - } <nl> - } <nl> - } <nl> - return absl : : StrJoin ( user_agent_fields , " " ) ; <nl> - } <nl> - } / / namespace grpc_core <nl> deleted file mode 100644 <nl> index 725c3dba4b4 . . 00000000000 <nl> mmm a / src / core / ext / filters / http / client / util . h <nl> ppp / dev / null <nl> <nl> - / / <nl> - / / Copyright 2015 gRPC authors . <nl> - / / <nl> - / / Licensed under the Apache License , Version 2 . 
0 ( the " License " ) ; <nl> - / / you may not use this file except in compliance with the License . <nl> - / / You may obtain a copy of the License at <nl> - / / <nl> - / / http : / / www . apache . org / licenses / LICENSE - 2 . 0 <nl> - / / <nl> - / / Unless required by applicable law or agreed to in writing , software <nl> - / / distributed under the License is distributed on an " AS IS " BASIS , <nl> - / / WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . <nl> - / / See the License for the specific language governing permissions and <nl> - / / limitations under the License . <nl> - / / <nl> - <nl> - # ifndef GRPC_CORE_EXT_FILTERS_HTTP_CLIENT_UTIL_H <nl> - # define GRPC_CORE_EXT_FILTERS_HTTP_CLIENT_UTIL_H <nl> - <nl> - # include < grpc / support / port_platform . h > <nl> - <nl> - # include " absl / strings / str_join . h " <nl> - <nl> - # include " src / core / lib / channel / channel_stack . h " <nl> - <nl> - namespace grpc_core { <nl> - std : : string GenerateUserAgentFromArgs ( const grpc_channel_args * args , <nl> - const char * transport_name ) ; <nl> - } / / namespace grpc_core <nl> - # endif / * GRPC_CORE_EXT_FILTERS_HTTP_CLIENT_UTIL_H * / <nl> mmm a / src / python / grpcio / grpc_core_dependencies . py <nl> ppp b / src / python / grpcio / grpc_core_dependencies . py <nl> <nl> ' src / core / ext / filters / client_idle / client_idle_filter . cc ' , <nl> ' src / core / ext / filters / deadline / deadline_filter . cc ' , <nl> ' src / core / ext / filters / http / client / http_client_filter . cc ' , <nl> - ' src / core / ext / filters / http / client / util . cc ' , <nl> ' src / core / ext / filters / http / client_authority_filter . cc ' , <nl> ' src / core / ext / filters / http / http_filters_plugin . cc ' , <nl> ' src / core / ext / filters / http / message_compress / message_compress_filter . cc ' , <nl> mmm a / test / cpp / end2end / xds_end2end_test . cc <nl> ppp b / test / cpp / end2end / xds_end2end_test . cc <nl> TEST_P ( LdsRdsTest , XdsRoutingHeadersMatching ) { <nl> gpr_unsetenv ( " GRPC_XDS_EXPERIMENTAL_ROUTING " ) ; <nl> } <nl> <nl> - TEST_P ( LdsRdsTest , XdsRoutingHeadersMatchingSpecialHeaderContentType ) { <nl> - gpr_setenv ( " GRPC_XDS_EXPERIMENTAL_ROUTING " , " true " ) ; <nl> - const char * kNewCluster1Name = " new_cluster_1 " ; <nl> - const size_t kNumEchoRpcs = 100 ; <nl> - SetNextResolution ( { } ) ; <nl> - SetNextResolutionForLbChannelAllBalancers ( ) ; <nl> - / / Populate new EDS resources . <nl> - AdsServiceImpl : : EdsResourceArgs args ( { <nl> - { " locality0 " , GetBackendPorts ( 0 , 1 ) } , <nl> - } ) ; <nl> - AdsServiceImpl : : EdsResourceArgs args1 ( { <nl> - { " locality0 " , GetBackendPorts ( 1 , 2 ) } , <nl> - } ) ; <nl> - balancers_ [ 0 ] - > ads_service ( ) - > SetEdsResource ( <nl> - AdsServiceImpl : : BuildEdsResource ( args ) ) ; <nl> - balancers_ [ 0 ] - > ads_service ( ) - > SetEdsResource ( <nl> - AdsServiceImpl : : BuildEdsResource ( args1 , kNewCluster1Name ) ) ; <nl> - / / Populate new CDS resources . <nl> - Cluster new_cluster1 = balancers_ [ 0 ] - > ads_service ( ) - > default_cluster ( ) ; <nl> - new_cluster1 . set_name ( kNewCluster1Name ) ; <nl> - balancers_ [ 0 ] - > ads_service ( ) - > SetCdsResource ( new_cluster1 ) ; <nl> - / / Populating Route Configurations for LDS . <nl> - RouteConfiguration route_config = <nl> - balancers_ [ 0 ] - > ads_service ( ) - > default_route_config ( ) ; <nl> - auto * route1 = route_config . 
mutable_virtual_hosts ( 0 ) - > mutable_routes ( 0 ) ; <nl> - route1 - > mutable_match ( ) - > set_prefix ( " " ) ; <nl> - auto * header_matcher1 = route1 - > mutable_match ( ) - > add_headers ( ) ; <nl> - header_matcher1 - > set_name ( " content - type " ) ; <nl> - header_matcher1 - > set_exact_match ( " notapplication / grpc " ) ; <nl> - route1 - > mutable_route ( ) - > set_cluster ( kNewCluster1Name ) ; <nl> - auto * default_route = route_config . mutable_virtual_hosts ( 0 ) - > add_routes ( ) ; <nl> - default_route - > mutable_match ( ) - > set_prefix ( " " ) ; <nl> - auto * header_matcher2 = default_route - > mutable_match ( ) - > add_headers ( ) ; <nl> - header_matcher2 - > set_name ( " content - type " ) ; <nl> - header_matcher2 - > set_exact_match ( " application / grpc " ) ; <nl> - default_route - > mutable_route ( ) - > set_cluster ( kDefaultResourceName ) ; <nl> - SetRouteConfiguration ( 0 , route_config ) ; <nl> - / / Make sure the backend is up . <nl> - WaitForAllBackends ( 0 , 1 ) ; <nl> - / / Send RPCs . <nl> - CheckRpcSendOk ( kNumEchoRpcs ) ; <nl> - EXPECT_EQ ( kNumEchoRpcs , backends_ [ 0 ] - > backend_service ( ) - > request_count ( ) ) ; <nl> - EXPECT_EQ ( 0 , backends_ [ 1 ] - > backend_service ( ) - > request_count ( ) ) ; <nl> - const auto & response_state = RouteConfigurationResponseState ( 0 ) ; <nl> - EXPECT_EQ ( response_state . state , AdsServiceImpl : : ResponseState : : ACKED ) ; <nl> - gpr_unsetenv ( " GRPC_XDS_EXPERIMENTAL_ROUTING " ) ; <nl> - } <nl> - <nl> - TEST_P ( LdsRdsTest , XdsRoutingHeadersMatchingSpecialHeaderUserAgent ) { <nl> - gpr_setenv ( " GRPC_XDS_EXPERIMENTAL_ROUTING " , " true " ) ; <nl> - const char * kNewCluster1Name = " new_cluster_1 " ; <nl> - const size_t kNumEchoRpcs = 100 ; <nl> - SetNextResolution ( { } ) ; <nl> - SetNextResolutionForLbChannelAllBalancers ( ) ; <nl> - / / Populate new EDS resources . <nl> - AdsServiceImpl : : EdsResourceArgs args ( { <nl> - { " locality0 " , GetBackendPorts ( 0 , 1 ) } , <nl> - } ) ; <nl> - AdsServiceImpl : : EdsResourceArgs args1 ( { <nl> - { " locality0 " , GetBackendPorts ( 1 , 2 ) } , <nl> - } ) ; <nl> - balancers_ [ 0 ] - > ads_service ( ) - > SetEdsResource ( <nl> - AdsServiceImpl : : BuildEdsResource ( args ) ) ; <nl> - balancers_ [ 0 ] - > ads_service ( ) - > SetEdsResource ( <nl> - AdsServiceImpl : : BuildEdsResource ( args1 , kNewCluster1Name ) ) ; <nl> - / / Populate new CDS resources . <nl> - Cluster new_cluster1 = balancers_ [ 0 ] - > ads_service ( ) - > default_cluster ( ) ; <nl> - new_cluster1 . set_name ( kNewCluster1Name ) ; <nl> - balancers_ [ 0 ] - > ads_service ( ) - > SetCdsResource ( new_cluster1 ) ; <nl> - / / Populating Route Configurations for LDS . <nl> - RouteConfiguration route_config = <nl> - balancers_ [ 0 ] - > ads_service ( ) - > default_route_config ( ) ; <nl> - auto * route1 = route_config . mutable_virtual_hosts ( 0 ) - > mutable_routes ( 0 ) ; <nl> - route1 - > mutable_match ( ) - > set_prefix ( " " ) ; <nl> - auto * header_matcher1 = route1 - > mutable_match ( ) - > add_headers ( ) ; <nl> - header_matcher1 - > set_name ( " user - agent " ) ; <nl> - header_matcher1 - > mutable_safe_regex_match ( ) - > set_regex ( <nl> - " ( does - not - match - grpc - c ) . * " ) ; <nl> - route1 - > mutable_route ( ) - > set_cluster ( kNewCluster1Name ) ; <nl> - auto * default_route = route_config . 
mutable_virtual_hosts ( 0 ) - > add_routes ( ) ; <nl> - default_route - > mutable_match ( ) - > set_prefix ( " " ) ; <nl> - auto * header_matcher2 = default_route - > mutable_match ( ) - > add_headers ( ) ; <nl> - header_matcher2 - > set_name ( " user - agent " ) ; <nl> - / / user - agent string is a 2 - part string like " grpc - c + + / 1 . 31 . 0 - dev <nl> - / / grpc - c / 11 . 0 . 0 " . <nl> - header_matcher2 - > mutable_safe_regex_match ( ) - > set_regex ( <nl> - " ( ( grpc - c ) . * [ 0 - 9 ] + . [ 0 - 9 ] + . [ 0 - 9 ] + . * ) { 2 } " ) ; <nl> - default_route - > mutable_route ( ) - > set_cluster ( kDefaultResourceName ) ; <nl> - SetRouteConfiguration ( 0 , route_config ) ; <nl> - / / Make sure backend is up . <nl> - WaitForAllBackends ( 0 , 1 ) ; <nl> - / / Send RPCs . <nl> - CheckRpcSendOk ( kNumEchoRpcs ) ; <nl> - EXPECT_EQ ( kNumEchoRpcs , backends_ [ 0 ] - > backend_service ( ) - > request_count ( ) ) ; <nl> - EXPECT_EQ ( 0 , backends_ [ 1 ] - > backend_service ( ) - > request_count ( ) ) ; <nl> - const auto & response_state = RouteConfigurationResponseState ( 0 ) ; <nl> - EXPECT_EQ ( response_state . state , AdsServiceImpl : : ResponseState : : ACKED ) ; <nl> - gpr_unsetenv ( " GRPC_XDS_EXPERIMENTAL_ROUTING " ) ; <nl> - } <nl> - <nl> - TEST_P ( LdsRdsTest , XdsRoutingHeadersMatchingSpecialHeaderGrpcTimeout ) { <nl> - gpr_setenv ( " GRPC_XDS_EXPERIMENTAL_ROUTING " , " true " ) ; <nl> - const char * kNewCluster1Name = " new_cluster_1 " ; <nl> - const size_t kNumEchoRpcs = 100 ; <nl> - SetNextResolution ( { } ) ; <nl> - SetNextResolutionForLbChannelAllBalancers ( ) ; <nl> - / / Populate new EDS resources . <nl> - AdsServiceImpl : : EdsResourceArgs args ( { <nl> - { " locality0 " , GetBackendPorts ( 0 , 1 ) } , <nl> - } ) ; <nl> - AdsServiceImpl : : EdsResourceArgs args1 ( { <nl> - { " locality0 " , GetBackendPorts ( 1 , 2 ) } , <nl> - } ) ; <nl> - balancers_ [ 0 ] - > ads_service ( ) - > SetEdsResource ( <nl> - AdsServiceImpl : : BuildEdsResource ( args ) ) ; <nl> - balancers_ [ 0 ] - > ads_service ( ) - > SetEdsResource ( <nl> - AdsServiceImpl : : BuildEdsResource ( args1 , kNewCluster1Name ) ) ; <nl> - / / Populate new CDS resources . <nl> - Cluster new_cluster1 = balancers_ [ 0 ] - > ads_service ( ) - > default_cluster ( ) ; <nl> - new_cluster1 . set_name ( kNewCluster1Name ) ; <nl> - balancers_ [ 0 ] - > ads_service ( ) - > SetCdsResource ( new_cluster1 ) ; <nl> - / / Populating Route Configurations for LDS . <nl> - RouteConfiguration route_config = <nl> - balancers_ [ 0 ] - > ads_service ( ) - > default_route_config ( ) ; <nl> - auto * route1 = route_config . mutable_virtual_hosts ( 0 ) - > mutable_routes ( 0 ) ; <nl> - route1 - > mutable_match ( ) - > set_prefix ( " " ) ; <nl> - auto * header_matcher1 = route1 - > mutable_match ( ) - > add_headers ( ) ; <nl> - header_matcher1 - > set_name ( " grpc - timeout " ) ; <nl> - header_matcher1 - > mutable_safe_regex_match ( ) - > set_regex ( " [ 0 - 9 ] + ( s | h ) " ) ; <nl> - route1 - > mutable_route ( ) - > set_cluster ( kNewCluster1Name ) ; <nl> - auto * default_route = route_config . 
mutable_virtual_hosts ( 0 ) - > add_routes ( ) ; <nl> - default_route - > mutable_match ( ) - > set_prefix ( " " ) ; <nl> - auto * header_matcher2 = default_route - > mutable_match ( ) - > add_headers ( ) ; <nl> - header_matcher2 - > set_name ( " grpc - timeout " ) ; <nl> - header_matcher2 - > mutable_safe_regex_match ( ) - > set_regex ( " [ 0 - 9 ] + ( m | S | M | H ) " ) ; <nl> - default_route - > mutable_route ( ) - > set_cluster ( kDefaultResourceName ) ; <nl> - SetRouteConfiguration ( 0 , route_config ) ; <nl> - / / Make sure backend is up . <nl> - WaitForAllBackends ( 0 , 1 ) ; <nl> - / / Send RPCs . <nl> - CheckRpcSendOk ( kNumEchoRpcs ) ; <nl> - EXPECT_EQ ( kNumEchoRpcs , backends_ [ 0 ] - > backend_service ( ) - > request_count ( ) ) ; <nl> - EXPECT_EQ ( 0 , backends_ [ 1 ] - > backend_service ( ) - > request_count ( ) ) ; <nl> - const auto & response_state = RouteConfigurationResponseState ( 0 ) ; <nl> - EXPECT_EQ ( response_state . state , AdsServiceImpl : : ResponseState : : ACKED ) ; <nl> - gpr_unsetenv ( " GRPC_XDS_EXPERIMENTAL_ROUTING " ) ; <nl> - } <nl> - <nl> - TEST_P ( LdsRdsTest , XdsRoutingHeadersMatchingSpecialCasesToIgnore ) { <nl> - gpr_setenv ( " GRPC_XDS_EXPERIMENTAL_ROUTING " , " true " ) ; <nl> - const char * kNewCluster1Name = " new_cluster_1 " ; <nl> - const char * kNewCluster2Name = " new_cluster_2 " ; <nl> - const char * kNewCluster3Name = " new_cluster_3 " ; <nl> - const size_t kNumEcho1Rpcs = 100 ; <nl> - const size_t kNumEchoRpcs = 5 ; <nl> - SetNextResolution ( { } ) ; <nl> - SetNextResolutionForLbChannelAllBalancers ( ) ; <nl> - / / Populate new EDS resources . <nl> - AdsServiceImpl : : EdsResourceArgs args ( { <nl> - { " locality0 " , GetBackendPorts ( 0 , 1 ) } , <nl> - } ) ; <nl> - AdsServiceImpl : : EdsResourceArgs args1 ( { <nl> - { " locality0 " , GetBackendPorts ( 1 , 2 ) } , <nl> - } ) ; <nl> - AdsServiceImpl : : EdsResourceArgs args2 ( { <nl> - { " locality0 " , GetBackendPorts ( 2 , 3 ) } , <nl> - } ) ; <nl> - AdsServiceImpl : : EdsResourceArgs args3 ( { <nl> - { " locality0 " , GetBackendPorts ( 3 , 4 ) } , <nl> - } ) ; <nl> - balancers_ [ 0 ] - > ads_service ( ) - > SetEdsResource ( <nl> - AdsServiceImpl : : BuildEdsResource ( args ) ) ; <nl> - balancers_ [ 0 ] - > ads_service ( ) - > SetEdsResource ( <nl> - AdsServiceImpl : : BuildEdsResource ( args1 , kNewCluster1Name ) ) ; <nl> - balancers_ [ 0 ] - > ads_service ( ) - > SetEdsResource ( <nl> - AdsServiceImpl : : BuildEdsResource ( args2 , kNewCluster2Name ) ) ; <nl> - balancers_ [ 0 ] - > ads_service ( ) - > SetEdsResource ( <nl> - AdsServiceImpl : : BuildEdsResource ( args3 , kNewCluster3Name ) ) ; <nl> - / / Populate new CDS resources . <nl> - Cluster new_cluster1 = balancers_ [ 0 ] - > ads_service ( ) - > default_cluster ( ) ; <nl> - new_cluster1 . set_name ( kNewCluster1Name ) ; <nl> - balancers_ [ 0 ] - > ads_service ( ) - > SetCdsResource ( new_cluster1 ) ; <nl> - Cluster new_cluster2 = balancers_ [ 0 ] - > ads_service ( ) - > default_cluster ( ) ; <nl> - new_cluster2 . set_name ( kNewCluster2Name ) ; <nl> - balancers_ [ 0 ] - > ads_service ( ) - > SetCdsResource ( new_cluster2 ) ; <nl> - Cluster new_cluster3 = balancers_ [ 0 ] - > ads_service ( ) - > default_cluster ( ) ; <nl> - new_cluster1 . set_name ( kNewCluster3Name ) ; <nl> - balancers_ [ 0 ] - > ads_service ( ) - > SetCdsResource ( new_cluster3 ) ; <nl> - / / Populating Route Configurations for LDS . 
<nl> - RouteConfiguration route_config = <nl> - balancers_ [ 0 ] - > ads_service ( ) - > default_route_config ( ) ; <nl> - auto * route1 = route_config . mutable_virtual_hosts ( 0 ) - > mutable_routes ( 0 ) ; <nl> - route1 - > mutable_match ( ) - > set_prefix ( " / grpc . testing . EchoTest1Service / " ) ; <nl> - auto * header_matcher1 = route1 - > mutable_match ( ) - > add_headers ( ) ; <nl> - header_matcher1 - > set_name ( " grpc - tags - bin " ) ; <nl> - header_matcher1 - > set_exact_match ( " grpc - tags - bin " ) ; <nl> - route1 - > mutable_route ( ) - > set_cluster ( kNewCluster1Name ) ; <nl> - auto route2 = route_config . mutable_virtual_hosts ( 0 ) - > add_routes ( ) ; <nl> - route2 - > mutable_match ( ) - > set_prefix ( " / grpc . testing . EchoTest1Service / " ) ; <nl> - auto * header_matcher2 = route2 - > mutable_match ( ) - > add_headers ( ) ; <nl> - header_matcher2 - > set_name ( " grpc - trace - bin " ) ; <nl> - header_matcher2 - > set_exact_match ( " grpc - trace - bin " ) ; <nl> - route2 - > mutable_route ( ) - > set_cluster ( kNewCluster2Name ) ; <nl> - auto route3 = route_config . mutable_virtual_hosts ( 0 ) - > add_routes ( ) ; <nl> - route3 - > mutable_match ( ) - > set_prefix ( " / grpc . testing . EchoTest1Service / " ) ; <nl> - auto * header_matcher3 = route3 - > mutable_match ( ) - > add_headers ( ) ; <nl> - header_matcher3 - > set_name ( " grpc - previous - rpc - attempts " ) ; <nl> - header_matcher3 - > set_exact_match ( " grpc - previous - rpc - attempts " ) ; <nl> - route3 - > mutable_route ( ) - > set_cluster ( kNewCluster3Name ) ; <nl> - auto * default_route = route_config . mutable_virtual_hosts ( 0 ) - > add_routes ( ) ; <nl> - default_route - > mutable_match ( ) - > set_prefix ( " " ) ; <nl> - default_route - > mutable_route ( ) - > set_cluster ( kDefaultResourceName ) ; <nl> - SetRouteConfiguration ( 0 , route_config ) ; <nl> - / / Send headers which will mismatch each route <nl> - std : : vector < std : : pair < std : : string , std : : string > > metadata = { <nl> - { " grpc - tags - bin " , " grpc - tags - bin " } , <nl> - { " grpc - trace - bin " , " grpc - trace - bin " } , <nl> - { " grpc - previous - rpc - attempts " , " grpc - previous - rpc - attempts " } , <nl> - } ; <nl> - WaitForAllBackends ( 0 , 1 ) ; <nl> - CheckRpcSendOk ( kNumEchoRpcs , RpcOptions ( ) . set_metadata ( metadata ) ) ; <nl> - CheckRpcSendOk ( kNumEcho1Rpcs , RpcOptions ( ) <nl> - . set_rpc_service ( SERVICE_ECHO1 ) <nl> - . set_rpc_method ( METHOD_ECHO1 ) <nl> - . set_metadata ( metadata ) ) ; <nl> - / / Verify that only the default backend got RPCs since all previous routes <nl> - / / were mismatched . <nl> - for ( size_t i = 1 ; i < 4 ; + + i ) { <nl> - EXPECT_EQ ( 0 , backends_ [ i ] - > backend_service ( ) - > request_count ( ) ) ; <nl> - EXPECT_EQ ( 0 , backends_ [ i ] - > backend_service1 ( ) - > request_count ( ) ) ; <nl> - EXPECT_EQ ( 0 , backends_ [ i ] - > backend_service2 ( ) - > request_count ( ) ) ; <nl> - } <nl> - EXPECT_EQ ( kNumEchoRpcs , backends_ [ 0 ] - > backend_service ( ) - > request_count ( ) ) ; <nl> - EXPECT_EQ ( kNumEcho1Rpcs , backends_ [ 0 ] - > backend_service1 ( ) - > request_count ( ) ) ; <nl> - EXPECT_EQ ( 0 , backends_ [ 0 ] - > backend_service2 ( ) - > request_count ( ) ) ; <nl> - const auto & response_state = RouteConfigurationResponseState ( 0 ) ; <nl> - EXPECT_EQ ( response_state . 
state , AdsServiceImpl : : ResponseState : : ACKED ) ; <nl> - gpr_unsetenv ( " GRPC_XDS_EXPERIMENTAL_ROUTING " ) ; <nl> - } <nl> TEST_P ( LdsRdsTest , XdsRoutingRuntimeFractionMatching ) { <nl> gpr_setenv ( " GRPC_XDS_EXPERIMENTAL_ROUTING " , " true " ) ; <nl> const char * kNewCluster1Name = " new_cluster_1 " ; <nl> mmm a / tools / doxygen / Doxyfile . c + + . internal <nl> ppp b / tools / doxygen / Doxyfile . c + + . internal <nl> src / core / ext / filters / deadline / deadline_filter . cc \ <nl> src / core / ext / filters / deadline / deadline_filter . h \ <nl> src / core / ext / filters / http / client / http_client_filter . cc \ <nl> src / core / ext / filters / http / client / http_client_filter . h \ <nl> - src / core / ext / filters / http / client / util . cc \ <nl> - src / core / ext / filters / http / client / util . h \ <nl> src / core / ext / filters / http / client_authority_filter . cc \ <nl> src / core / ext / filters / http / client_authority_filter . h \ <nl> src / core / ext / filters / http / http_filters_plugin . cc \ <nl> mmm a / tools / doxygen / Doxyfile . core . internal <nl> ppp b / tools / doxygen / Doxyfile . core . internal <nl> src / core / ext / filters / deadline / deadline_filter . cc \ <nl> src / core / ext / filters / deadline / deadline_filter . h \ <nl> src / core / ext / filters / http / client / http_client_filter . cc \ <nl> src / core / ext / filters / http / client / http_client_filter . h \ <nl> - src / core / ext / filters / http / client / util . cc \ <nl> - src / core / ext / filters / http / client / util . h \ <nl> src / core / ext / filters / http / client_authority_filter . cc \ <nl> src / core / ext / filters / http / client_authority_filter . h \ <nl> src / core / ext / filters / http / http_filters_plugin . cc \ <nl>
Revert " Adding Fake headers for header matching . "
grpc/grpc
d7565c37a891a22d8f6631c187e053d552d05730
2020-07-17T23:39:05Z
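Note on the reverted helper: GenerateUserAgentFromArgs() assembled the user-agent in a fixed order — any GRPC_ARG_PRIMARY_USER_AGENT_STRING fields, then a generated "grpc-c/<version> (<platform>; <transport>)" field, then any GRPC_ARG_SECONDARY_USER_AGENT_STRING fields, all joined by single spaces. The sketch below reproduces just that ordering in plain C++; kVersion, kPlatform and GenerateUserAgent are illustrative stand-ins, not gRPC API.

#include <iostream>
#include <string>
#include <vector>

// Hypothetical stand-ins for grpc_version_string() and GPR_PLATFORM_STRING;
// the real values come from the gRPC build.
static const char* kVersion = "11.0.0";
static const char* kPlatform = "linux";

// Assembles the user-agent in the same order as the reverted
// GenerateUserAgentFromArgs(): primary fields, the generated grpc-c field,
// then secondary fields, joined by single spaces.
std::string GenerateUserAgent(const std::vector<std::string>& primary,
                              const std::vector<std::string>& secondary,
                              const std::string& transport_name) {
  std::vector<std::string> fields = primary;
  fields.push_back(std::string("grpc-c/") + kVersion + " (" + kPlatform +
                   "; " + transport_name + ")");
  fields.insert(fields.end(), secondary.begin(), secondary.end());
  std::string joined;
  for (size_t i = 0; i < fields.size(); ++i) {
    if (i > 0) joined += " ";
    joined += fields[i];
  }
  return joined;
}

int main() {
  // Prints e.g. "grpc-c++/1.31.0-dev grpc-c/11.0.0 (linux; chttp2)"
  std::cout << GenerateUserAgent({"grpc-c++/1.31.0-dev"}, {}, "chttp2") << "\n";
}

The deleted user-agent regex test above relied on exactly this two-part "grpc-c..." shape (its comment calls the header "a 2-part string"), which is why the revert removes the test and the helper together.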
mmm a / lib / Sema / TypeCheckType . cpp <nl> ppp b / lib / Sema / TypeCheckType . cpp <nl> Type TypeResolver : : resolveAttributedType ( TypeAttributes & attrs , <nl> hasFunctionAttr = true ; <nl> break ; <nl> } <nl> - <nl> + <nl> + / / If we have an @ autoclosure then try resolving the top level type repr <nl> + / / first as it may be pointing to a typealias <nl> + if ( attrs . has ( TAK_autoclosure ) ) { <nl> + if ( auto CITR = dyn_cast < ComponentIdentTypeRepr > ( repr ) ) { <nl> + auto typeAliasResolver = TypeResolverContext : : TypeAliasDecl ; <nl> + if ( auto type = resolveTopLevelIdentTypeComponent ( resolution , CITR , <nl> + typeAliasResolver ) ) { <nl> + if ( auto TAT = dyn_cast < TypeAliasType > ( type . getPointer ( ) ) ) { <nl> + repr = TAT - > getDecl ( ) - > getUnderlyingTypeLoc ( ) . getTypeRepr ( ) ; <nl> + } <nl> + } <nl> + } <nl> + } <nl> + <nl> / / Function attributes require a syntactic function type . <nl> auto * fnRepr = dyn_cast < FunctionTypeRepr > ( repr ) ; <nl> <nl> mmm a / test / attr / attr_autoclosure . swift <nl> ppp b / test / attr / attr_autoclosure . swift <nl> func rdar_30906031 ( in arr : [ Int ] , fn : @ autoclosure ( ) - > Int ) - > Bool { <nl> arr . lazy . filter { $ 0 > = escapableF ( ) } . isEmpty <nl> } <nl> } <nl> + <nl> + / / SR - 2688 <nl> + class Foo { <nl> + typealias FooClosure = ( ) - > String <nl> + func fooFunction ( closure : @ autoclosure FooClosure ) { } / / ok <nl> + } <nl> + <nl> + class Bar { <nl> + typealias BarClosure = ( String ) - > String <nl> + func barFunction ( closure : @ autoclosure BarClosure ) { } / / expected - error { { argument type of @ autoclosure parameter must be ' ( ) ' } } <nl> + } <nl>
Merge remote - tracking branch ' origin / master ' into master - next
apple/swift
cf245b439ff3523d0cbcfd5267f9919f6d608070
2019-01-22T20:50:21Z
mmm a / runtime / CMakeLists . txt <nl> ppp b / runtime / CMakeLists . txt <nl> set ( SWIFTLIB_SOURCES <nl> add_custom_command ( <nl> OUTPUT $ { CMAKE_BINARY_DIR } / lib / swift . swift <nl> COMMAND cat $ { SWIFTLIB_SOURCES } > $ { CMAKE_BINARY_DIR } / lib / swift . swift <nl> - DEPENDS $ { SWIFTLIB_SOURCES } <nl> + DEPENDS $ { SWIFTLIB_SOURCES } swift <nl> COMMENT " FIXME : Concatenating Swift runtime sources into swift . swift " <nl> WORKING_DIRECTORY $ { CMAKE_CURRENT_SOURCE_DIR } ) <nl> <nl>
Add missing CMake dependency : we need to rebuild the Swift lib whenever the swift compiler changes , just in case
apple/swift
b23f678119d8e81e9d8c9806af32a81143655dac
2012-09-14T04:38:58Z
mmm a / tensorflow / python / framework / test_util . py <nl> ppp b / tensorflow / python / framework / test_util . py <nl> def decorator ( self , lazily_remote_copy , * args , * * kwargs ) : <nl> def run_in_graph_and_eager_modes ( func = None , <nl> config = None , <nl> use_gpu = True , <nl> - reset_test = True , <nl> assert_no_eager_garbage = False ) : <nl> " " " Execute the decorated test with and without enabling eager execution . <nl> <nl> def test_foo ( self ) : <nl> config : An optional config_pb2 . ConfigProto to use to configure the session <nl> when executing graphs . <nl> use_gpu : If True , attempt to run as many operations as possible on GPU . <nl> - reset_test : If True , tearDown and SetUp the test case between the two <nl> - executions of the test ( once with and once without eager execution ) . <nl> assert_no_eager_garbage : If True , sets DEBUG_SAVEALL on the garbage <nl> collector and asserts that no extra garbage has been created when running <nl> the test with eager execution enabled . This will fail if there are <nl> def run_eagerly ( self , * * kwargs ) : <nl> run_eagerly = assert_no_new_tensors ( <nl> assert_no_garbage_created ( run_eagerly ) ) <nl> <nl> - if reset_test : <nl> - # This decorator runs the wrapped test twice . <nl> - # Reset the test environment between runs . <nl> - self . tearDown ( ) <nl> - self . _tempdir = None <nl> + # This decorator runs the wrapped test twice . <nl> + # Reset the test environment between runs . <nl> + self . tearDown ( ) <nl> + self . _tempdir = None <nl> # Create a new graph for the eagerly executed version of this test for <nl> # better isolation . <nl> graph_for_eager_test = ops . Graph ( ) <nl> with graph_for_eager_test . as_default ( ) , context . eager_mode ( ) : <nl> - if reset_test : <nl> - self . setUp ( ) <nl> + self . setUp ( ) <nl> run_eagerly ( self , * * kwargs ) <nl> ops . dismantle_graph ( graph_for_eager_test ) <nl> <nl> mmm a / tensorflow / python / training / adadelta_test . py <nl> ppp b / tensorflow / python / training / adadelta_test . py <nl> def testBasic ( self ) : <nl> with self . cached_session ( ) : <nl> self . doTestBasic ( use_resource = False ) <nl> <nl> - @ test_util . run_in_graph_and_eager_modes ( reset_test = True ) <nl> + @ test_util . run_in_graph_and_eager_modes <nl> def testResourceBasic ( self ) : <nl> self . doTestBasic ( use_resource = True ) <nl> <nl> mmm a / tensorflow / python / training / adagrad_test . py <nl> ppp b / tensorflow / python / training / adagrad_test . py <nl> def doTestBasic ( self , <nl> def testBasic ( self ) : <nl> self . doTestBasic ( use_locking = False ) <nl> <nl> - @ test_util . run_in_graph_and_eager_modes ( reset_test = True ) <nl> + @ test_util . run_in_graph_and_eager_modes <nl> def testBasicResource ( self ) : <nl> self . doTestBasic ( use_locking = False , use_resource = True ) <nl> <nl> mmm a / tensorflow / python / training / adam_test . py <nl> ppp b / tensorflow / python / training / adam_test . py <nl> def testBasic ( self ) : <nl> with self . cached_session ( ) : <nl> self . doTestBasic ( use_resource = False ) <nl> <nl> - @ test_util . run_in_graph_and_eager_modes ( reset_test = True ) <nl> + @ test_util . run_in_graph_and_eager_modes <nl> def testResourceBasic ( self ) : <nl> self . doTestBasic ( use_resource = True ) <nl> <nl> mmm a / tensorflow / python / training / momentum_test . py <nl> ppp b / tensorflow / python / training / momentum_test . py <nl> def testBasic ( self ) : <nl> with self . cached_session ( ) : <nl> self . 
doTestBasic ( use_resource = False ) <nl> <nl> - @ test_util . run_in_graph_and_eager_modes ( reset_test = True ) <nl> + @ test_util . run_in_graph_and_eager_modes <nl> def testResourceBasic ( self ) : <nl> self . doTestBasic ( use_resource = True ) <nl> <nl> def testSparseNesterovMomentum ( self ) : <nl> self . assertAllClose ( var0_np , self . evaluate ( var0 ) ) <nl> self . assertAllClose ( var1_np , self . evaluate ( var1 ) ) <nl> <nl> - @ test_util . run_in_graph_and_eager_modes ( reset_test = True ) <nl> + @ test_util . run_in_graph_and_eager_modes <nl> def testMinimizeSparseResourceVariable ( self ) : <nl> for dtype in [ dtypes . half , dtypes . float32 , dtypes . float64 ] : <nl> # This test invokes the ResourceSparseApplyMomentum operation , which <nl> def loss ( ) : <nl> # Validate updated params <nl> self . assertAllCloseAccordingToType ( [ [ - 111 , - 138 ] ] , self . evaluate ( var0 ) ) <nl> <nl> - @ test_util . run_in_graph_and_eager_modes ( reset_test = True ) <nl> + @ test_util . run_in_graph_and_eager_modes <nl> def testMinimizeWith2DIndicesForEmbeddingLookup ( self ) : <nl> # This test invokes the ResourceSparseApplyMomentum operation , which <nl> # did not have a registered GPU kernel as of April 2018 . With graph <nl>
Remove reset_test argument from test_util . run_in_graph_and_eager_modes
tensorflow/tensorflow
2819e2a2728daac29cc8a8b524a6c1c4fb032f58
2020-04-30T22:09:55Z
mmm a / js / server / tests / aql - multi - modify . js <nl> ppp b / js / server / tests / aql - multi - modify . js <nl> <nl> / * jshint globalstrict : false , strict : false , sub : true , maxlen : 500 * / <nl> - / * global assertEqual , assertFalse , assertNull , assertNotNull , assertTrue , <nl> - assertNotEqual , assertUndefined , fail , AQL_EXECUTE * / <nl> + / * global assertEqual , assertFalse , assertTrue , AQL_EXECUTE * / <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / / @ brief tests for multi - modify operations <nl> var internal = require ( " internal " ) ; <nl> var db = require ( " org / arangodb " ) . db ; <nl> var jsunity = require ( " jsunity " ) ; <nl> var helper = require ( " org / arangodb / aql - helper " ) ; <nl> - var getModifyQueryResults = helper . getModifyQueryResults ; <nl> - var getModifyQueryResultsRaw = helper . getModifyQueryResultsRaw ; <nl> - var isEqual = helper . isEqual ; <nl> var assertQueryError = helper . assertQueryError ; <nl> - var errors = internal . errors ; <nl> - <nl> - var sanitizeStats = function ( stats ) { <nl> - / / remove these members from the stats because they don ' t matter <nl> - / / for the comparisons <nl> - delete stats . scannedFull ; <nl> - delete stats . scannedIndex ; <nl> - delete stats . filtered ; <nl> - return stats ; <nl> - } ; <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / / @ brief test suite <nl>
jslint
arangodb/arangodb
b4042232123c3c18afd2be5e2fe5b8718bdb755b
2015-12-03T12:07:17Z
mmm a / src / parsing / parser . cc <nl> ppp b / src / parsing / parser . cc <nl> FunctionLiteral * Parser : : ParseLazy ( Isolate * isolate , ParseInfo * info ) { <nl> <nl> if ( FLAG_trace_parse & & result ! = NULL ) { <nl> double ms = timer . Elapsed ( ) . InMillisecondsF ( ) ; <nl> + / / We need to make sure that the debug - name is available . <nl> + ast_value_factory ( ) - > Internalize ( isolate ) ; <nl> std : : unique_ptr < char [ ] > name_chars = result - > debug_name ( ) - > ToCString ( ) ; <nl> PrintF ( " [ parsing function : % s - took % 0 . 3f ms ] \ n " , name_chars . get ( ) , ms ) ; <nl> } <nl>
Internalize the ast - value - factory before tracing in trace - parse . Otherwise the string - handle isn ' t available yet
v8/v8
cab644f3f26366199cbf0c7711db112aab253b2f
2016-09-20T10:30:50Z
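Note: the fix works because strings produced during parsing live only inside the AstValueFactory until Internalize(isolate) copies them into heap-backed handles, so reading debug_name() for the trace line requires internalization first. Below is a generic C++ sketch of the same materialize-before-read ordering; LazyName and TraceParse are hypothetical stand-ins, not V8 API.

#include <cassert>
#include <cstdio>
#include <string>
#include <utility>

// Hypothetical stand-in for a value that only becomes readable after an
// explicit internalization step, like AstValueFactory-backed strings in V8.
class LazyName {
 public:
  explicit LazyName(std::string raw) : raw_(std::move(raw)) {}
  void Internalize() { internalized_ = true; }
  const std::string& ToCString() const {
    // Reading before Internalize() is the bug the commit fixes.
    assert(internalized_ && "Internalize() must run before ToCString()");
    return raw_;
  }

 private:
  std::string raw_;
  bool internalized_ = false;
};

// Mirrors the fixed ordering in Parser::ParseLazy: internalize first, then
// read the debug name for the trace line.
void TraceParse(LazyName& name, double ms) {
  name.Internalize();
  std::printf("[parsing function: %s - took %0.3f ms]\n",
              name.ToCString().c_str(), ms);
}

int main() {
  LazyName name("foo");
  TraceParse(name, 0.123);
}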
mmm a / doc / tutorials / core / basic_linear_transform / basic_linear_transform . markdown <nl> ppp b / doc / tutorials / core / basic_linear_transform / basic_linear_transform . markdown <nl> The following image has been corrected with : \ f $ \ alpha = 1 . 3 \ f $ and \ f $ \ beta <nl> ! [ By Visem ( Own work ) [ CC BY - SA 3 . 0 ] , via Wikimedia Commons ] ( images / Basic_Linear_Transform_Tutorial_linear_transform_correction . jpg ) <nl> <nl> The overall brightness has been improved but you can notice that the clouds are now greatly saturated due to the numerical saturation <nl> - of the implementation used . A custom method that preserves the original color range can of course be implemented instead . <nl> + of the implementation used ( [ highlight clipping ] ( https : / / en . wikipedia . org / wiki / Clipping_ ( photography ) ) in photography ) . <nl> <nl> The following image has been corrected with : \ f $ \ gamma = 0 . 4 \ f $ . <nl> <nl> ! [ By Visem ( Own work ) [ CC BY - SA 3 . 0 ] , via Wikimedia Commons ] ( images / Basic_Linear_Transform_Tutorial_gamma_correction . jpg ) <nl> <nl> - The gamma correction should tend to add less saturation effect but should introduce some other type of color artifacts instead . <nl> + The gamma correction should tend to add less saturation effect as the mapping is non linear and there is no numerical saturation possible as in the previous method . <nl> <nl> ! [ Left : histogram after alpha , beta correction ; Center : histogram of the original image ; Right : histogram after the gamma correction ] ( images / Basic_Linear_Transform_Tutorial_histogram_compare . png ) <nl> <nl> and are not intended to be used as a replacement of a raster graphics editor ! * * <nl> <nl> # # # Code <nl> <nl> - Code for the tutorial is [ here ] ( changing_contrast_brightness_image . cpp ) . Code for the gamma correction : <nl> + Code for the tutorial is [ here ] ( https : / / github . com / opencv / opencv / blob / master / samples / cpp / tutorial_code / ImgProc / changing_contrast_brightness_image / changing_contrast_brightness_image . cpp ) . <nl> + Code for the gamma correction : <nl> <nl> @ snippet changing_contrast_brightness_image . cpp changing - contrast - brightness - gamma - correction <nl> <nl> A look - up table is used to improve the performance of the computation as only 256 values needs to be calculated once . <nl> + <nl> + # # # Additional resources <nl> + <nl> + - [ Gamma correction in graphics rendering ] ( https : / / learnopengl . com / # ! Advanced - Lighting / Gamma - Correction ) <nl> + - [ Gamma correction and images displayed on CRT monitors ] ( http : / / www . graphics . cornell . edu / ~ westin / gamma / gamma . html ) <nl> + - [ Digital exposure techniques ] ( http : / / www . cambridgeincolour . com / tutorials / digital - exposure - techniques . htm ) <nl>
Fix tutorial code link in basic_linear_transform tutorial . Add some resource links .
opencv/opencv
00d2e279d77abd79c48413979d4b53020ffd1668
2017-01-17T16:47:27Z
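Note on the look-up table mentioned at the end of the diff: because 8-bit pixels take only 256 possible values, the gamma curve can be precomputed once and applied to the whole image with cv::LUT. A minimal self-contained sketch of that approach follows, assuming an 8-bit input image; the file names and the gamma value are placeholders.

#include <cmath>
#include <opencv2/core.hpp>
#include <opencv2/imgcodecs.hpp>

int main() {
  const double gamma = 0.4;               // placeholder, the tutorial's example value
  cv::Mat img = cv::imread("input.jpg");  // hypothetical input path
  if (img.empty()) return 1;

  // Precompute the gamma mapping once for all 256 possible intensities.
  cv::Mat lookUpTable(1, 256, CV_8U);
  uchar* p = lookUpTable.ptr();
  for (int i = 0; i < 256; ++i)
    p[i] = cv::saturate_cast<uchar>(std::pow(i / 255.0, gamma) * 255.0);

  cv::Mat res;
  cv::LUT(img, lookUpTable, res);         // applies the table to every channel
  cv::imwrite("output.jpg", res);
  return 0;
}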
mmm a / src / core / file_sys / archive_backend . h <nl> ppp b / src / core / file_sys / archive_backend . h <nl> class ArchiveBackend : NonCopyable { <nl> * Open a file specified by its path , using the specified mode <nl> * @ param path Path relative to the archive <nl> * @ param mode Mode to open the file with <nl> - * @ return Opened file , or nullptr <nl> + * @ return Opened file , or error code <nl> * / <nl> - virtual std : : unique_ptr < FileBackend > OpenFile ( const Path & path , const Mode mode ) const = 0 ; <nl> + virtual ResultVal < std : : unique_ptr < FileBackend > > OpenFile ( const Path & path , const Mode mode ) const = 0 ; <nl> <nl> / * * <nl> * Delete a file specified by its path <nl> mmm a / src / core / file_sys / disk_archive . cpp <nl> ppp b / src / core / file_sys / disk_archive . cpp <nl> <nl> <nl> namespace FileSys { <nl> <nl> - std : : unique_ptr < FileBackend > DiskArchive : : OpenFile ( const Path & path , const Mode mode ) const { <nl> + ResultVal < std : : unique_ptr < FileBackend > > DiskArchive : : OpenFile ( const Path & path , const Mode mode ) const { <nl> LOG_DEBUG ( Service_FS , " called path = % s mode = % 01X " , path . DebugStr ( ) . c_str ( ) , mode . hex ) ; <nl> auto file = Common : : make_unique < DiskFile > ( * this , path , mode ) ; <nl> - if ( ! file - > Open ( ) ) <nl> - return nullptr ; <nl> - return std : : move ( file ) ; <nl> + ResultCode result = file - > Open ( ) ; <nl> + if ( result . IsError ( ) ) <nl> + return result ; <nl> + return MakeResult < std : : unique_ptr < FileBackend > > ( std : : move ( file ) ) ; <nl> } <nl> <nl> ResultCode DiskArchive : : DeleteFile ( const Path & path ) const { <nl> DiskFile : : DiskFile ( const DiskArchive & archive , const Path & path , const Mode mode <nl> this - > mode . hex = mode . hex ; <nl> } <nl> <nl> - bool DiskFile : : Open ( ) { <nl> - if ( ! mode . create_flag & & ! FileUtil : : Exists ( path ) ) { <nl> - LOG_ERROR ( Service_FS , " Non - existing file % s can ' t be open without mode create . " , path . c_str ( ) ) ; <nl> - return false ; <nl> + ResultCode DiskFile : : Open ( ) { <nl> + if ( FileUtil : : IsDirectory ( path ) ) <nl> + return ResultCode ( ErrorDescription : : FS_NotAFile , ErrorModule : : FS , ErrorSummary : : Canceled , ErrorLevel : : Status ) ; <nl> + <nl> + / / Specifying only the Create flag is invalid <nl> + if ( mode . create_flag & & ! mode . read_flag & & ! mode . write_flag ) { <nl> + return ResultCode ( ErrorDescription : : FS_InvalidOpenFlags , ErrorModule : : FS , ErrorSummary : : Canceled , ErrorLevel : : Status ) ; <nl> } <nl> <nl> - std : : string mode_string ; <nl> - if ( mode . create_flag ) <nl> - mode_string = " w + " ; <nl> - else if ( mode . write_flag ) <nl> - mode_string = " r + " ; / / Files opened with Write access can be read from <nl> + if ( ! FileUtil : : Exists ( path ) ) { <nl> + if ( ! mode . create_flag ) { <nl> + LOG_ERROR ( Service_FS , " Non - existing file % s can ' t be open without mode create . " , path . c_str ( ) ) ; <nl> + return ResultCode ( ErrorDescription : : FS_NotFound , ErrorModule : : FS , ErrorSummary : : NotFound , ErrorLevel : : Status ) ; <nl> + } else { <nl> + / / Create the file <nl> + FileUtil : : CreateEmptyFile ( path ) ; <nl> + } <nl> + } <nl> + <nl> + std : : string mode_string = " " ; <nl> + if ( mode . write_flag ) <nl> + mode_string + = " r + " ; / / Files opened with Write access can be read from <nl> else if ( mode . 
read_flag ) <nl> - mode_string = " r " ; <nl> + mode_string + = " r " ; <nl> <nl> / / Open the file in binary mode , to avoid problems with CR / LF on Windows systems <nl> mode_string + = " b " ; <nl> <nl> file = Common : : make_unique < FileUtil : : IOFile > ( path , mode_string . c_str ( ) ) ; <nl> - return file - > IsOpen ( ) ; <nl> + if ( file - > IsOpen ( ) ) <nl> + return RESULT_SUCCESS ; <nl> + return ResultCode ( ErrorDescription : : FS_NotFound , ErrorModule : : FS , ErrorSummary : : NotFound , ErrorLevel : : Status ) ; <nl> } <nl> <nl> ResultVal < size_t > DiskFile : : Read ( const u64 offset , const size_t length , u8 * buffer ) const { <nl> mmm a / src / core / file_sys / disk_archive . h <nl> ppp b / src / core / file_sys / disk_archive . h <nl> class DiskArchive : public ArchiveBackend { <nl> <nl> virtual std : : string GetName ( ) const override { return " DiskArchive : " + mount_point ; } <nl> <nl> - std : : unique_ptr < FileBackend > OpenFile ( const Path & path , const Mode mode ) const override ; <nl> + ResultVal < std : : unique_ptr < FileBackend > > OpenFile ( const Path & path , const Mode mode ) const override ; <nl> ResultCode DeleteFile ( const Path & path ) const override ; <nl> bool RenameFile ( const Path & src_path , const Path & dest_path ) const override ; <nl> bool DeleteDirectory ( const Path & path ) const override ; <nl> class DiskFile : public FileBackend { <nl> public : <nl> DiskFile ( const DiskArchive & archive , const Path & path , const Mode mode ) ; <nl> <nl> - bool Open ( ) override ; <nl> + ResultCode Open ( ) override ; <nl> ResultVal < size_t > Read ( u64 offset , size_t length , u8 * buffer ) const override ; <nl> ResultVal < size_t > Write ( u64 offset , size_t length , bool flush , const u8 * buffer ) const override ; <nl> u64 GetSize ( ) const override ; <nl> mmm a / src / core / file_sys / file_backend . h <nl> ppp b / src / core / file_sys / file_backend . h <nl> class FileBackend : NonCopyable { <nl> <nl> / * * <nl> * Open the file <nl> - * @ return true if the file opened correctly <nl> + * @ return Result of the file operation <nl> * / <nl> - virtual bool Open ( ) = 0 ; <nl> + virtual ResultCode Open ( ) = 0 ; <nl> <nl> / * * <nl> * Read data from the file <nl> mmm a / src / core / file_sys / ivfc_archive . cpp <nl> ppp b / src / core / file_sys / ivfc_archive . cpp <nl> std : : string IVFCArchive : : GetName ( ) const { <nl> return " IVFC " ; <nl> } <nl> <nl> - std : : unique_ptr < FileBackend > IVFCArchive : : OpenFile ( const Path & path , const Mode mode ) const { <nl> - return Common : : make_unique < IVFCFile > ( romfs_file , data_offset , data_size ) ; <nl> + ResultVal < std : : unique_ptr < FileBackend > > IVFCArchive : : OpenFile ( const Path & path , const Mode mode ) const { <nl> + return MakeResult < std : : unique_ptr < FileBackend > > ( Common : : make_unique < IVFCFile > ( romfs_file , data_offset , data_size ) ) ; <nl> } <nl> <nl> ResultCode IVFCArchive : : DeleteFile ( const Path & path ) const { <nl> mmm a / src / core / file_sys / ivfc_archive . h <nl> ppp b / src / core / file_sys / ivfc_archive . 
h <nl> class IVFCArchive : public ArchiveBackend { <nl> <nl> std : : string GetName ( ) const override ; <nl> <nl> - std : : unique_ptr < FileBackend > OpenFile ( const Path & path , const Mode mode ) const override ; <nl> + ResultVal < std : : unique_ptr < FileBackend > > OpenFile ( const Path & path , const Mode mode ) const override ; <nl> ResultCode DeleteFile ( const Path & path ) const override ; <nl> bool RenameFile ( const Path & src_path , const Path & dest_path ) const override ; <nl> bool DeleteDirectory ( const Path & path ) const override ; <nl> class IVFCFile : public FileBackend { <nl> IVFCFile ( std : : shared_ptr < FileUtil : : IOFile > file , u64 offset , u64 size ) <nl> : romfs_file ( file ) , data_offset ( offset ) , data_size ( size ) { } <nl> <nl> - bool Open ( ) override { return true ; } <nl> + ResultCode Open ( ) override { return RESULT_SUCCESS ; } <nl> ResultVal < size_t > Read ( u64 offset , size_t length , u8 * buffer ) const override ; <nl> ResultVal < size_t > Write ( u64 offset , size_t length , bool flush , const u8 * buffer ) const override ; <nl> u64 GetSize ( ) const override ; <nl> mmm a / src / core / hle / service / fs / archive . cpp <nl> ppp b / src / core / hle / service / fs / archive . cpp <nl> ResultVal < Kernel : : SharedPtr < File > > OpenFileFromArchive ( ArchiveHandle archive_han <nl> if ( archive = = nullptr ) <nl> return ERR_INVALID_HANDLE ; <nl> <nl> - std : : unique_ptr < FileSys : : FileBackend > backend = archive - > OpenFile ( path , mode ) ; <nl> - if ( backend = = nullptr ) { <nl> + auto backend = archive - > OpenFile ( path , mode ) ; <nl> + if ( backend . Failed ( ) ) { <nl> + return backend . Code ( ) ; <nl> - return ResultCode ( ErrorDescription : : FS_NotFound , ErrorModule : : FS , <nl> - ErrorSummary : : NotFound , ErrorLevel : : Status ) ; <nl> } <nl> <nl> - auto file = Kernel : : SharedPtr < File > ( new File ( std : : move ( backend ) , path ) ) ; <nl> + auto file = Kernel : : SharedPtr < File > ( new File ( backend . MoveFrom ( ) , path ) ) ; <nl> return MakeResult < Kernel : : SharedPtr < File > > ( std : : move ( file ) ) ; <nl> } <nl> <nl>
HLE / FS : Return the proper error codes when opening files .
yuzu-emu/yuzu
95b34f8081e26cfe75d63a853d1626fdd5b636e6
2016-03-20T19:28:22Z
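Note: the change above replaces "nullptr means failure" with a ResultVal that carries either the opened file or the ResultCode explaining the failure, so OpenFileFromArchive can forward the real error instead of a generic FS_NotFound. A simplified, hypothetical sketch of that pattern follows; the real Citra/yuzu ResultVal and ResultCode are richer types than these stand-ins.

#include <cassert>
#include <cstdio>
#include <memory>
#include <string>
#include <utility>

using ResultCode = int;  // stand-in; the real type is a structured error code
constexpr ResultCode RESULT_SUCCESS = 0;
constexpr ResultCode ERR_NOT_FOUND = -1;

template <typename T>
class ResultVal {
 public:
  static ResultVal Ok(T value) { return ResultVal(RESULT_SUCCESS, std::move(value)); }
  static ResultVal Err(ResultCode code) { return ResultVal(code, T{}); }
  bool Failed() const { return code_ != RESULT_SUCCESS; }
  ResultCode Code() const { return code_; }
  T MoveFrom() { assert(!Failed()); return std::move(value_); }

 private:
  ResultVal(ResultCode code, T value) : code_(code), value_(std::move(value)) {}
  ResultCode code_;
  T value_;
};

// Hypothetical producer, mirroring the shape of ArchiveBackend::OpenFile.
ResultVal<std::unique_ptr<std::string>> OpenFile(bool exists) {
  if (!exists) return ResultVal<std::unique_ptr<std::string>>::Err(ERR_NOT_FOUND);
  return ResultVal<std::unique_ptr<std::string>>::Ok(
      std::make_unique<std::string>("file contents"));
}

int main() {
  // Caller-side shape after the change: propagate the code, or move the value.
  auto backend = OpenFile(false);
  if (backend.Failed()) {
    std::printf("open failed with code %d\n", backend.Code());
    return 0;
  }
  auto file = backend.MoveFrom();
  std::printf("%s\n", file->c_str());
}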
mmm a / bazel / grpc_deps . bzl <nl> ppp b / bazel / grpc_deps . bzl <nl> def grpc_deps ( ) : <nl> if " upb " not in native . existing_rules ( ) : <nl> http_archive ( <nl> name = " upb " , <nl> - sha256 = " d2142e966d4d122ace5c2cc9afc86f12bf0ff93c39544d7a6c6fdcd69d767985 " , <nl> - strip_prefix = " upb - 9605c7093c269a4767b9c4a57e2ca021c725644d " , <nl> - url = " https : / / github . com / nicolasnoble / upb / archive / 9605c7093c269a4767b9c4a57e2ca021c725644d . tar . gz " , <nl> + sha256 = " f479b263d82690a97b234cb37c8b4026cb7aa10ec578f14e8408dbfa2529a635 " , <nl> + strip_prefix = " upb - 6e85c2bf036c4a18a45224dd9d929ab3639e67f3 " , <nl> + url = " https : / / github . com / nicolasnoble / upb / archive / 6e85c2bf036c4a18a45224dd9d929ab3639e67f3 . tar . gz " , <nl> ) <nl> if " envoy_api " not in native . existing_rules ( ) : <nl> http_archive ( <nl>
Trying another fix for upb .
grpc/grpc
685695f8bcb27dc2e7c70e0e34e2169dfb414b53
2019-07-22T20:33:14Z
mmm a / modules / python / src2 / pycompat . hpp <nl> ppp b / modules / python / src2 / pycompat . hpp <nl> <nl> / / Python3 strings are unicode , these defines mimic the Python2 functionality . <nl> # define PyString_Check PyUnicode_Check <nl> # define PyString_FromString PyUnicode_FromString <nl> - # define PyString_AsString PyUnicode_AsUTF8 <nl> # define PyString_FromStringAndSize PyUnicode_FromStringAndSize <nl> # define PyString_Size PyUnicode_GET_SIZE <nl> + <nl> + / / PyUnicode_AsUTF8 isn ' t available until Python 3 . 3 <nl> + # if ( PY_VERSION_HEX < 0x03030000 ) <nl> + # define PyString_AsString _PyUnicode_AsString <nl> + # else <nl> + # define PyString_AsString PyUnicode_AsUTF8 <nl> + # endif <nl> # endif <nl> <nl> # endif / / END HEADER GUARD <nl>
Merge pull request from prattmic : python3_asstring
opencv/opencv
469d7eed72215615ff05e43c7f08d3b0a851f304
2013-11-11T10:01:07Z
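Note: PY_VERSION_HEX packs major/minor/micro into the high bytes, so the guard value 0x03030000 marks the start of the 3.3 series, where PyUnicode_AsUTF8 first becomes available; older interpreters get the private _PyUnicode_AsString fallback. Below is a minimal C++ sketch of the same compile-time dispatch; MY_PY_VERSION_HEX and the *_as_string functions are hypothetical stand-ins, not CPython API.

#include <cstdio>

// Hypothetical stand-in for CPython's PY_VERSION_HEX (0xMMmmpp.. layout).
#define MY_PY_VERSION_HEX 0x03020000  // pretend we build against Python 3.2

#if (MY_PY_VERSION_HEX < 0x03030000)
#define compat_as_string legacy_as_string  // pre-3.3 fallback path
#else
#define compat_as_string modern_as_string  // PyUnicode_AsUTF8-era path
#endif

const char* legacy_as_string() { return "pre-3.3 fallback selected"; }
const char* modern_as_string() { return "3.3+ API selected"; }

int main() {
  // The macro picks the implementation at preprocessing time, exactly like
  // the PyString_AsString define in pycompat.hpp.
  std::printf("%s\n", compat_as_string());
}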
mmm a / tools / whitespace . txt <nl> ppp b / tools / whitespace . txt <nl> A Smi balks into a war and says : <nl> The doubles heard this and started to unbox . <nl> The Smi looked at them when a crazy v8 - autoroll account showed up . . . <nl> The autoroller bought a round of Himbeerbrause . Suddenly . . . <nl> - The bartender starts to shake the bottles . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl> + The bartender starts to shake the bottles . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . <nl>
[ tools ] Whitespace CL
v8/v8
e6bebb3a28dcb9b3fd4db1c506d8c3212ca19e61
2018-07-28T12:38:54Z
mmm a / tests / CMakeLists . txt <nl> ppp b / tests / CMakeLists . txt <nl> if ( WITH_PYTHON ) <nl> TEST_FILES restarting / from_7 . 0 . 0 / ConfigureTestRestart - 1 . txt <nl> restarting / from_7 . 0 . 0 / ConfigureTestRestart - 2 . txt ) <nl> add_fdb_test ( <nl> - TEST_FILES restarting / CycleTestRestart - 1 . txt <nl> - restarting / CycleTestRestart - 2 . txt ) <nl> + TEST_FILES restarting / from_5 . 0 . 0 / CycleTestRestart - 1 . txt <nl> + restarting / from_5 . 0 . 0 / CycleTestRestart - 2 . txt ) <nl> add_fdb_test ( <nl> - TEST_FILES restarting / StorefrontTestRestart - 1 . txt <nl> - restarting / StorefrontTestRestart - 2 . txt ) <nl> + TEST_FILES restarting / from_5 . 0 . 0 / StorefrontTestRestart - 1 . txt <nl> + restarting / from_5 . 0 . 0 / StorefrontTestRestart - 2 . txt ) <nl> add_fdb_test ( <nl> TEST_FILES restarting / from_6 . 2 . 0 / SnapTestAttrition - 1 . txt <nl> restarting / from_6 . 2 . 0 / SnapTestAttrition - 2 . txt ) <nl> similarity index 100 % <nl> rename from tests / restarting / from_4 . 6 . 0 / CycleTestRestart - 1 . txt <nl> rename to tests / restarting / from_5 . 0 . 0 / CycleTestRestart - 1 . txt <nl> similarity index 100 % <nl> rename from tests / restarting / from_4 . 6 . 0 / CycleTestRestart - 2 . txt <nl> rename to tests / restarting / from_5 . 0 . 0 / CycleTestRestart - 2 . txt <nl> similarity index 100 % <nl> rename from tests / restarting / from_4 . 6 . 0 / StorefrontTestRestart - 1 . txt <nl> rename to tests / restarting / from_5 . 0 . 0 / StorefrontTestRestart - 1 . txt <nl> similarity index 100 % <nl> rename from tests / restarting / from_4 . 6 . 0 / StorefrontTestRestart - 2 . txt <nl> rename to tests / restarting / from_5 . 0 . 0 / StorefrontTestRestart - 2 . txt <nl>
Bump to 5 . 0 . 0 as 4 . 6 . 4 is using a version larger than hasMultiGenerationTLog ( )
apple/foundationdb
2295bb223abd67f1171bb7568c10bc9d3f3972e2
2020-08-16T16:29:14Z
new file mode 100644 <nl> index 000000000000 . . caa2546aad7d <nl> mmm / dev / null <nl> ppp b / project / BuildDependencies / scripts / tinyxml_d . bat <nl> <nl> + @ ECHO ON <nl> + <nl> + SET LOC_PATH = % CD % <nl> + SET FILES = % LOC_PATH % \ tinyxml_d . txt <nl> + <nl> + CALL dlextract . bat tinyxml % FILES % <nl> + <nl> + cd % TMP_PATH % <nl> + <nl> + xcopy tinyxml - 2 . 6 . 2 - win32 \ include \ tinyxml \ * " % CUR_PATH % \ include \ " / E / Q / I / Y <nl> + copy tinyxml - 2 . 6 . 2 - win32 \ lib \ * " % CUR_PATH % \ lib \ " / Y <nl> + <nl> + cd % LOC_PATH % <nl> new file mode 100644 <nl> index 000000000000 . . 6986140c16eb <nl> mmm / dev / null <nl> ppp b / project / BuildDependencies / scripts / tinyxml_d . txt <nl> <nl> + ; filename mirror of the file <nl> + tinyxml - 2 . 6 . 2 - win32 . 7z http : / / mirrors . xbmc . org / build - deps / win32 / <nl>
[ WIN32 ] added scripts to download tinyxml from our mirrors
xbmc/xbmc
567d9623dd49b59b0a94b2a48989930c5fa6c94c
2012-05-04T02:30:51Z
mmm a / Changelog <nl> ppp b / Changelog <nl> <nl> - BUGFIX : Update torrent progress when its content changed ( filtered files ) <nl> - BUGFIX : Improved the way menu icons are installed to avoid problems on some systems <nl> - BUGFIX : Improved incremental download <nl> + - BUGFIX : Improved unicode support <nl> - COSMETIC : Redesigned torrent properties a little <nl> - COSMETIC : Redesigned options a little <nl> - COSMETIC : Display more logs messages concerning features <nl> mmm a / TODO <nl> ppp b / TODO <nl> beta4 - > beta5 changelog : <nl> - BUGFIX : Improved incremental download <nl> - BUGFIX : Fixed preview from seeding list <nl> - BUGFIX : Fixed Alt + 3 & Ctrl + F keyboard shortcuts for third tab <nl> + - BUGFIX : Improved unicode support <nl> - I18N : Updated Italian , Polish , Portuguese , Brazilian and Spanish translations <nl> - COSMETIC : Changed the way progress bars are rendered <nl> mmm a / src / GUI . cpp <nl> ppp b / src / GUI . cpp <nl> void GUI : : configureSession ( bool deleteOptions ) { <nl> BTSession - > setListeningPortsRange ( options - > getPorts ( ) ) ; <nl> new_listenPort = BTSession - > getListenPort ( ) ; <nl> if ( new_listenPort ! = old_listenPort ) { <nl> - setInfoBar ( tr ( " qBittorrent is bind to port : % 1 " , " e . g : qBittorrent is bind to port : 1666 " ) . arg ( QString ( misc : : toString ( new_listenPort ) . c_str ( ) ) ) ) ; <nl> + setInfoBar ( tr ( " qBittorrent is bind to port : % 1 " , " e . g : qBittorrent is bind to port : 1666 " ) . arg ( misc : : toQString ( new_listenPort ) ) ) ; <nl> } <nl> / / Apply max connec limit ( - 1 if disabled ) <nl> BTSession - > setMaxConnections ( options - > getMaxConnec ( ) ) ; <nl> mmm a / src / createtorrent_imp . cpp <nl> ppp b / src / createtorrent_imp . cpp <nl> createtorrent : : createtorrent ( QWidget * parent ) : QDialog ( parent ) { <nl> } <nl> <nl> void createtorrent : : on_browse_destination_clicked ( ) { <nl> - QString destination = QFileDialog : : getSaveFileName ( this , tr ( " Select destination torrent file " ) , QDir : : homePath ( ) , tr ( " Torrent Files " ) + " ( * . torrent ) " ) ; <nl> + QString destination = QFileDialog : : getSaveFileName ( this , tr ( " Select destination torrent file " ) , QDir : : homePath ( ) , tr ( " Torrent Files " ) + QString : : fromUtf8 ( " ( * . torrent ) " ) ) ; <nl> if ( ! destination . isEmpty ( ) ) { <nl> - if ( ! destination . endsWith ( " . torrent " ) ) <nl> - destination + = " . torrent " ; <nl> + if ( ! destination . endsWith ( QString : : fromUtf8 ( " . torrent " ) ) ) <nl> + destination + = QString : : fromUtf8 ( " . torrent " ) ; <nl> txt_destination - > setText ( destination ) ; <nl> } <nl> } <nl> void createtorrent : : on_createButton_clicked ( ) { <nl> } <nl> catch ( std : : exception & e ) { <nl> std : : cerr < < e . what ( ) < < " \ n " ; <nl> - QMessageBox : : information ( 0 , tr ( " Torrent creation " ) , tr ( " Torrent creation was unsuccessful , reason : % 1 " ) . arg ( QString ( e . what ( ) ) ) ) ; <nl> + QMessageBox : : information ( 0 , tr ( " Torrent creation " ) , tr ( " Torrent creation was unsuccessful , reason : % 1 " ) . arg ( QString : : fromUtf8 ( e . what ( ) ) ) ) ; <nl> hide ( ) ; <nl> return ; <nl> } <nl> mmm a / src / main . cpp <nl> ppp b / src / main . cpp <nl> <nl> <nl> void useStyle ( QApplication * app , QString style ) { <nl> std : : cout < < " * Style : Using " < < style . 
toStdString ( ) < < " style \ n " ; <nl> - if ( style = = " Cleanlooks " ) { <nl> + if ( style = = QString : : fromUtf8 ( " Cleanlooks " ) ) { <nl> app - > setStyle ( new QCleanlooksStyle ( ) ) ; <nl> return ; <nl> } <nl> - if ( style = = " Motif " ) { <nl> + if ( style = = QString : : fromUtf8 ( " Motif " ) ) { <nl> app - > setStyle ( new QMotifStyle ( ) ) ; <nl> return ; <nl> } <nl> - if ( style = = " CDE " ) { <nl> + if ( style = = QString : : fromUtf8 ( " CDE " ) ) { <nl> app - > setStyle ( new QCDEStyle ( ) ) ; <nl> return ; <nl> } <nl> # ifdef Q_WS_MAC <nl> - if ( style = = " MacOS " ) { <nl> + if ( style = = QString : : fromUtf8 ( " MacOS " ) ) { <nl> app - > setStyle ( new QMacStyle ( ) ) ; <nl> return ; <nl> } <nl> # endif <nl> # ifdef Q_WS_WIN <nl> - if ( style = = " WinXP " ) { <nl> + if ( style = = QString : : fromUtf8 ( " WinXP " ) ) { <nl> app - > setStyle ( new QWindowsXPStyle ( ) ) ; <nl> return ; <nl> } <nl> int main ( int argc , char * argv [ ] ) { <nl> QFile file ; <nl> QString locale ; <nl> if ( argc > 1 ) { <nl> - if ( QString ( argv [ 1 ] ) = = " - - version " ) { <nl> + if ( QString : : fromUtf8 ( argv [ 1 ] ) = = QString : : fromUtf8 ( " - - version " ) ) { <nl> std : : cout < < " qBittorrent " < < VERSION < < ' \ n ' ; <nl> return 0 ; <nl> } <nl> - if ( QString ( argv [ 1 ] ) = = " - - help " ) { <nl> + if ( QString : : fromUtf8 ( argv [ 1 ] ) = = QString : : fromUtf8 ( " - - help " ) ) { <nl> std : : cout < < " Usage : \ n " ; <nl> std : : cout < < ' \ t ' < < argv [ 0 ] < < " - - version : displays program version \ n " ; <nl> std : : cout < < ' \ t ' < < argv [ 0 ] < < " - - help : displays this help message \ n " ; <nl> int main ( int argc , char * argv [ ] ) { <nl> if ( argc > 1 ) { <nl> QStringList params ; <nl> for ( int i = 1 ; i < argc ; + + i ) { <nl> - params < < QString ( argv [ i ] ) ; <nl> - std : : cout < < QString ( argv [ i ] ) . toStdString ( ) < < ' \ n ' ; <nl> + params < < QString : : fromUtf8 ( argv [ i ] ) ; <nl> + std : : cout < < argv [ i ] < < ' \ n ' ; <nl> } <nl> QByteArray block = params . join ( " \ n " ) . toUtf8 ( ) ; <nl> std : : cout < < " writting : " < < block . data ( ) < < ' \ n ' ; <nl> int main ( int argc , char * argv [ ] ) { <nl> return 0 ; <nl> } <nl> QApplication app ( argc , argv ) ; <nl> - QSettings settings ( " qBittorrent " , " qBittorrent " ) ; <nl> + QSettings settings ( QString : : fromUtf8 ( " qBittorrent " ) , QString : : fromUtf8 ( " qBittorrent " ) ) ; <nl> QString style ; <nl> # ifdef Q_WS_WIN <nl> - style = settings . value ( " Options / Style " , " WinXP " ) . toString ( ) ; <nl> + style = settings . value ( QString : : fromUtf8 ( " Options / Style " ) , QString : : fromUtf8 ( " WinXP " ) ) . toString ( ) ; <nl> # endif <nl> # ifdef Q_WS_MAC <nl> - style = settings . value ( " Options / Style " , " MacOS " ) . toString ( ) ; <nl> + style = settings . value ( QString : : fromUtf8 ( " Options / Style " ) , QString : : fromUtf8 ( " MacOS " ) ) . toString ( ) ; <nl> # endif <nl> # ifndef Q_WS_WIN <nl> # ifndef Q_WS_MAC <nl> - style = settings . value ( " Options / Style " , " Plastique " ) . toString ( ) ; <nl> + style = settings . value ( QString : : fromUtf8 ( " Options / Style " ) , QString : : fromUtf8 ( " Plastique " ) ) . toString ( ) ; <nl> # endif <nl> # endif <nl> useStyle ( & app , style ) ; <nl> - QSplashScreen * splash = new QSplashScreen ( QPixmap ( " : / Icons / splash . png " ) ) ; <nl> + QSplashScreen * splash = new QSplashScreen ( QPixmap ( QString : : fromUtf8 ( " : / Icons / splash . 
png " ) ) ) ; <nl> splash - > show ( ) ; <nl> / / Open options file to read locale <nl> - locale = settings . value ( " Options / Language / Locale " , QString ( ) ) . toString ( ) ; <nl> + locale = settings . value ( QString : : fromUtf8 ( " Options / Language / Locale " ) , QString ( ) ) . toString ( ) ; <nl> QTranslator translator ; <nl> if ( locale . isEmpty ( ) ) { <nl> locale = QLocale : : system ( ) . name ( ) ; <nl> - settings . setValue ( " Options / Language / Locale " , locale ) ; <nl> + settings . setValue ( QString : : fromUtf8 ( " Options / Language / Locale " ) , locale ) ; <nl> } <nl> - if ( translator . load ( QString ( " : / lang / qbittorrent_ " ) + locale ) ) { <nl> + if ( translator . load ( QString : : fromUtf8 ( " : / lang / qbittorrent_ " ) + locale ) ) { <nl> qDebug ( " % s locale recognized , using translation . " , ( const char * ) locale . toUtf8 ( ) ) ; <nl> } else { <nl> qDebug ( " % s locale unrecognized , using default ( en_GB ) . " , ( const char * ) locale . toUtf8 ( ) ) ; <nl> } <nl> app . installTranslator ( & translator ) ; <nl> - app . setApplicationName ( " qBittorrent " ) ; <nl> + app . setApplicationName ( QString : : fromUtf8 ( " qBittorrent " ) ) ; <nl> app . setQuitOnLastWindowClosed ( false ) ; <nl> / / Read torrents given on command line <nl> QStringList torrentCmdLine = app . arguments ( ) ; <nl> mmm a / src / options_imp . cpp <nl> ppp b / src / options_imp . cpp <nl> void options_imp : : processFilterFile ( QString filePath ) { <nl> address_v4 last ( ( IP . at ( 0 ) . toInt ( ) < < 24 ) + ( IP . at ( 1 ) . toInt ( ) < < 16 ) + ( IP . at ( 2 ) . toInt ( ) < < 8 ) + IP . at ( 3 ) . toInt ( ) ) ; <nl> <nl> / / add it to list <nl> - QStringList item ( QString ( start . to_string ( ) . c_str ( ) ) ) ; <nl> - item . append ( QString ( last . to_string ( ) . c_str ( ) ) ) ; <nl> + QStringList item ( QString : : fromUtf8 ( start . to_string ( ) . c_str ( ) ) ) ; <nl> + item . append ( QString : : fromUtf8 ( last . to_string ( ) . c_str ( ) ) ) ; <nl> if ( ! i ) { <nl> item . append ( QString : : fromUtf8 ( " Manual " ) ) ; <nl> } else { <nl> void options_imp : : processFilterFile ( QString filePath ) { <nl> address_v6 last = address_v6 : : from_string ( strEndIP . remove ( ' : ' , 0 ) . toUtf8 ( ) . data ( ) ) ; <nl> <nl> / / add it to list <nl> - QStringList item ( QString ( start . to_string ( ) . c_str ( ) ) ) ; <nl> - item . append ( QString ( last . to_string ( ) . c_str ( ) ) ) ; <nl> + QStringList item ( QString : : fromUtf8 ( start . to_string ( ) . c_str ( ) ) ) ; <nl> + item . append ( QString : : fromUtf8 ( last . to_string ( ) . c_str ( ) ) ) ; <nl> if ( ! i ) { <nl> item . append ( QString : : fromUtf8 ( " Manual " ) ) ; <nl> } else { <nl> mmm a / src / properties_imp . cpp <nl> ppp b / src / properties_imp . cpp <nl> void properties : : loadTrackers ( ) { <nl> trackersURLS - > clear ( ) ; <nl> unsigned int nbTrackers = trackers . size ( ) ; <nl> for ( unsigned int i = 0 ; i < nbTrackers ; + + i ) { <nl> - trackersURLS - > addItem ( QString ( trackers [ i ] . url . c_str ( ) ) ) ; <nl> + trackersURLS - > addItem ( misc : : toQString ( trackers [ i ] . url ) ) ; <nl> } <nl> QString tracker = h . current_tracker ( ) . trimmed ( ) ; <nl> if ( ! tracker . isEmpty ( ) ) { <nl> void properties : : lowerSelectedTracker ( ) { <nl> foreach ( item , selectedItems ) { <nl> QString url = item - > text ( ) ; <nl> for ( i = 0 ; i < nbTrackers ; + + i ) { <nl> - if ( QString ( trackers . at ( i ) . url . 
c_str ( ) ) = = url ) { <nl> + if ( misc : : toQString ( trackers . at ( i ) . url ) = = url ) { <nl> qDebug ( " Asked to lower % s " , trackers . at ( i ) . url . c_str ( ) ) ; <nl> qDebug ( " its tier was % d and will become % d " , trackers [ i ] . tier , trackers [ i ] . tier + 1 ) ; <nl> if ( i < nbTrackers - 1 ) { <nl> mmm a / src / rss . h <nl> ppp b / src / rss . h <nl> class RssStream : public QObject { <nl> / / download the icon from the adress <nl> QString getIconUrl ( ) { <nl> QUrl siteUrl ( url ) ; <nl> - return QString ( " http : / / " + siteUrl . host ( ) + " / favicon . ico " ) ; <nl> + return QString : : fromUtf8 ( " http : / / " ) + siteUrl . host ( ) + QString : : fromUtf8 ( " / favicon . ico " ) ; <nl> } <nl> <nl> private : <nl> class RssStream : public QObject { <nl> short readDoc ( const QDomDocument & doc ) { <nl> / / is it a rss file ? <nl> QDomElement root = doc . documentElement ( ) ; <nl> - if ( root . tagName ( ) = = " html " ) { <nl> + if ( root . tagName ( ) = = QString : : fromUtf8 ( " html " ) ) { <nl> qDebug ( " the file is empty , maybe the url is invalid or the server is too busy " ) ; <nl> return - 1 ; <nl> } <nl> - else if ( root . tagName ( ) ! = " rss " ) { <nl> - qDebug ( " the file is not a rss stream , < rss > omitted : % s " , ( const char * ) root . tagName ( ) . toUtf8 ( ) ) ; <nl> + else if ( root . tagName ( ) ! = QString : : fromUtf8 ( " rss " ) ) { <nl> + qDebug ( " the file is not a rss stream , < rss > omitted : % s " , root . tagName ( ) . toUtf8 ( ) . data ( ) ) ; <nl> return - 1 ; <nl> } <nl> QDomNode rss = root . firstChild ( ) ; <nl> mmm a / src / rss_imp . cpp <nl> ppp b / src / rss_imp . cpp <nl> <nl> RssStream * stream ; <nl> foreach ( stream , feeds ) { <nl> QTreeWidgetItem * item = new QTreeWidgetItem ( listStreams ) ; <nl> - item - > setData ( 0 , Qt : : DisplayRole , stream - > getAliasOrUrl ( ) + QString ( " ( 0 ) " ) ) ; <nl> - item - > setData ( 0 , Qt : : DecorationRole , QVariant ( QIcon ( " : / Icons / loading . png " ) ) ) ; <nl> + item - > setData ( 0 , Qt : : DisplayRole , stream - > getAliasOrUrl ( ) + QString : : fromUtf8 ( " ( 0 ) " ) ) ; <nl> + item - > setData ( 0 , Qt : : DecorationRole , QVariant ( QIcon ( QString : : fromUtf8 ( " : / Icons / loading . 
png " ) ) ) ) ; <nl> item - > setData ( 1 , Qt : : DisplayRole , stream - > getUrl ( ) ) ; <nl> - item - > setToolTip ( 0 , QString ( " < b > " ) + tr ( " Description : " ) + QString ( " < / b > " ) + stream - > getDescription ( ) + QString ( " < br / > < b > " ) + tr ( " url : " ) + QString ( " < / b > " ) + stream - > getUrl ( ) + QString ( " < br / > < b > " ) + tr ( " Last refresh : " ) + QString ( " < / b > " ) + stream - > getLastRefreshElapsedString ( ) ) ; <nl> + item - > setToolTip ( 0 , QString : : fromUtf8 ( " < b > " ) + tr ( " Description : " ) + QString : : fromUtf8 ( " < / b > " ) + stream - > getDescription ( ) + QString : : fromUtf8 ( " < br / > < b > " ) + tr ( " url : " ) + QString : : fromUtf8 ( " < / b > " ) + stream - > getUrl ( ) + QString : : fromUtf8 ( " < br / > < b > " ) + tr ( " Last refresh : " ) + QString : : fromUtf8 ( " < / b > " ) + stream - > getLastRefreshElapsedString ( ) ) ; <nl> } <nl> } <nl> <nl> <nl> return ; <nl> } <nl> QTreeWidgetItem * item = new QTreeWidgetItem ( listStreams ) ; <nl> - item - > setText ( 0 , stream - > getAliasOrUrl ( ) + QString ( " ( 0 ) " ) ) ; <nl> + item - > setText ( 0 , stream - > getAliasOrUrl ( ) + QString : : fromUtf8 ( " ( 0 ) " ) ) ; <nl> item - > setText ( 1 , stream - > getUrl ( ) ) ; <nl> item - > setData ( 0 , Qt : : DecorationRole , QVariant ( QIcon ( " : / Icons / loading . png " ) ) ) ; <nl> - item - > setToolTip ( 0 , QString ( " < b > " ) + tr ( " Description : " ) + QString ( " < / b > " ) + stream - > getDescription ( ) + QString ( " < br / > < b > " ) + tr ( " url : " ) + QString ( " < / b > " ) + stream - > getUrl ( ) + QString ( " < br / > < b > " ) + tr ( " Last refresh : " ) + QString ( " < / b > " ) + stream - > getLastRefreshElapsedString ( ) ) ; <nl> + item - > setToolTip ( 0 , QString : : fromUtf8 ( " < b > " ) + tr ( " Description : " ) + QString : : fromUtf8 ( " < / b > " ) + stream - > getDescription ( ) + QString : : fromUtf8 ( " < br / > < b > " ) + tr ( " url : " ) + QString : : fromUtf8 ( " < / b > " ) + stream - > getUrl ( ) + QString : : fromUtf8 ( " < br / > < b > " ) + tr ( " Last refresh : " ) + QString : : fromUtf8 ( " < / b > " ) + stream - > getLastRefreshElapsedString ( ) ) ; <nl> if ( listStreams - > topLevelItemCount ( ) = = 1 ) <nl> selectFirstFeed ( ) ; <nl> rssmanager - > refresh ( newUrl ) ; <nl> <nl> for ( unsigned int i = 0 ; i < nbFeeds ; + + i ) { <nl> QTreeWidgetItem * item = listStreams - > topLevelItem ( i ) ; <nl> RssStream * stream = rssmanager - > getFeed ( item - > data ( 1 , Qt : : DisplayRole ) . 
toString ( ) ) ; <nl> - item - > setToolTip ( 0 , QString ( " < b > " ) + tr ( " Description : " ) + QString ( " < / b > " ) + stream - > getDescription ( ) + QString ( " < br / > < b > " ) + tr ( " url : " ) + QString ( " < / b > " ) + stream - > getUrl ( ) + QString ( " < br / > < b > " ) + tr ( " Last refresh : " ) + QString ( " < / b > " ) + stream - > getLastRefreshElapsedString ( ) ) ; <nl> + item - > setToolTip ( 0 , QString : : fromUtf8 ( " < b > " ) + tr ( " Description : " ) + QString : : fromUtf8 ( " < / b > " ) + stream - > getDescription ( ) + QString : : fromUtf8 ( " < br / > < b > " ) + tr ( " url : " ) + QString : : fromUtf8 ( " < / b > " ) + stream - > getUrl ( ) + QString : : fromUtf8 ( " < br / > < b > " ) + tr ( " Last refresh : " ) + QString : : fromUtf8 ( " < / b > " ) + stream - > getLastRefreshElapsedString ( ) ) ; <nl> } <nl> } <nl> <nl> <nl> void RSSImp : : updateFeedNbNews ( QString url ) { <nl> QTreeWidgetItem * item = getTreeItemFromUrl ( url ) ; <nl> RssStream * stream = rssmanager - > getFeed ( url ) ; <nl> - item - > setText ( 0 , stream - > getAliasOrUrl ( ) + QString ( " ( " ) + QString : : number ( stream - > getNbUnRead ( ) , 10 ) + QString ( " ) " ) ) ; <nl> + item - > setText ( 0 , stream - > getAliasOrUrl ( ) + QString : : fromUtf8 ( " ( " ) + QString : : number ( stream - > getNbUnRead ( ) , 10 ) + QString ( " ) " ) ) ; <nl> } <nl> <nl> void RSSImp : : updateFeedInfos ( QString url , QString aliasOrUrl , unsigned int nbUnread ) { <nl> QTreeWidgetItem * item = getTreeItemFromUrl ( url ) ; <nl> RssStream * stream = rssmanager - > getFeed ( url ) ; <nl> - item - > setText ( 0 , aliasOrUrl + QString ( " ( " ) + QString : : number ( nbUnread , 10 ) + QString ( " ) " ) ) ; <nl> + item - > setText ( 0 , aliasOrUrl + QString : : fromUtf8 ( " ( " ) + QString : : number ( nbUnread , 10 ) + QString ( " ) " ) ) ; <nl> item - > setData ( 0 , Qt : : DecorationRole , QVariant ( QIcon ( stream - > getIconPath ( ) ) ) ) ; <nl> - item - > setToolTip ( 0 , QString ( " < b > " ) + tr ( " Description : " ) + QString ( " < / b > " ) + stream - > getDescription ( ) + QString ( " < br / > < b > " ) + tr ( " url : " ) + QString ( " < / b > " ) + stream - > getUrl ( ) + QString ( " < br / > < b > " ) + tr ( " Last refresh : " ) + QString ( " < / b > " ) + stream - > getLastRefreshElapsedString ( ) ) ; <nl> + item - > setToolTip ( 0 , QString : : fromUtf8 ( " < b > " ) + tr ( " Description : " ) + QString : : fromUtf8 ( " < / b > " ) + stream - > getDescription ( ) + QString : : fromUtf8 ( " < br / > < b > " ) + tr ( " url : " ) + QString : : fromUtf8 ( " < / b > " ) + stream - > getUrl ( ) + QString : : fromUtf8 ( " < br / > < b > " ) + tr ( " Last refresh : " ) + QString : : fromUtf8 ( " < / b > " ) + stream - > getLastRefreshElapsedString ( ) ) ; <nl> / / If the feed is selected , update the displayed news <nl> if ( selectedFeedUrl = = url ) { <nl> refreshNewsList ( getTreeItemFromUrl ( url ) , 0 ) ; <nl> mmm a / src / searchEngine . cpp <nl> ppp b / src / searchEngine . cpp <nl> void SearchEngine : : saveColWidthSearchList ( ) const { <nl> QSettings settings ( " qBittorrent " , " qBittorrent " ) ; <nl> QStringList width_list ; <nl> for ( int i = 0 ; i < SearchListModel - > columnCount ( ) ; + + i ) { <nl> - width_list < < QString ( misc : : toString ( resultsBrowser - > columnWidth ( i ) ) . c_str ( ) ) ; <nl> + width_list < < misc : : toQString ( resultsBrowser - > columnWidth ( i ) ) ; <nl> } <nl> settings . setValue ( " SearchListColsWidth " , width_list . 
join ( " " ) ) ; <nl> qDebug ( " Search list columns width saved " ) ; <nl> void SearchEngine : : readSearchOutput ( ) { <nl> foreach ( line , lines_list ) { <nl> appendSearchResult ( QString ( line ) ) ; <nl> } <nl> - results_lbl - > setText ( tr ( " Results " ) + " < i > ( " + QString ( misc : : toString ( nb_search_results ) . c_str ( ) ) + " ) < / i > : " ) ; <nl> + results_lbl - > setText ( tr ( " Results " ) + QString : : fromUtf8 ( " < i > ( " ) + misc : : toQString ( nb_search_results ) + QString : : fromUtf8 ( " ) < / i > : " ) ) ; <nl> } <nl> <nl> / / Returns version of nova . py search engine <nl> QByteArray SearchEngine : : getNovaChangelog ( QString novaPath , float my_version ) co <nl> QString end_version = " # Version : " ; <nl> char tmp [ 5 ] ; <nl> snprintf ( tmp , 5 , " % . 2f " , my_version ) ; <nl> - end_version + = QString ( tmp ) ; <nl> + end_version + = QString : : fromUtf8 ( tmp ) ; <nl> if ( line . startsWith ( ( const char * ) end_version . toUtf8 ( ) ) ) break ; <nl> if ( in_changelog ) { <nl> line . remove ( 0 , 1 ) ; <nl> void SearchEngine : : searchFinished ( int exitcode , QProcess : : ExitStatus ) { <nl> } <nl> } <nl> } <nl> - results_lbl - > setText ( tr ( " Results " , " i . e : Search results " ) + " < i > ( " + QString ( misc : : toString ( nb_search_results ) . c_str ( ) ) + " ) < / i > : " ) ; <nl> + results_lbl - > setText ( tr ( " Results " , " i . e : Search results " ) + QString : : fromUtf8 ( " < i > ( " ) + misc : : toQString ( nb_search_results ) + QString : : fromUtf8 ( " ) < / i > : " ) ) ; <nl> search_button - > setEnabled ( true ) ; <nl> stop_search_button - > setEnabled ( false ) ; <nl> } <nl>
- Improved Unicode support a bit
qbittorrent/qBittorrent
8ba1cb329d85a8ecb6c2a165bdba5306196a69b0
2007-08-20T07:03:41Z
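Editor's note on the commit above: under Qt 4 (current when this change landed), constructing a QString from a const char* goes through QString::fromAscii(), which decodes as Latin-1 by default and therefore garbles UTF-8 source literals; QString::fromUtf8() makes the decoding explicit. The diff also replaces the round-trip QString(misc::toString(n).c_str()) with a misc::toQString(n) helper. The sketch below illustrates both conversions; the body of misc::toQString here is a hypothetical reconstruction inferred from its call sites in the diff, not qBittorrent's actual implementation.

#include <QDebug>
#include <QString>
#include <sstream>

// Hypothetical stand-in for qBittorrent's misc::toQString helper,
// reconstructed from how the diff uses it: format any streamable value,
// then decode the resulting bytes explicitly instead of relying on the
// default 8-bit codec of the QString(const char*) constructor.
namespace misc {
template <typename T>
QString toQString(const T& value) {
  std::ostringstream oss;
  oss << value;  // numeric input formats to plain ASCII digits
  return QString::fromUtf8(oss.str().c_str());
}
}  // namespace misc

int main() {
  // Before: QString(const char*) decodes via fromAscii() in Qt 4, so a
  // UTF-8 literal such as "résumé" may render as "rÃ©sumÃ©".
  QString implicitDecode = QString("r\xC3\xA9sum\xC3\xA9");

  // After: the explicit decoding applied throughout this commit.
  QString explicitDecode = QString::fromUtf8("r\xC3\xA9sum\xC3\xA9");  // always "résumé"

  // After: number-to-string conversion without the c_str() round-trip.
  QString width = misc::toQString(42);  // "42"

  qDebug() << implicitDecode << explicitDecode << width;
  return 0;
}

The same pattern appears line after line in the diff: every bare string literal handed to QString, QIcon, or operator+ is wrapped in QString::fromUtf8(), so the decoding no longer depends on the application's default codec.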